repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
AnhellO/DAS_Sistemas | Ene-Jun-2018/Luis Carielo/Segundo Parcial/reporte.py | 1 | 1498 | """
import json
class Report:
def getTitle(self):
return 'Title!'
def getDate(self):
return '2018-05-23'
def getContents(self):
return {
'title': self.getTitle(),
'date': self.getDate()
}
def formatJson(self):
return json.dumps(self.getContents())
report = Report()
print(report.getContents())
"""
###################################################################################################
###################################################################################################
###################################################################################################
import json
class Title:
def getTitle(self):
return "Title"
def Json(self):
return json.dumps(self.getTitle())
class Date:
def getDate(self):
return "2018-05-23"
def Json(self):
return json.dumps(self.getDate())
class Contents:
def getContents(self, title="titulo", date="2018-05-23"):
return "title:{}\ndate:{}".format(title, date)
def Json(self):
return json.dumps(self.getContents())
titulo = Title()
print(titulo.getTitle())
fecha = Date()
print(fecha.getDate())
contenido = Contents()
print(contenido.getContents())
"""
I separated the functionality into classes so that the requests are simpler to implement and,
if the project ever grows, there will not be much trouble implementing the new
functionality.
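
A minimal sketch of the kind of extension this layout is meant to allow (a
hypothetical Author class, not part of the original code, following the same
pattern as Title and Date):

    class Author:
        def getAuthor(self):
            return "Luis Carielo"
        def Json(self):
            return json.dumps(self.getAuthor())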
""" | mit | -406,280,010,846,313,200 | 21.681818 | 99 | 0.514706 | false |
simgunz/anki | qt/aqt/dbcheck.py | 1 | 1471 | # Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import annotations
from concurrent.futures import Future
import aqt
from aqt.qt import *
from aqt.utils import showText, tooltip
def on_progress(mw: aqt.main.AnkiQt) -> None:
progress = mw.col.latest_progress()
if not progress.HasField("database_check"):
return
dbprogress = progress.database_check
mw.progress.update(
process=False,
label=dbprogress.stage,
value=dbprogress.stage_current,
max=dbprogress.stage_total,
)
def check_db(mw: aqt.AnkiQt) -> None:
def on_timer() -> None:
on_progress(mw)
timer = QTimer(mw)
qconnect(timer.timeout, on_timer)
timer.start(100)
def on_future_done(fut: Future) -> None:
timer.stop()
ret, ok = fut.result()
if not ok:
showText(ret)
else:
tooltip(ret)
# if an error has directed the user to check the database,
# silently clean up any broken reset hooks which distract from
# the underlying issue
n = 0
while n < 10:
try:
mw.reset()
break
except Exception as e:
print("swallowed exception in reset hook:", e)
n += 1
continue
mw.taskman.with_progress(mw.col.fixIntegrity, on_future_done)
| agpl-3.0 | 822,746,869,428,643,500 | 25.267857 | 78 | 0.598912 | false |
wroersma/volatility | volatility/plugins/filescan.py | 1 | 17093 | # fileobjscan.py
# Copyright 2009 Andreas Schuster <[email protected]>
# Copyright (C) 2009-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andreas Schuster
@license: GNU General Public License 2.0
@contact: [email protected]
@organization: http://computer.forensikblog.de/en/
"""
import volatility.plugins.common as common
import volatility.obj as obj
import volatility.poolscan as poolscan
import volatility.utils as utils
from volatility.renderers import TreeGrid
from volatility.renderers.basic import Address
class PoolScanFile(poolscan.PoolScanner):
"""Pool scanner for file objects"""
def __init__(self, address_space):
poolscan.PoolScanner.__init__(self, address_space)
self.struct_name = "_FILE_OBJECT"
self.object_type = "File"
self.pooltag = obj.VolMagic(address_space).FilePoolTag.v()
size = 0x98 # self.address_space.profile.get_obj_size("_FILE_OBJECT")
self.checks = [
('CheckPoolSize', dict(condition = lambda x: x >= size)),
('CheckPoolType', dict(paged = False, non_paged = True, free = True)),
('CheckPoolIndex', dict(value = lambda x : x < 5)),
]
class FileScan(common.AbstractScanCommand):
"""Pool scanner for file objects"""
scanners = [PoolScanFile]
# Declare meta information associated with this plugin
meta_info = {}
meta_info['author'] = 'Andreas Schuster'
meta_info['copyright'] = 'Copyright (c) 2009 Andreas Schuster'
meta_info['contact'] = '[email protected]'
meta_info['license'] = 'GNU General Public License 2.0'
meta_info['url'] = 'http://computer.forensikblog.de/en/'
meta_info['os'] = 'WIN_32_XP_SP2'
meta_info['version'] = '0.1'
def render_text(self, outfd, data):
self.table_header(outfd, [(self.offset_column(), '#018x'),
('#Ptr', '>6'),
('#Hnd', '>6'),
('Access', '>6'),
('Name', '')
])
for file in data:
header = file.get_object_header()
self.table_row(outfd,
file.obj_offset,
header.PointerCount,
header.HandleCount,
file.access_string(),
str(file.file_name_with_device() or ''))
def unified_output(self, data):
return TreeGrid([(self.offset_column(), Address),
("Pointers", int),
("Handles", int),
("Access", str),
("Name", str)],
self.generator(data))
def generator(self, data):
for file in data:
header = file.get_object_header()
yield (0, [Address(file.obj_offset),
int(header.PointerCount),
int(header.HandleCount),
str(file.access_string()),
str(file.file_name_with_device() or '')])
class PoolScanDriver(poolscan.PoolScanner):
"""Pool scanner for driver objects"""
def __init__(self, address_space):
poolscan.PoolScanner.__init__(self, address_space)
self.struct_name = "_DRIVER_OBJECT"
self.object_type = "Driver"
# due to the placement of the driver extension, we
# use the top down approach instead of bottom-up.
self.use_top_down = True
self.pooltag = obj.VolMagic(address_space).DriverPoolTag.v()
size = 0xf8 # self.address_space.profile.get_obj_size("_DRIVER_OBJECT")
self.checks = [
('CheckPoolSize', dict(condition = lambda x: x >= size)),
('CheckPoolType', dict(paged = False, non_paged = True, free = True)),
('CheckPoolIndex', dict(value = lambda x : x < 5)),
]
class DriverScan(common.AbstractScanCommand):
"""Pool scanner for driver objects"""
scanners = [PoolScanDriver]
def unified_output(self, data):
return TreeGrid([(self.offset_column(), Address),
("Pointers", int),
("Handles", int),
("Start", Address),
("Size", int),
("Service Key", str),
("Name", str),
("Driver Name", str)],
self.generator(data))
def generator(self, data):
for driver in data:
header = driver.get_object_header()
yield (0, [Address(driver.obj_offset),
int(header.PointerCount),
int(header.HandleCount),
Address(driver.DriverStart),
int(driver.DriverSize),
str(driver.DriverExtension.ServiceKeyName or ''),
str(header.NameInfo.Name or ''),
str(driver.DriverName or '')])
def render_text(self, outfd, data):
self.table_header(outfd, [(self.offset_column(), '#018x'),
('#Ptr', '>8'),
('#Hnd', '>8'),
('Start', '[addrpad]'),
('Size', '[addr]'),
('Service Key', '20'),
('Name', '12'),
('Driver Name', '')
])
for driver in data:
header = driver.get_object_header()
self.table_row(outfd,
driver.obj_offset,
header.PointerCount,
header.HandleCount,
driver.DriverStart,
driver.DriverSize,
str(driver.DriverExtension.ServiceKeyName or ''),
str(header.NameInfo.Name or ''),
str(driver.DriverName or ''))
class PoolScanSymlink(poolscan.PoolScanner):
"""Pool scanner for symlink objects"""
def __init__(self, address_space):
poolscan.PoolScanner.__init__(self, address_space)
self.struct_name = "_OBJECT_SYMBOLIC_LINK"
self.object_type = "SymbolicLink"
self.pooltag = obj.VolMagic(address_space).SymlinkPoolTag.v()
size = 0x48 # self.address_space.profile.get_obj_size("_OBJECT_SYMBOLIC_LINK")
self.checks = [
('CheckPoolSize', dict(condition = lambda x: x >= size)),
('CheckPoolType', dict(paged = True, non_paged = True, free = True)),
]
class SymLinkScan(common.AbstractScanCommand):
"""Pool scanner for symlink objects"""
scanners = [PoolScanSymlink]
def unified_output(self, data):
return TreeGrid([(self.offset_column(), Address),
("Pointers", int),
("Handles", int),
("Creation Time", str),
("Origin", str),
("Target", str)],
self.generator(data))
def generator(self, data):
for link in data:
header = link.get_object_header()
yield (0, [Address(link.obj_offset),
int(header.PointerCount),
int(header.HandleCount),
str(link.CreationTime or ''),
str(header.NameInfo.Name or ''),
str(link.LinkTarget or '')])
def render_text(self, outfd, data):
self.table_header(outfd, [(self.offset_column(), '#018x'),
('#Ptr', '>6'),
('#Hnd', '>6'),
('Creation time', '30'),
('From', '<20'),
('To', '60'),
])
for link in data:
header = link.get_object_header()
self.table_row(outfd,
link.obj_offset,
header.PointerCount,
header.HandleCount,
link.CreationTime or '',
str(header.NameInfo.Name or ''),
str(link.LinkTarget or ''))
class PoolScanMutant(poolscan.PoolScanner):
"""Pool scanner for mutex objects"""
def __init__(self, address_space, **kwargs):
poolscan.PoolScanner.__init__(self, address_space, **kwargs)
self.struct_name = "_KMUTANT"
self.object_type = "Mutant"
self.pooltag = obj.VolMagic(address_space).MutexPoolTag.v()
size = 0x40 # self.address_space.profile.get_obj_size("_KMUTANT")
self.checks = [
('CheckPoolSize', dict(condition = lambda x: x >= size)),
('CheckPoolType', dict(paged = False, non_paged = True, free = True)),
('CheckPoolIndex', dict(value = lambda x : x < 5)),
]
class MutantScan(common.AbstractScanCommand):
"""Pool scanner for mutex objects"""
scanners = [PoolScanMutant]
def __init__(self, config, *args, **kwargs):
common.AbstractScanCommand.__init__(self, config, *args, **kwargs)
config.add_option("SILENT", short_option = 's', default = False,
action = 'store_true',
help = 'Suppress less meaningful results')
def unified_output(self, data):
return TreeGrid([(self.offset_column(), Address),
("Pointers", int),
("Handles", int),
("Signal", str),
("Thread", Address),
("CID", str),
("Name", str)],
self.generator(data))
def generator(self, data):
for mutant in data:
header = mutant.get_object_header()
if mutant.OwnerThread.is_valid():
thread = mutant.OwnerThread.dereference_as('_ETHREAD')
CID = "{0}:{1}".format(thread.Cid.UniqueProcess, thread.Cid.UniqueThread)
else:
CID = ""
yield (0, [Address(mutant.obj_offset),
int(header.PointerCount),
int(header.HandleCount),
str(mutant.Header.SignalState),
Address(mutant.OwnerThread),
str(CID),
str(header.NameInfo.Name or '')])
def render_text(self, outfd, data):
self.table_header(outfd, [(self.offset_column(), '#018x'),
('#Ptr', '>8'),
('#Hnd', '>8'),
('Signal', '4'),
('Thread', '[addrpad]'),
('CID', '>9'),
('Name', '')
])
for mutant in data:
header = mutant.get_object_header()
if mutant.OwnerThread.is_valid():
thread = mutant.OwnerThread.dereference_as('_ETHREAD')
CID = "{0}:{1}".format(thread.Cid.UniqueProcess, thread.Cid.UniqueThread)
else:
CID = ""
self.table_row(outfd,
mutant.obj_offset,
header.PointerCount,
header.HandleCount,
mutant.Header.SignalState,
mutant.OwnerThread, CID,
str(header.NameInfo.Name or ''))
class PoolScanProcess(poolscan.PoolScanner):
"""Pool scanner for process objects"""
def __init__(self, address_space, **kwargs):
poolscan.PoolScanner.__init__(self, address_space, **kwargs)
self.struct_name = "_EPROCESS"
self.object_type = "Process"
# this allows us to find terminated processes
self.skip_type_check = True
self.pooltag = obj.VolMagic(address_space).ProcessPoolTag.v()
size = 0x1ae # self.address_space.profile.get_obj_size("_EPROCESS")
self.checks = [
('CheckPoolSize', dict(condition = lambda x: x >= size)),
('CheckPoolType', dict(paged = False, non_paged = True, free = True)),
('CheckPoolIndex', dict(value = lambda x : x < 5)),
]
class PSScan(common.AbstractScanCommand):
"""Pool scanner for process objects"""
scanners = [PoolScanProcess]
# Declare meta information associated with this plugin
meta_info = {}
meta_info['author'] = 'AAron Walters'
meta_info['copyright'] = 'Copyright (c) 2011 Volatility Foundation'
meta_info['contact'] = '[email protected]'
meta_info['license'] = 'GNU General Public License 2.0'
meta_info['url'] = 'https://www.volatilityfoundation.org/'
meta_info['os'] = ['Win7SP0x86', 'WinXPSP3x86']
meta_info['version'] = '0.1'
def calculate(self):
# start with a physical space so we can find processes without a DTB
addr_space = utils.load_as(self._config, astype = 'physical')
meta = addr_space.profile.metadata
win10 = (meta.get("major"), meta.get("minor")) == (6, 4)
# if the user selected virtual space or if we're on win10, switch
# to a virtual kernel space
if self._config.VIRTUAL or win10:
addr_space = utils.load_as(self._config)
return self.scan_results(addr_space)
def render_dot(self, outfd, data):
objects = set()
links = set()
for eprocess in data:
label = "{0} | {1} |".format(eprocess.UniqueProcessId,
eprocess.ImageFileName)
if eprocess.ExitTime:
label += "exited\\n{0}".format(eprocess.ExitTime)
options = ' style = "filled" fillcolor = "lightgray" '
else:
label += "running"
options = ''
objects.add('pid{0} [label="{1}" shape="record" {2}];\n'.format(eprocess.UniqueProcessId,
label, options))
links.add("pid{0} -> pid{1} [];\n".format(eprocess.InheritedFromUniqueProcessId,
eprocess.UniqueProcessId))
## Now write the dot file
outfd.write("digraph processtree { \ngraph [rankdir = \"TB\"];\n")
for link in links:
outfd.write(link)
for item in objects:
outfd.write(item)
outfd.write("}")
def unified_output(self, data):
return TreeGrid([(self.offset_column(), Address),
("Name", str),
("PID", int),
("PPID", int),
("PDB", Address),
("Time Created", str),
("Time Exited", str)],
self.generator(data))
def generator(self, data):
for eprocess in data:
yield (0, [Address(eprocess.obj_offset),
str(eprocess.ImageFileName),
int(eprocess.UniqueProcessId),
int(eprocess.InheritedFromUniqueProcessId),
Address(eprocess.Pcb.DirectoryTableBase),
str(eprocess.CreateTime or ''),
str(eprocess.ExitTime or '')])
def render_text(self, outfd, data):
self.table_header(outfd, [(self.offset_column(), '#018x'),
('Name', '16'),
('PID', '>6'),
('PPID', '>6'),
('PDB', '[addrpad]'),
('Time created', '30'),
('Time exited', '30')
])
for eprocess in data:
self.table_row(outfd,
eprocess.obj_offset,
eprocess.ImageFileName,
eprocess.UniqueProcessId,
eprocess.InheritedFromUniqueProcessId,
eprocess.Pcb.DirectoryTableBase,
eprocess.CreateTime or '',
eprocess.ExitTime or '')
| gpl-2.0 | 4,041,244,974,097,400,300 | 38.658933 | 101 | 0.498567 | false |
jquacinella/pyIsbnScanner | spreadsheet.py | 1 | 1073 | import types
import gdata.spreadsheet.service
from config import *
def get_spreadsheet_client():
''' Prepares and returns the spreadsheet client.'''
spr_client = gdata.spreadsheet.service.SpreadsheetsService()
spr_client.ClientLogin(email, password)
return spr_client
def get_feed_from_spreadsheet(spr_client):
return spr_client.GetListFeed(spreadsheet_key, worksheet_id)
# def get_row_from_spreadsheet(isbn):
# if type(isbn) == types.IntType:
# isbn = str(isbn)
# for idx in range(len(feed.entry)):
# row = feed.entry[idx]
# if row.custom['isbn'].text == isbn:
# # Note: the plus two is for standard off-by-one indexing plus header row
# return (idx + 2, row)
def get_rows_from_spreadsheet():
for idx in range(len(feed.entry)):
yield (idx + 2, feed.entry[idx])
def update_row(row_id, column_id, value):
return spr_client.UpdateCell(row_id, column_id, value, spreadsheet_key, worksheet_id)
spr_client = get_spreadsheet_client()
feed = get_feed_from_spreadsheet(spr_client) | gpl-2.0 | -5,379,212,502,724,104,000 | 31.545455 | 89 | 0.67754 | false |
ali/bandcamp-dl | bandcamp-dl/Bandcamp.py | 1 | 2915 | from bs4 import BeautifulSoup
import requests
import jsobj
class Bandcamp:
def parse(self, url):
try:
r = requests.get(url)
except requests.exceptions.MissingSchema:
return None
        if r.status_code != 200:
return None
self.soup = BeautifulSoup(r.text, "lxml")
album = {
"tracks": [],
"title": "",
"artist": "",
"full": False,
"art": "",
"date": ""
}
album_meta = self.extract_album_meta_data(r)
album['artist'] = album_meta['artist']
album['title'] = album_meta['title']
album['date'] = album_meta['date']
for track in album_meta['tracks']:
track = self.get_track_meta_data(track)
album['tracks'].append(track)
album['full'] = self.all_tracks_available(album)
album['art'] = self.get_album_art()
return album
def all_tracks_available(self, album):
for track in album['tracks']:
if track['url'] is None:
return False
return True
def get_track_meta_data(self, track):
new_track = {}
if not (isinstance(track['file'], unicode) or isinstance(track['file'], str)):
if 'mp3-128' in track['file']:
new_track['url'] = track['file']['mp3-128']
else:
new_track['url'] = None
new_track['duration'] = track['duration']
new_track['track'] = track['track_num']
new_track['title'] = track['title']
return new_track
def extract_album_meta_data(self, request):
album = {}
embedData = self.get_embed_string_block(request)
block = request.text.split("var TralbumData = ")
stringBlock = block[1]
stringBlock = stringBlock.split("};")[0] + "};"
stringBlock = jsobj.read_js_object("var TralbumData = %s" % stringBlock)
album['title'] = embedData['EmbedData']['album_title']
album['artist'] = stringBlock['TralbumData']['artist']
album['tracks'] = stringBlock['TralbumData']['trackinfo']
album['date'] = stringBlock['TralbumData']['album_release_date'].split()[2]
return album
@staticmethod
def generate_album_url(artist, album):
return "http://{0}.bandcamp.com/album/{1}".format(artist, album)
def get_album_art(self):
try:
url = self.soup.find(id='tralbumArt').find_all('img')[0]['src']
return url
except:
pass
def get_embed_string_block(self, request):
embedBlock = request.text.split("var EmbedData = ")
embedStringBlock = embedBlock[1]
embedStringBlock = embedStringBlock.split("};")[0] + "};"
embedStringBlock = jsobj.read_js_object("var EmbedData = %s" % embedStringBlock)
return embedStringBlock
| unlicense | 6,864,020,420,458,705,000 | 27.578431 | 88 | 0.553688 | false |
uber/pyro | pyro/ops/einsum/__init__.py | 1 | 1486 | # Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0
import opt_einsum
from pyro.util import ignore_jit_warnings
_PATH_CACHE = {}
def contract_expression(equation, *shapes, **kwargs):
"""
Wrapper around :func:`opt_einsum.contract_expression` that optionally uses
Pyro's cheap optimizer and optionally caches contraction paths.
:param bool cache_path: whether to cache the contraction path.
Defaults to True.
"""
# memoize the contraction path
cache_path = kwargs.pop('cache_path', True)
if cache_path:
kwargs_key = tuple(kwargs.items())
key = equation, shapes, kwargs_key
if key in _PATH_CACHE:
return _PATH_CACHE[key]
expr = opt_einsum.contract_expression(equation, *shapes, **kwargs)
if cache_path:
_PATH_CACHE[key] = expr
return expr
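# A minimal usage sketch (the equation and shapes below are made up for
# illustration): the expression is built once and, because cache_path defaults to
# True, a later call with the same equation and shapes returns the cached
# expression object (with its precomputed contraction path).
#
#   expr = contract_expression('ab,bc->ac', (2, 3), (3, 4))
#   out = expr(x, y, backend='torch')  # x: 2x3 operand, y: 3x4 operand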
def contract(equation, *operands, **kwargs):
"""
Wrapper around :func:`opt_einsum.contract` that optionally uses Pyro's
cheap optimizer and optionally caches contraction paths.
:param bool cache_path: whether to cache the contraction path.
Defaults to True.
"""
backend = kwargs.pop('backend', 'numpy')
out = kwargs.pop('out', None)
shapes = [tuple(t.shape) for t in operands]
with ignore_jit_warnings():
expr = contract_expression(equation, *shapes)
return expr(*operands, backend=backend, out=out)
__all__ = ['contract', 'contract_expression']
| apache-2.0 | 1,018,638,470,633,825,300 | 29.326531 | 78 | 0.666891 | false |
cwolferh/heat-scratch | heat/tests/test_environment.py | 1 | 42563 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import sys
import fixtures
import mock
from oslo_config import cfg
import six
from heat.common import environment_format
from heat.common import exception
from heat.engine import environment
from heat.engine import resources
from heat.engine.resources.aws.ec2 import instance
from heat.engine.resources.openstack.nova import server
from heat.engine import support
from heat.tests import common
from heat.tests import generic_resource
from heat.tests import utils
cfg.CONF.import_opt('environment_dir', 'heat.common.config')
class EnvironmentTest(common.HeatTestCase):
def setUp(self):
super(EnvironmentTest, self).setUp()
self.g_env = resources.global_env()
def test_load_old_parameters(self):
old = {u'a': u'ff', u'b': u'ss'}
expected = {u'parameters': old,
u'encrypted_param_names': [],
u'parameter_defaults': {},
u'event_sinks': [],
u'resource_registry': {u'resources': {}}}
env = environment.Environment(old)
self.assertEqual(expected, env.env_as_dict())
del(expected['encrypted_param_names'])
self.assertEqual(expected, env.user_env_as_dict())
def test_load_new_env(self):
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'encrypted_param_names': [],
u'parameter_defaults': {u'ff': 'new_def'},
u'event_sinks': [],
u'resource_registry': {u'OS::Food': u'fruity.yaml',
u'resources': {}}}
env = environment.Environment(new_env)
self.assertEqual(new_env, env.env_as_dict())
del(new_env['encrypted_param_names'])
self.assertEqual(new_env, env.user_env_as_dict())
def test_global_registry(self):
self.g_env.register_class('CloudX::Nova::Server',
generic_resource.GenericResource)
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'OS::*': 'CloudX::*'}}
env = environment.Environment(new_env)
self.assertEqual('CloudX::Nova::Server',
env.get_resource_info('OS::Nova::Server',
'my_db_server').name)
def test_global_registry_many_to_one(self):
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'OS::Nova::*': 'OS::Heat::None'}}
env = environment.Environment(new_env)
self.assertEqual('OS::Heat::None',
env.get_resource_info('OS::Nova::Server',
'my_db_server').name)
def test_global_registry_many_to_one_no_recurse(self):
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'OS::*': 'OS::Heat::None'}}
env = environment.Environment(new_env)
self.assertEqual('OS::Heat::None',
env.get_resource_info('OS::Some::Name',
'my_db_server').name)
def test_map_one_resource_type(self):
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'resources':
{u'my_db_server':
{u'OS::DBInstance': 'db.yaml'}}}}
env = environment.Environment(new_env)
info = env.get_resource_info('OS::DBInstance', 'my_db_server')
self.assertEqual('db.yaml', info.value)
def test_map_all_resources_of_type(self):
self.g_env.register_class('OS::Nova::FloatingIP',
generic_resource.GenericResource)
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry':
{u'OS::Networking::FloatingIP': 'OS::Nova::FloatingIP',
u'OS::Loadbalancer': 'lb.yaml'}}
env = environment.Environment(new_env)
self.assertEqual('OS::Nova::FloatingIP',
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip').name)
def test_resource_sort_order_len(self):
new_env = {u'resource_registry': {u'resources': {u'my_fip': {
u'OS::Networking::FloatingIP': 'ip.yaml'}}},
u'OS::Networking::FloatingIP': 'OS::Nova::FloatingIP'}
env = environment.Environment(new_env)
self.assertEqual('ip.yaml',
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip').value)
def test_env_load(self):
new_env = {u'resource_registry': {u'resources': {u'my_fip': {
u'OS::Networking::FloatingIP': 'ip.yaml'}}}}
env = environment.Environment()
self.assertRaises(exception.EntityNotFound,
env.get_resource_info,
'OS::Networking::FloatingIP', 'my_fip')
env.load(new_env)
self.assertEqual('ip.yaml',
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip').value)
def test_register_with_path(self):
yaml_env = '''
resource_registry:
test::one: a.yaml
resources:
res_x:
test::two: b.yaml
'''
env = environment.Environment(environment_format.parse(yaml_env))
self.assertEqual('a.yaml', env.get_resource_info('test::one').value)
self.assertEqual('b.yaml',
env.get_resource_info('test::two', 'res_x').value)
env2 = environment.Environment()
env2.register_class('test::one',
'a.yaml',
path=['test::one'])
env2.register_class('test::two',
'b.yaml',
path=['resources', 'res_x', 'test::two'])
self.assertEqual(env.env_as_dict(), env2.env_as_dict())
def test_constraints(self):
env = environment.Environment({})
first_constraint = object()
second_constraint = object()
env.register_constraint("constraint1", first_constraint)
env.register_constraint("constraint2", second_constraint)
self.assertIs(first_constraint, env.get_constraint("constraint1"))
self.assertIs(second_constraint, env.get_constraint("constraint2"))
self.assertIs(None, env.get_constraint("no_constraint"))
def test_constraints_registry(self):
constraint_content = '''
class MyConstraint(object):
pass
def constraint_mapping():
return {"constraint1": MyConstraint}
'''
plugin_dir = self.useFixture(fixtures.TempDir())
plugin_file = os.path.join(plugin_dir.path, 'test.py')
with open(plugin_file, 'w+') as ef:
ef.write(constraint_content)
self.addCleanup(sys.modules.pop, "heat.engine.plugins.test")
cfg.CONF.set_override('plugin_dirs', plugin_dir.path,
enforce_type=True)
env = environment.Environment({})
resources._load_global_environment(env)
self.assertEqual("MyConstraint",
env.get_constraint("constraint1").__name__)
self.assertIs(None, env.get_constraint("no_constraint"))
def test_constraints_registry_error(self):
constraint_content = '''
def constraint_mapping():
raise ValueError("oops")
'''
plugin_dir = self.useFixture(fixtures.TempDir())
plugin_file = os.path.join(plugin_dir.path, 'test.py')
with open(plugin_file, 'w+') as ef:
ef.write(constraint_content)
self.addCleanup(sys.modules.pop, "heat.engine.plugins.test")
cfg.CONF.set_override('plugin_dirs', plugin_dir.path,
enforce_type=True)
env = environment.Environment({})
error = self.assertRaises(ValueError,
resources._load_global_environment, env)
self.assertEqual("oops", six.text_type(error))
def test_constraints_registry_stevedore(self):
env = environment.Environment({})
resources._load_global_environment(env)
self.assertEqual("FlavorConstraint",
env.get_constraint("nova.flavor").__name__)
self.assertIs(None, env.get_constraint("no_constraint"))
def test_event_sinks(self):
env = environment.Environment(
{"event_sinks": [{"type": "zaqar-queue", "target": "myqueue"}]})
self.assertEqual([{"type": "zaqar-queue", "target": "myqueue"}],
env.user_env_as_dict()["event_sinks"])
sinks = env.get_event_sinks()
self.assertEqual(1, len(sinks))
self.assertEqual("myqueue", sinks[0]._target)
def test_event_sinks_load(self):
env = environment.Environment()
self.assertEqual([], env.get_event_sinks())
env.load(
{"event_sinks": [{"type": "zaqar-queue", "target": "myqueue"}]})
self.assertEqual([{"type": "zaqar-queue", "target": "myqueue"}],
env.user_env_as_dict()["event_sinks"])
class EnvironmentDuplicateTest(common.HeatTestCase):
scenarios = [
('same', dict(resource_type='test.yaml',
expected_equal=True)),
('diff_temp', dict(resource_type='not.yaml',
expected_equal=False)),
('diff_map', dict(resource_type='OS::Nova::Server',
expected_equal=False)),
('diff_path', dict(resource_type='a/test.yaml',
expected_equal=False)),
]
def setUp(self):
super(EnvironmentDuplicateTest, self).setUp(quieten_logging=False)
def test_env_load(self):
env_initial = {u'resource_registry': {
u'OS::Test::Dummy': 'test.yaml'}}
env = environment.Environment()
env.load(env_initial)
info = env.get_resource_info('OS::Test::Dummy', 'something')
replace_log = 'Changing %s from %s to %s' % ('OS::Test::Dummy',
'test.yaml',
self.resource_type)
self.assertNotIn(replace_log, self.LOG.output)
env_test = {u'resource_registry': {
u'OS::Test::Dummy': self.resource_type}}
env.load(env_test)
if self.expected_equal:
# should return exactly the same object.
self.assertIs(info, env.get_resource_info('OS::Test::Dummy',
'my_fip'))
self.assertNotIn(replace_log, self.LOG.output)
else:
self.assertIn(replace_log, self.LOG.output)
self.assertNotEqual(info,
env.get_resource_info('OS::Test::Dummy',
'my_fip'))
def test_env_register_while_get_resource_info(self):
env_test = {u'resource_registry': {
u'OS::Test::Dummy': self.resource_type}}
env = environment.Environment()
env.load(env_test)
env.get_resource_info('OS::Test::Dummy')
self.assertEqual({'OS::Test::Dummy': self.resource_type,
'resources': {}},
env.user_env_as_dict().get(
environment_format.RESOURCE_REGISTRY))
env_test = {u'resource_registry': {
u'resources': {u'test': {u'OS::Test::Dummy': self.resource_type}}}}
env.load(env_test)
env.get_resource_info('OS::Test::Dummy')
self.assertEqual({u'OS::Test::Dummy': self.resource_type,
'resources': {u'test': {u'OS::Test::Dummy':
self.resource_type}}},
env.user_env_as_dict().get(
environment_format.RESOURCE_REGISTRY))
class GlobalEnvLoadingTest(common.HeatTestCase):
def test_happy_path(self):
with mock.patch('glob.glob') as m_ldir:
m_ldir.return_value = ['/etc_etc/heat/environment.d/a.yaml']
env_dir = '/etc_etc/heat/environment.d'
env_content = '{"resource_registry": {}}'
env = environment.Environment({}, user_env=False)
with mock.patch('heat.engine.environment.open',
mock.mock_open(read_data=env_content),
create=True) as m_open:
environment.read_global_environment(env, env_dir)
m_ldir.assert_called_once_with(env_dir + '/*')
m_open.assert_called_once_with('%s/a.yaml' % env_dir)
def test_empty_env_dir(self):
with mock.patch('glob.glob') as m_ldir:
m_ldir.return_value = []
env_dir = '/etc_etc/heat/environment.d'
env = environment.Environment({}, user_env=False)
environment.read_global_environment(env, env_dir)
m_ldir.assert_called_once_with(env_dir + '/*')
def test_continue_on_ioerror(self):
"""Assert we get all files processed.
Assert we get all files processed even if there are processing
exceptions.
Test uses IOError as side effect of mock open.
"""
with mock.patch('glob.glob') as m_ldir:
m_ldir.return_value = ['/etc_etc/heat/environment.d/a.yaml',
'/etc_etc/heat/environment.d/b.yaml']
env_dir = '/etc_etc/heat/environment.d'
env_content = '{}'
env = environment.Environment({}, user_env=False)
with mock.patch('heat.engine.environment.open',
mock.mock_open(read_data=env_content),
create=True) as m_open:
m_open.side_effect = IOError
environment.read_global_environment(env, env_dir)
m_ldir.assert_called_once_with(env_dir + '/*')
expected = [mock.call('%s/a.yaml' % env_dir),
mock.call('%s/b.yaml' % env_dir)]
self.assertEqual(expected, m_open.call_args_list)
def test_continue_on_parse_error(self):
"""Assert we get all files processed.
Assert we get all files processed even if there are processing
exceptions.
Test checks case when env content is incorrect.
"""
with mock.patch('glob.glob') as m_ldir:
m_ldir.return_value = ['/etc_etc/heat/environment.d/a.yaml',
'/etc_etc/heat/environment.d/b.yaml']
env_dir = '/etc_etc/heat/environment.d'
env_content = '{@$%#$%'
env = environment.Environment({}, user_env=False)
with mock.patch('heat.engine.environment.open',
mock.mock_open(read_data=env_content),
create=True) as m_open:
environment.read_global_environment(env, env_dir)
m_ldir.assert_called_once_with(env_dir + '/*')
expected = [mock.call('%s/a.yaml' % env_dir),
mock.call('%s/b.yaml' % env_dir)]
self.assertEqual(expected, m_open.call_args_list)
def test_env_resources_override_plugins(self):
# assertion: any template resources in the global environment
# should override the default plugins.
# 1. set our own global test env
# (with a template resource that shadows a plugin)
g_env_content = '''
resource_registry:
"OS::Nova::Server": "file:///not_really_here.yaml"
'''
envdir = self.useFixture(fixtures.TempDir())
#
envfile = os.path.join(envdir.path, 'test.yaml')
with open(envfile, 'w+') as ef:
ef.write(g_env_content)
cfg.CONF.set_override('environment_dir', envdir.path,
enforce_type=True)
# 2. load global env
g_env = environment.Environment({}, user_env=False)
resources._load_global_environment(g_env)
# 3. assert our resource is in place.
self.assertEqual('file:///not_really_here.yaml',
g_env.get_resource_info('OS::Nova::Server').value)
def test_env_one_resource_disable(self):
# prove we can disable a resource in the global environment
g_env_content = '''
resource_registry:
"OS::Nova::Server":
'''
# 1. fake an environment file
envdir = self.useFixture(fixtures.TempDir())
envfile = os.path.join(envdir.path, 'test.yaml')
with open(envfile, 'w+') as ef:
ef.write(g_env_content)
cfg.CONF.set_override('environment_dir', envdir.path,
enforce_type=True)
# 2. load global env
g_env = environment.Environment({}, user_env=False)
resources._load_global_environment(g_env)
# 3. assert our resource is in now gone.
self.assertRaises(exception.EntityNotFound,
g_env.get_resource_info, 'OS::Nova::Server')
# 4. make sure we haven't removed something we shouldn't have
self.assertEqual(instance.Instance,
g_env.get_resource_info('AWS::EC2::Instance').value)
def test_env_multi_resources_disable(self):
# prove we can disable resources in the global environment
g_env_content = '''
resource_registry:
"AWS::*":
'''
# 1. fake an environment file
envdir = self.useFixture(fixtures.TempDir())
envfile = os.path.join(envdir.path, 'test.yaml')
with open(envfile, 'w+') as ef:
ef.write(g_env_content)
cfg.CONF.set_override('environment_dir', envdir.path,
enforce_type=True)
# 2. load global env
g_env = environment.Environment({}, user_env=False)
resources._load_global_environment(g_env)
# 3. assert our resources are now gone.
self.assertRaises(exception.EntityNotFound,
g_env.get_resource_info, 'AWS::EC2::Instance')
# 4. make sure we haven't removed something we shouldn't have
self.assertEqual(server.Server,
g_env.get_resource_info('OS::Nova::Server').value)
def test_env_user_cant_disable_sys_resource(self):
# prove a user can't disable global resources from the user environment
u_env_content = '''
resource_registry:
"AWS::*":
'''
# 1. load user env
u_env = environment.Environment()
u_env.load(environment_format.parse(u_env_content))
# 2. assert global resources are NOT gone.
self.assertEqual(
instance.Instance,
u_env.get_resource_info('AWS::EC2::Instance').value)
def test_env_ignore_files_starting_dot(self):
# prove we can disable a resource in the global environment
g_env_content = ''
# 1. fake an environment file
envdir = self.useFixture(fixtures.TempDir())
with open(os.path.join(envdir.path, 'a.yaml'), 'w+') as ef:
ef.write(g_env_content)
with open(os.path.join(envdir.path, '.test.yaml'), 'w+') as ef:
ef.write(g_env_content)
with open(os.path.join(envdir.path, 'b.yaml'), 'w+') as ef:
ef.write(g_env_content)
cfg.CONF.set_override('environment_dir', envdir.path,
enforce_type=True)
# 2. load global env
g_env = environment.Environment({}, user_env=False)
with mock.patch('heat.engine.environment.open',
mock.mock_open(read_data=g_env_content),
create=True) as m_open:
resources._load_global_environment(g_env)
# 3. assert that the file were ignored
expected = [mock.call('%s/a.yaml' % envdir.path),
mock.call('%s/b.yaml' % envdir.path)]
call_list = m_open.call_args_list
expected.sort()
call_list.sort()
self.assertEqual(expected, call_list)
class ChildEnvTest(common.HeatTestCase):
def test_params_flat(self):
new_params = {'foo': 'bar', 'tester': 'Yes'}
penv = environment.Environment()
expected = {'parameters': new_params,
'encrypted_param_names': [],
'parameter_defaults': {},
'event_sinks': [],
'resource_registry': {'resources': {}}}
cenv = environment.get_child_environment(penv, new_params)
self.assertEqual(expected, cenv.env_as_dict())
def test_params_normal(self):
new_params = {'parameters': {'foo': 'bar', 'tester': 'Yes'}}
penv = environment.Environment()
expected = {'parameter_defaults': {},
'encrypted_param_names': [],
'event_sinks': [],
'resource_registry': {'resources': {}}}
expected.update(new_params)
cenv = environment.get_child_environment(penv, new_params)
self.assertEqual(expected, cenv.env_as_dict())
def test_params_parent_overwritten(self):
new_params = {'parameters': {'foo': 'bar', 'tester': 'Yes'}}
parent_params = {'parameters': {'gone': 'hopefully'}}
penv = environment.Environment(env=parent_params)
expected = {'parameter_defaults': {},
'encrypted_param_names': [],
'event_sinks': [],
'resource_registry': {'resources': {}}}
expected.update(new_params)
cenv = environment.get_child_environment(penv, new_params)
self.assertEqual(expected, cenv.env_as_dict())
def test_registry_merge_simple(self):
env1 = {u'resource_registry': {u'OS::Food': u'fruity.yaml'}}
env2 = {u'resource_registry': {u'OS::Fruit': u'apples.yaml'}}
penv = environment.Environment(env=env1)
cenv = environment.get_child_environment(penv, env2)
rr = cenv.user_env_as_dict()['resource_registry']
self.assertIn('OS::Food', rr)
self.assertIn('OS::Fruit', rr)
def test_registry_merge_favor_child(self):
env1 = {u'resource_registry': {u'OS::Food': u'carrots.yaml'}}
env2 = {u'resource_registry': {u'OS::Food': u'apples.yaml'}}
penv = environment.Environment(env=env1)
cenv = environment.get_child_environment(penv, env2)
res = cenv.get_resource_info('OS::Food')
self.assertEqual('apples.yaml', res.value)
def test_item_to_remove_simple(self):
env = {u'resource_registry': {u'OS::Food': u'fruity.yaml'}}
penv = environment.Environment(env)
victim = penv.get_resource_info('OS::Food', resource_name='abc')
self.assertIsNotNone(victim)
cenv = environment.get_child_environment(penv, None,
item_to_remove=victim)
self.assertRaises(exception.EntityNotFound,
cenv.get_resource_info,
'OS::Food', resource_name='abc')
self.assertNotIn('OS::Food',
cenv.user_env_as_dict()['resource_registry'])
# make sure the parent env is unaffected
innocent = penv.get_resource_info('OS::Food', resource_name='abc')
self.assertIsNotNone(innocent)
def test_item_to_remove_complex(self):
env = {u'resource_registry': {u'OS::Food': u'fruity.yaml',
u'resources': {u'abc': {
u'OS::Food': u'nutty.yaml'}}}}
penv = environment.Environment(env)
# the victim we want is the most specific one.
victim = penv.get_resource_info('OS::Food', resource_name='abc')
self.assertEqual(['resources', 'abc', 'OS::Food'], victim.path)
cenv = environment.get_child_environment(penv, None,
item_to_remove=victim)
res = cenv.get_resource_info('OS::Food', resource_name='abc')
self.assertEqual(['OS::Food'], res.path)
rr = cenv.user_env_as_dict()['resource_registry']
self.assertIn('OS::Food', rr)
self.assertNotIn('OS::Food', rr['resources']['abc'])
# make sure the parent env is unaffected
innocent2 = penv.get_resource_info('OS::Food', resource_name='abc')
self.assertEqual(['resources', 'abc', 'OS::Food'], innocent2.path)
def test_item_to_remove_none(self):
env = {u'resource_registry': {u'OS::Food': u'fruity.yaml'}}
penv = environment.Environment(env)
victim = penv.get_resource_info('OS::Food', resource_name='abc')
self.assertIsNotNone(victim)
cenv = environment.get_child_environment(penv, None)
res = cenv.get_resource_info('OS::Food', resource_name='abc')
self.assertIsNotNone(res)
def test_drill_down_to_child_resource(self):
env = {
u'resource_registry': {
u'OS::Food': u'fruity.yaml',
u'resources': {
u'a': {
u'OS::Fruit': u'apples.yaml',
u'hooks': 'pre-create',
},
u'nested': {
u'b': {
u'OS::Fruit': u'carrots.yaml',
},
u'nested_res': {
u'hooks': 'pre-create',
}
}
}
}
}
penv = environment.Environment(env)
cenv = environment.get_child_environment(
penv, None, child_resource_name=u'nested')
registry = cenv.user_env_as_dict()['resource_registry']
resources = registry['resources']
self.assertIn('nested_res', resources)
self.assertIn('hooks', resources['nested_res'])
self.assertIsNotNone(
cenv.get_resource_info('OS::Food', resource_name='abc'))
self.assertRaises(exception.EntityNotFound,
cenv.get_resource_info,
'OS::Fruit', resource_name='a')
res = cenv.get_resource_info('OS::Fruit', resource_name='b')
self.assertIsNotNone(res)
self.assertEqual(u'carrots.yaml', res.value)
def test_drill_down_non_matching_wildcard(self):
env = {
u'resource_registry': {
u'resources': {
u'nested': {
u'c': {
u'OS::Fruit': u'carrots.yaml',
u'hooks': 'pre-create',
},
},
u'*_doesnt_match_nested': {
u'nested_res': {
u'hooks': 'pre-create',
},
}
}
}
}
penv = environment.Environment(env)
cenv = environment.get_child_environment(
penv, None, child_resource_name=u'nested')
registry = cenv.user_env_as_dict()['resource_registry']
resources = registry['resources']
self.assertIn('c', resources)
self.assertNotIn('nested_res', resources)
res = cenv.get_resource_info('OS::Fruit', resource_name='c')
self.assertIsNotNone(res)
self.assertEqual(u'carrots.yaml', res.value)
def test_drill_down_matching_wildcard(self):
env = {
u'resource_registry': {
u'resources': {
u'nested': {
u'c': {
u'OS::Fruit': u'carrots.yaml',
u'hooks': 'pre-create',
},
},
u'nest*': {
u'nested_res': {
u'hooks': 'pre-create',
},
}
}
}
}
penv = environment.Environment(env)
cenv = environment.get_child_environment(
penv, None, child_resource_name=u'nested')
registry = cenv.user_env_as_dict()['resource_registry']
resources = registry['resources']
self.assertIn('c', resources)
self.assertIn('nested_res', resources)
res = cenv.get_resource_info('OS::Fruit', resource_name='c')
self.assertIsNotNone(res)
self.assertEqual(u'carrots.yaml', res.value)
def test_drill_down_prefer_exact_match(self):
env = {
u'resource_registry': {
u'resources': {
u'*esource': {
u'hooks': 'pre-create',
},
u'res*': {
u'hooks': 'pre-create',
},
u'resource': {
u'OS::Fruit': u'carrots.yaml',
u'hooks': 'pre-update',
},
u'resource*': {
u'hooks': 'pre-create',
},
u'*resource': {
u'hooks': 'pre-create',
},
u'*sour*': {
u'hooks': 'pre-create',
},
}
}
}
penv = environment.Environment(env)
cenv = environment.get_child_environment(
penv, None, child_resource_name=u'resource')
registry = cenv.user_env_as_dict()['resource_registry']
resources = registry['resources']
self.assertEqual(u'carrots.yaml', resources[u'OS::Fruit'])
self.assertEqual('pre-update', resources[u'hooks'])
class ResourceRegistryTest(common.HeatTestCase):
def test_resources_load(self):
resources = {
u'pre_create': {
u'OS::Fruit': u'apples.yaml',
u'hooks': 'pre-create',
},
u'pre_update': {
u'hooks': 'pre-update',
},
u'both': {
u'hooks': ['pre-create', 'pre-update'],
},
u'b': {
u'OS::Food': u'fruity.yaml',
},
u'nested': {
u'res': {
u'hooks': 'pre-create',
},
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({'resources': resources})
self.assertIsNotNone(registry.get_resource_info(
'OS::Fruit', resource_name='pre_create'))
self.assertIsNotNone(registry.get_resource_info(
'OS::Food', resource_name='b'))
resources = registry.as_dict()['resources']
self.assertEqual('pre-create',
resources['pre_create']['hooks'])
self.assertEqual('pre-update',
resources['pre_update']['hooks'])
self.assertEqual(['pre-create', 'pre-update'],
resources['both']['hooks'])
self.assertEqual('pre-create',
resources['nested']['res']['hooks'])
def test_load_registry_invalid_hook_type(self):
resources = {
u'resources': {
u'a': {
u'hooks': 'invalid-type',
}
}
}
registry = environment.ResourceRegistry(None, {})
msg = ('Invalid hook type "invalid-type" for resource breakpoint, '
'acceptable hook types are: (\'pre-create\', \'pre-update\', '
'\'pre-delete\', \'post-create\', \'post-update\', '
'\'post-delete\')')
ex = self.assertRaises(exception.InvalidBreakPointHook,
registry.load, {'resources': resources})
self.assertEqual(msg, six.text_type(ex))
def test_list_type_validation_invalid_support_status(self):
registry = environment.ResourceRegistry(None, {})
ex = self.assertRaises(exception.Invalid,
registry.get_types,
support_status='junk')
msg = ('Invalid support status and should be one of %s' %
six.text_type(support.SUPPORT_STATUSES))
self.assertIn(msg, ex.message)
def test_list_type_validation_valid_support_status(self):
registry = environment.ResourceRegistry(None, {})
for status in support.SUPPORT_STATUSES:
self.assertEqual([],
registry.get_types(support_status=status))
def test_list_type_find_by_status(self):
registry = resources.global_env().registry
types = registry.get_types(support_status=support.UNSUPPORTED)
self.assertIn('ResourceTypeUnSupportedLiberty', types)
self.assertNotIn('GenericResourceType', types)
def test_list_type_find_by_status_none(self):
registry = resources.global_env().registry
types = registry.get_types(support_status=None)
self.assertIn('ResourceTypeUnSupportedLiberty', types)
self.assertIn('GenericResourceType', types)
def test_list_type_with_name(self):
registry = resources.global_env().registry
types = registry.get_types(type_name='ResourceType*')
self.assertIn('ResourceTypeUnSupportedLiberty', types)
self.assertNotIn('GenericResourceType', types)
def test_list_type_with_name_none(self):
registry = resources.global_env().registry
types = registry.get_types(type_name=None)
self.assertIn('ResourceTypeUnSupportedLiberty', types)
self.assertIn('GenericResourceType', types)
def test_list_type_with_is_available_exception(self):
registry = resources.global_env().registry
self.patchobject(
generic_resource.GenericResource,
'is_service_available',
side_effect=exception.ClientNotAvailable(client_name='generic'))
types = registry.get_types(utils.dummy_context())
self.assertNotIn('GenericResourceType', types)
def test_list_type_with_invalid_type_name(self):
registry = resources.global_env().registry
types = registry.get_types(type_name="r'[^\+]'")
self.assertEqual([], types)
def test_list_type_with_version(self):
registry = resources.global_env().registry
types = registry.get_types(version='5.0.0')
self.assertIn('ResourceTypeUnSupportedLiberty', types)
self.assertNotIn('ResourceTypeSupportedKilo', types)
def test_list_type_with_version_none(self):
registry = resources.global_env().registry
types = registry.get_types(version=None)
self.assertIn('ResourceTypeUnSupportedLiberty', types)
self.assertIn('ResourceTypeSupportedKilo', types)
def test_list_type_with_version_invalid(self):
registry = resources.global_env().registry
types = registry.get_types(version='invalid')
self.assertEqual([], types)
class HookMatchTest(common.HeatTestCase):
scenarios = [(hook_type, {'hook': hook_type}) for hook_type in
environment.HOOK_TYPES]
def test_plain_matches(self):
other_hook = next(hook for hook in environment.HOOK_TYPES
if hook != self.hook)
resources = {
u'a': {
u'OS::Fruit': u'apples.yaml',
u'hooks': [self.hook, other_hook]
},
u'b': {
u'OS::Food': u'fruity.yaml',
},
u'nested': {
u'res': {
u'hooks': self.hook,
},
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({
u'OS::Fruit': u'apples.yaml',
'resources': resources})
self.assertTrue(registry.matches_hook('a', self.hook))
self.assertFalse(registry.matches_hook('b', self.hook))
self.assertFalse(registry.matches_hook('OS::Fruit', self.hook))
self.assertFalse(registry.matches_hook('res', self.hook))
self.assertFalse(registry.matches_hook('unknown', self.hook))
def test_wildcard_matches(self):
other_hook = next(hook for hook in environment.HOOK_TYPES
if hook != self.hook)
resources = {
u'prefix_*': {
u'hooks': self.hook
},
u'*_suffix': {
u'hooks': self.hook
},
u'*': {
u'hooks': other_hook
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({'resources': resources})
self.assertTrue(registry.matches_hook('prefix_', self.hook))
self.assertTrue(registry.matches_hook('prefix_some', self.hook))
self.assertFalse(registry.matches_hook('some_prefix', self.hook))
self.assertTrue(registry.matches_hook('_suffix', self.hook))
self.assertTrue(registry.matches_hook('some_suffix', self.hook))
self.assertFalse(registry.matches_hook('_suffix_blah', self.hook))
self.assertTrue(registry.matches_hook('some_prefix', other_hook))
self.assertTrue(registry.matches_hook('_suffix_blah', other_hook))
def test_hook_types(self):
resources = {
u'hook': {
u'hooks': self.hook
},
u'not-hook': {
u'hooks': [hook for hook in environment.HOOK_TYPES if hook !=
self.hook]
},
u'all': {
u'hooks': environment.HOOK_TYPES
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({'resources': resources})
self.assertTrue(registry.matches_hook('hook', self.hook))
self.assertFalse(registry.matches_hook('not-hook', self.hook))
self.assertTrue(registry.matches_hook('all', self.hook))
class ActionRestrictedTest(common.HeatTestCase):
def test_plain_matches(self):
resources = {
u'a': {
u'OS::Fruit': u'apples.yaml',
u'restricted_actions': [u'update', u'replace'],
},
u'b': {
u'OS::Food': u'fruity.yaml',
},
u'nested': {
u'res': {
u'restricted_actions': 'update',
},
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({
u'OS::Fruit': u'apples.yaml',
'resources': resources})
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('a'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('b'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('OS::Fruit'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('res'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('unknown'))
def test_wildcard_matches(self):
resources = {
u'prefix_*': {
u'restricted_actions': 'update',
},
u'*_suffix': {
u'restricted_actions': 'update',
},
u'*': {
u'restricted_actions': 'replace',
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({'resources': resources})
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('prefix_'))
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('prefix_some'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('some_prefix'))
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('_suffix'))
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('some_suffix'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('_suffix_blah'))
self.assertIn(environment.REPLACE,
registry.get_rsrc_restricted_actions('some_prefix'))
self.assertIn(environment.REPLACE,
registry.get_rsrc_restricted_actions('_suffix_blah'))
def test_restricted_action_types(self):
resources = {
u'update': {
u'restricted_actions': 'update',
},
u'replace': {
u'restricted_actions': 'replace',
},
u'all': {
u'restricted_actions': ['update', 'replace'],
},
}
registry = environment.ResourceRegistry(None, {})
registry.load({'resources': resources})
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('update'))
self.assertNotIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('replace'))
self.assertIn(environment.REPLACE,
registry.get_rsrc_restricted_actions('replace'))
self.assertNotIn(environment.REPLACE,
registry.get_rsrc_restricted_actions('update'))
self.assertIn(environment.UPDATE,
registry.get_rsrc_restricted_actions('all'))
self.assertIn(environment.REPLACE,
registry.get_rsrc_restricted_actions('all'))
| apache-2.0 | 8,367,544,650,527,950,000 | 39.53619 | 79 | 0.53859 | false |
lingthio/signinghub_api | example_app/example_app.py | 1 | 6258 | import json
from flask import Flask, request, current_app, render_template, redirect, url_for
from signinghub_api import SigningHubAPI
# Create a web application with Flask
app = Flask(__name__)
# Copy local_settings.py from local_settings_example.py
# Edit local_settings.py to reflect your CLIENT_ID and CLIENT_SECRET
app.config.from_pyfile('local_settings.py') # Read example_app.local_settings.py
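# A hypothetical local_settings.py might look like the sketch below; the key names
# are the ones read via app.config.get() in this file, the values are placeholders:
#
# SIGNINGHUB_CLIENT_ID = 'your-client-id'
# SIGNINGHUB_CLIENT_SECRET = 'your-client-secret'
# SIGNINGHUB_USERNAME = 'user@example.com'
# SIGNINGHUB_PASSWORD = 'secret'
# SIGNINGHUB_SCOPE = 'user@example.com'
# SIGNINGHUB_LIBRARY_DOCUMENT_ID = 123456
# SIGNINGHUB_TEMPLATE_NAME = 'My Workflow Template'
# RECIPIENT_USER_NAME = 'Jane Doe'
# RECIPIENT_USER_EMAIL = 'jane@example.com'
# RECIPIENT_FIELD_NAME = 'Text Field 1'
# RECIPIENT_FIELD_VALUE = 'Some value'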
# Initialize the SigningHub API wrapper
signinghub_api = SigningHubAPI(
app.config.get('SIGNINGHUB_CLIENT_ID'),
app.config.get('SIGNINGHUB_CLIENT_SECRET'),
app.config.get('SIGNINGHUB_USERNAME'),
app.config.get('SIGNINGHUB_PASSWORD'),
app.config.get('SIGNINGHUB_SCOPE')
)
# Retrieve config settings from local_settings.py
signinghub_library_document_id = app.config.get('SIGNINGHUB_LIBRARY_DOCUMENT_ID')
signinghub_template_name = app.config.get('SIGNINGHUB_TEMPLATE_NAME')
recipient_user_name = app.config.get('RECIPIENT_USER_NAME')
recipient_user_email = app.config.get('RECIPIENT_USER_EMAIL')
recipient_field_name = app.config.get('RECIPIENT_FIELD_NAME')
recipient_field_value = app.config.get('RECIPIENT_FIELD_VALUE')
# Display the home page
@app.route('/')
def home_page():
access_token = request.args.get('token')
# Render the home page
return render_template('home.html',
access_token=access_token)
@app.route('/new_token')
def new_token():
access_token = signinghub_api.get_access_token()
# Show error message if needed
if signinghub_api.last_error_message:
return render_template('show_error_message.html',
access_token=access_token,
last_function_name=signinghub_api.last_function_name,
last_error_message=signinghub_api.last_error_message)
# Redirect to home page
return redirect(url_for('home_page')+'?token='+access_token)
# Retrieve and render a list of Adobe Sign Library Documents
@app.route('/show_packages')
def show_packages():
# Get access token from the URL query string
access_token = request.args.get('token')
# signinghub_api.delete_package(access_token, 201080)
# Use SigningHubAPI to retrieve a list of library documents
if access_token:
packages = signinghub_api.get_packages(access_token)
else:
packages = []
for package in packages:
print(json.dumps(package, indent=4))
# Show error message if needed
if signinghub_api.last_error_message:
return render_template('show_error_message.html',
access_token=access_token,
last_function_name=signinghub_api.last_function_name,
last_error_message=signinghub_api.last_error_message)
# Render the list of documents
return render_template('show_packages.html',
access_token=access_token,
packages=packages)
# Create and render an Adobe Sign Widget
@app.route('/show_iframe')
def show_iframe():
# Get access token from the URL query string
access_token = request.args.get('token')
if not access_token: return redirect('/')
# Create a package
package_name = '2017 Contract - '+recipient_user_name+' - '+recipient_user_email
package_id = signinghub_api.add_package(access_token, package_name)
# Add a document from the document library
if package_id:
document_id = signinghub_api.upload_document_from_library(access_token, package_id, signinghub_library_document_id)
# Rename document
if document_id:
document_name = package_name
success = signinghub_api.rename_document(access_token, package_id, document_id, document_name)
# Add a template
if success:
template_name = signinghub_template_name
success = signinghub_api.apply_workflow_template(access_token, package_id, document_id, template_name)
# print fields, so that we can determine the name of the text field
if success:
fields = signinghub_api.get_document_fields(access_token, package_id, document_id)
print('Fields:', json.dumps(fields, indent=4))
# Pre-fill the text field
success = signinghub_api.update_textbox_field(access_token, package_id, document_id,
fields, recipient_field_name, recipient_field_value)
# Add signer
if success:
success = signinghub_api.update_workflow_user(access_token, package_id, recipient_user_email, recipient_user_name)
# Share Package
if success:
success = signinghub_api.share_document(access_token, package_id)
# Show error message if needed
if signinghub_api.last_error_message:
return render_template('show_error_message.html',
access_token=access_token,
last_function_name=signinghub_api.last_function_name,
last_error_message=signinghub_api.last_error_message)
# Render the IFrame with the document for signing
return render_template('show_iframe.html',
access_token=access_token,
package_id=package_id,
user_email=recipient_user_email)
# SigningHub Callback, called after a user finishes the IFrame
@app.route('/signinghub/callback') # Must match SigningHub's Application call-back URL setting
def signinghub_callback():
# Retrieve callback info from the query parameters
access_token = request.args.get('token')
package_id = request.args.get('document_id') # legacy parameter name. It really points to the Package.
language_code = request.args.get('language')
user_email = request.args.get('user_email')
# Render a finished message
return render_template('finished.html',
access_token=access_token,
package_id=package_id,
language_code=language_code,
user_email=user_email)
| mit | 5,125,447,170,131,130,000 | 39.901961 | 130 | 0.642378 | false |
SeiryuZ/magnet | magnet/apps/users/forms.py | 1 | 2327 | from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import User
class UserCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
same_number = forms.BooleanField(label=_("Copy number for whatsapp"))
class Meta:
model = User
fields = ('email', 'gender', 'mobile_number', 'same_number',
'whatsapp_number', 'pk_number', 'previous_university', 'type')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-8'
self.helper.add_input(Submit('submit', 'Submit'))
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
            raise forms.ValidationError("Passwords do not match")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
same_number = forms.BooleanField(label=_("Copy number for whatsapp"))
class Meta:
model = User
fields = ('email', 'gender', 'mobile_number', 'same_number',
'whatsapp_number', 'pk_number', 'previous_university', 'type')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-8'
self.helper.add_input(Submit('submit', 'Submit'))
| mit | 884,519,156,442,182,800 | 37.783333 | 90 | 0.643747 | false |
tencrance/cool-config | python_tricks/factory_method.py | 1 | 3265 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/11/15 14:57
# @Author : Zhiwei Yang
# @File    : factory_method.py
"""
The factory method centralizes object creation in one place, which makes it much
easier to keep track of objects.
The factory method pattern helps us decouple a product's implementation from its
use: if we add a product or change a product's implementation, the Creator is not
affected.
Another advantage of the pattern is that adding a new product to the system needs
no changes to the interfaces of the abstract factory or the abstract products, no
changes to the client, and no changes to the other concrete factories and concrete
products; we only have to add one new concrete factory and concrete product. This
keeps the system highly extensible and fully satisfies the open/closed principle.
"""
class Pizza:
name = None
dough = None
sauce = None
toppings = []
def prepare(self):
print("preparing %s" % self.name)
print("Tossing dough...")
print("Adding sauce...")
print("Adding toppings: ")
for topping in self.toppings:
print(" %s"% topping)
def bake(self):
print("Bake for 25 minutes at 350.")
def cut(self):
print("Cutting the pizza into diagonal slices")
def box(self):
print("Place pizza in official PizzaStore box")
def __str__(self):
return self.name
class PizzaStore:
def create_pizza(self,pizza_type):
        # Every method that a subclass must implement raises NotImplementedError.
        # Alternatively, we could set PizzaStore's metaclass to abc.ABCMeta,
        # which would make this class a true abstract base class.
raise NotImplementedError()
def order_pizza(self,pizza_type):
pizza = self.create_pizza(pizza_type)
        # Once we have a pizza, we prepare it (toss the dough, add sauce and toppings), then bake, cut and box it
pizza.prepare()
pizza.bake()
pizza.cut()
pizza.box()
return pizza
class NYStyleCheesePizza(Pizza):
name = "NY Style Sauce and Cheese Pizza"
dough = "Thin Crust Dough"
sauce = "Marinara Sauce"
toppings = ["Grated", "Reggiano", "Cheese"]
class ChicagoCheesePizza(Pizza):
name = "Chicago Style Deep Dish Cheese Pizza"
dough = "Extra Thick Crust Dough"
sauce = "Plum Tomato Sauce"
toppings = ["Shredded", "Mozzarella", "Cheese"]
def cut(self):
print("Cutting the pizza into square slices")
class NYStylePizzaStore(PizzaStore):
def create_pizza(self,pizza_type):
        # Based on the pizza type, instantiate the right concrete class and assign it to the pizza variable
if pizza_type == "cheese":
pizza = NYStyleCheesePizza()
return pizza
class ChicagoCheeseStore(PizzaStore):
def create_pizza(self,pizza_type):
if pizza_type == "cheese":
pizza = ChicagoCheesePizza()
return pizza
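# --- Hedged extension sketch (added for illustration; not part of the original example) ---
# The docstring above claims the open/closed principle: supporting a new region
# only needs one new concrete Pizza and one new PizzaStore, with no changes to
# the classes above. The "California" names below are hypothetical.
#
# class CaliforniaCheesePizza(Pizza):
#     name = "California Style Cheese Pizza"
#     dough = "Very Thin Crust Dough"
#     sauce = "Bruschetta Sauce"
#     toppings = ["Goat", "Cheese"]
#
# class CaliforniaPizzaStore(PizzaStore):
#     def create_pizza(self, pizza_type):
#         if pizza_type == "cheese":
#             return CaliforniaCheesePizza()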
def main():
nystore = NYStylePizzaStore()
pizza = nystore.order_pizza("cheese")
print("goodspeed ordered a %s" % pizza)
print("*" * 10)
chicagostore = ChicagoCheeseStore()
pizza_chicago = chicagostore.order_pizza("cheese")
print("chicago ordered a %s" % pizza_chicago)
print("*" * 10 )
if __name__ == '__main__':
main()
| mit | 6,386,572,594,155,726,000 | 22.918919 | 57 | 0.629755 | false |
bat-serjo/vivisect | envi/radixtree.py | 3 | 2027 | '''
simple/fast python impl of a couple radix/prefix trees.
'''
import threading
class RadixIntTree:
'''
    An integer indexed prefix (radix) tree.
'''
def __init__(self, bits=32):
# each node is [ left, right, answers ]
self.precount = 0
self.bitwidth = bits
self.treelock = threading.Lock()
self.treeroot = [None, None, None]
def addIntPrefix(self, value, bits, obj):
'''
Add an object to the prefix tree based on a value/bits pair.
( NOTE: most common use case is ipv4/ipv6 subnetting )
'''
with self.treelock:
self.precount += 1
shiftmax = self.bitwidth - 1
bits = [(value >> (shiftmax - i)) & 1 for i in range(bits)]
curnode = self.treeroot
for bit in bits:
nextnode = curnode[bit]
if nextnode is None:
nextnode = [None, None, None]
curnode[bit] = nextnode
curnode = nextnode
curnode[2] = obj
def getIntPrefixes(self, value):
'''
Retrieve a yield generator which returns the previously
inserted objects in the prefix tree (best match last).
'''
shiftmax = self.bitwidth - 1
bits = [(value >> (shiftmax - i)) & 1 for i in range(self.bitwidth)]
curnode = self.treeroot
for bit in bits:
curnode = curnode[bit]
if curnode is None:
break
obj = curnode[2]
if obj is not None:
yield obj
def getIntLongestPrefix(self, value):
shiftmax = self.bitwidth - 1
bits = [(value >> (shiftmax - i)) & 1 for i in range(self.bitwidth)]
best = None
curnode = self.treeroot
for bit in bits:
curnode = curnode[bit]
if curnode is None:
break
obj = curnode[2]
if obj is not None:
best = obj
return best
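# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# A minimal example of the IPv4 longest-prefix-match use case mentioned in the
# addIntPrefix docstring. The prefixes and labels below are made up.
if __name__ == '__main__':
    import socket
    import struct
    def ip_to_int(addr):
        # dotted-quad IPv4 string -> 32-bit integer
        return struct.unpack('!I', socket.inet_aton(addr))[0]
    tree = RadixIntTree(bits=32)
    tree.addIntPrefix(ip_to_int('10.0.0.0'), 8, 'rfc1918-10/8')
    tree.addIntPrefix(ip_to_int('10.1.0.0'), 16, 'site-10.1/16')
    # The most specific (longest) matching prefix wins.
    print(tree.getIntLongestPrefix(ip_to_int('10.1.2.3')))  # site-10.1/16
    print(tree.getIntLongestPrefix(ip_to_int('10.9.9.9')))  # rfc1918-10/8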
| apache-2.0 | -5,749,332,831,248,389,000 | 26.391892 | 76 | 0.517514 | false |
gregoil/rotest | tests/core/test_shell.py | 1 | 5847 | """Test Rotest's TestCase class behavior."""
# pylint: disable=missing-docstring,unused-argument,protected-access
# pylint: disable=no-member,no-self-use,too-many-public-methods,invalid-name
from __future__ import absolute_import
from rotest.core import request, BlockInput
from rotest.management.models.ut_resources import DemoResource
from rotest.management.models.ut_models import DemoResourceData
from rotest.management.utils.shell import run_test, create_result
from tests.core.utils import (BasicRotestUnitTest, MockCase, MockBlock,
MockFlow, MockTestSuite)
RESOURCE_NAME = 'available_resource1'
class TempResourceCase(MockCase):
"""Inherit class and override resources requests."""
__test__ = False
resources = (request('test_resource', DemoResource, name=RESOURCE_NAME),)
def test_method(self):
self.assertEqual(self.test_resource.name, RESOURCE_NAME)
class TempResourceSuite(MockTestSuite):
"""Inherit class and override resources requests."""
__test__ = False
components = [TempResourceCase]
class TempResourceBlock(MockBlock):
"""Inherit class and override resources requests."""
__test__ = False
test_resource = BlockInput()
def test_method(self):
self.assertEqual(self.test_resource.name, RESOURCE_NAME)
class TempResourceFlow(MockFlow):
"""Inherit class and override resources requests."""
__test__ = False
resources = (request('test_resource', DemoResource, name=RESOURCE_NAME),)
blocks = [TempResourceBlock]
class TestShell(BasicRotestUnitTest):
"""Test Rotest shell functionality."""
fixtures = ['resource_ut.json']
DEMO_RESOURCE_NAME = 'test_resource'
def setUp(self):
create_result()
def test_case_supplying_config(self):
config = {'some': 'value'}
test = run_test(TempResourceCase, config=config)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.config, config, 'Test ran with the wrong config')
def test_case_supplying_resource_via_kwargs(self):
resource = DemoResource(
data=DemoResourceData.objects.get(name=RESOURCE_NAME))
test = run_test(TempResourceCase, test_resource=resource)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.test_resource, resource,
"Test didn't get the supplied resource")
def test_case_not_supplying_any_resource(self):
test = run_test(TempResourceCase)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
def test_block_supplying_config(self):
config = {'some': 'value'}
resource = DemoResource(
data=DemoResourceData.objects.get(name=RESOURCE_NAME))
test = run_test(TempResourceBlock, config=config,
test_resource=resource)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.config, config, 'Test ran with the wrong config')
def test_block_supplying_resource_via_kwargs(self):
resource = DemoResource(
data=DemoResourceData.objects.get(name=RESOURCE_NAME))
test = run_test(TempResourceBlock, test_resource=resource)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.test_resource, resource,
"Test didn't get the supplied resource")
def test_block_not_supplying_any_resource(self):
with self.assertRaises(AttributeError):
run_test(TempResourceBlock)
def test_flow_supplying_config(self):
config = {'some': 'value'}
test = run_test(TempResourceFlow, config=config)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.config, config, "Test ran with the wrong config")
def test_flow_supplying_resource_via_kwargs(self):
resource = DemoResource(
data=DemoResourceData.objects.get(name=RESOURCE_NAME))
test = run_test(TempResourceFlow, test_resource=resource)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.test_resource, resource,
"Test didn't get the supplied resource")
def test_flow_not_supplying_any_resource(self):
test = run_test(TempResourceFlow)
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
def test_suite_supplying_config(self):
config = {'some': 'value'}
test = run_test(TempResourceSuite, config=config)._tests[0]
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.config, config, "Test ran with the wrong config")
def test_suite_supplying_resource_via_kwargs(self):
resource = DemoResource(
data=DemoResourceData.objects.get(name=RESOURCE_NAME))
test = run_test(TempResourceSuite, test_resource=resource)._tests[0]
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
self.assertEqual(test.test_resource, resource,
"Test didn't get the supplied resource")
def test_suite_not_supplying_any_resource(self):
test = run_test(TempResourceSuite)._tests[0]
self.assertTrue(test.data.success,
'Case failed when it should have succeeded')
| mit | -8,135,696,172,237,661,000 | 35.773585 | 79 | 0.654524 | false |
sergpolly/GlycoMadness | SILAC_stage1b_pull_proteins_NCBI.py | 1 | 5077 | import os
import sys
from Bio import Seq
from Bio import SeqIO
from Bio import SeqRecord
import pandas as pd
import numpy as np
import ms_module as ms
import re
############################
from Bio import Entrez
from Bio import SeqIO
from StringIO import StringIO
import time
from urllib2 import HTTPError # for Python 2
import argparse
# do some arguments parsing to make the script looks civilized ...
parser = argparse.ArgumentParser()
parser.add_argument("-f","--raw_fetch", help="specify input data fname (with fetchid column(!), with/without path)",required=True)
# we don't need spectrum file for downloading proteins, it is too redundant for that purpose ...
parser.add_argument("--verbose", help="verbose output", action="store_true")
parser.add_argument("--prefix", help="specify common part of the path for peptide and spectrum files")
parser.add_argument("--email", help="Provide your email for NCBI servers abuse-feedback")
args = parser.parse_args()
# print args
###############################################
if args.verbose:
print "Verbose output is to follow ...\n\n"
###############################################
if args.prefix is not None:
raw_info_with_fetch_fname = os.path.join( args.prefix, args.raw_fetch )
else:
raw_info_with_fetch_fname = args.raw_fetch
# get the common path for later use ...
raw_path = os.path.dirname(raw_info_with_fetch_fname)
#
# don't forget to provide your email
Entrez.email = args.email if args.email else "your_email@mail_server.com"
#
# peptides_with_fetch.csv
# raw_info_with_fetch_fname
# raw_info_with_fetch
raw_info_with_fetch = pd.read_csv(raw_info_with_fetch_fname)
assert 'fetchid' in raw_info_with_fetch.columns
############################################
# columns that needs to be delivered ... #
############################################
# A gsites, 1 per line
# B pept, 1 per line
# B1 enzyme, G or T, derive from 'Biological sample category', like this: {'TrypsinSample1':'T','GluC_Sample2':'G'}
# C peptide_start, 1 per line accordingly
# D all_uids, REPLACE WITH col:H
# E prot_seq, try to get those from NCBI, not from UniProt ...
# F protein, ??? sequence, name or what???
# G uid_max, UID for major form instead or something like that ...
# H prot_name, parsed out human-readable name from 'Protein Name'
# H1 gene_name, parsed out GN=xxx from 'Protein Name'
# I uniq_peptide_count, discrad that column ...
# J pept_probability, output number not the string - this would be the criteria
# K gsites_predicted, OK
# L gsites_predicted_number, OK
# M gsite_start, beware of 0 or 1 type of indexing ...
# N,O,P - gsites AAs in separate columns
# M1, NOP combined, gsite sequence basically!
# Q signal, from GeneBank record on the protein, simply Y,N on whether there is a 'Signal' in gb.
# R signal_location, location of the signal from Q
# S tm_span, Y,N just for the fact of having TM span as a protein feature.
#
#
print
print "Posting and fetching genebank records corresponding to the available FetchIDs from the Protein DB ..."
pulled_gb_recs_fname = os.path.join( raw_path, "pulled_proteins.gb" )
batch_size = 60
attempts_limit = 3
# THEN WE'D NEED TO DO POST AND ONLY AFTER EFETCH ...
# there might be some EMPTY fetchids ...
non_empty_fetchids = raw_info_with_fetch['fetchid'][raw_info_with_fetch['fetchid'].notnull()].apply(int)
with_empty_fetchids = raw_info_with_fetch[raw_info_with_fetch['fetchid'].isnull()]
#
print
print "BEWARE! There are %d empty fetchids ..."%with_empty_fetchids.shape[0]
print with_empty_fetchids[['Protein Name','Peptide Sequence']]
print
#
search_results = Entrez.read( Entrez.epost("protein", id=",".join( non_empty_fetchids.apply(str).unique() )) )
webenv = search_results["WebEnv"]
query_key = search_results["QueryKey"]
# download results in batches using history and cookies ...
count, = non_empty_fetchids.unique().shape
out_handle = open(pulled_gb_recs_fname, "w")
for start in range(0, count, batch_size):
end = min(count, start+batch_size)
print("Going to download record %i to %i" % (start+1, end))
attempt = 0
while attempt < attempts_limit:
attempt += 1
try:
fetch_handle = Entrez.efetch(db="protein", rettype="gb", retmode="text",
retstart=start, retmax=batch_size,
webenv=webenv, query_key=query_key)
break # skip subsequent attempts is succeeded ...
except HTTPError as err:
if 500 <= err.code <= 599:
print("Received error from server %s" % err)
print("Attempt %d of %d"%(attempt,attempts_limit))
# attempt += 1
time.sleep(15)
else:
print "oh Shut! %d"%attempt
raise
data = fetch_handle.read()
fetch_handle.close()
out_handle.write(data)
out_handle.close()
#
print "Fetched genebank records are stored in %s."%pulled_gb_recs_fname
print "Check for BioPython gb consistency before processing ..."
print "THE END"
| mit | 4,538,599,466,040,063,500 | 31.33758 | 130 | 0.656096 | false |
li-yuntao/SiliconLives | PytorchModels/softmax.py | 1 | 2699 | # encoding: utf-8
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
class Softmax(nn.Module):
def __init__(self, num_labels, feature_dim):
super(Softmax, self).__init__()
self.linear = nn.Linear(feature_dim, num_labels)
def forward(self, x):
p = F.softmax(self.linear(x))
log_p = F.log_softmax(self.linear(x))
return p, log_p
data = [("me gusta comer en la cafeteria".split(), "SPANISH"),
("Give it to me".split(), "ENGLISH"),
("No creo que sea una buena idea".split(), "SPANISH"),
("No it is not a good idea to get lost at sea".split(), "ENGLISH")]
test_data = [("Yo creo que si".split(), "SPANISH"),
("it is lost on me".split(), "ENGLISH")]
word_to_ix = {}
for sent, _ in data + test_data:
for word in sent:
if word not in word_to_ix:
word_to_ix[word] = len(word_to_ix)
print(word_to_ix)
label_to_ix = {"SPANISH": 0, "ENGLISH": 1}
VOCAB_SIZE = len(word_to_ix)
NUM_LABELS = 2
def make_bow_vector(sentence, word_to_ix):
vec = torch.zeros(len(word_to_ix))
for word in sentence:
vec[word_to_ix[word]] += 1
return vec.view(1, -1)
def make_target(label, label_to_ix):
return torch.LongTensor([label_to_ix[label]])
model = Softmax(NUM_LABELS, VOCAB_SIZE)
loss_function = nn.NLLLoss()
optimizer = optim.SGD(model.parameters(), lr=0.1)
for epoch in range(100):
for instance, label in data:
# Step 1. Remember that Pytorch accumulates gradients.
# We need to clear them out before each instance
model.zero_grad()
# Step 2. Make our BOW vector and also we must wrap the target in a
# Variable as an integer. For example, if the target is SPANISH, then
# we wrap the integer 0. The loss function then knows that the 0th
# element of the log probabilities is the log probability
# corresponding to SPANISH
bow_vec = autograd.Variable(make_bow_vector(instance, word_to_ix))
target = autograd.Variable(make_target(label, label_to_ix))
# Step 3. Run our forward pass.
_, log_probs = model(bow_vec)
# Step 4. Compute the loss, gradients, and update the parameters by
# calling optimizer.step()
loss = loss_function(log_probs, target)
loss.backward()
optimizer.step()
for instance, label in test_data:
bow_vec = autograd.Variable(make_bow_vector(instance, word_to_ix))
probs, log_probs = model(bow_vec)
print(probs)
# Index corresponding to Spanish goes up, English goes down!
print(next(model.parameters())[:, word_to_ix["creo"]])
| gpl-3.0 | -8,706,232,006,862,907,000 | 31.518072 | 77 | 0.636532 | false |
aptivate/django-notification | notification/models.py | 1 | 15623 | import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
from django.db import models
from django.db.models.query import QuerySet
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.template import Context
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext, get_language, activate
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.contrib.auth.models import AnonymousUser
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
QUEUE_ALL = getattr(settings, "NOTIFICATION_QUEUE_ALL", False)
class LanguageStoreNotAvailable(Exception):
pass
class NoticeType(models.Model):
label = models.CharField(_("label"), max_length=40)
display = models.CharField(_("display"), max_length=50)
description = models.CharField(_("description"), max_length=100)
# by default only on for media with sensitivity less than or equal to this number
default = models.IntegerField(_("default"))
def __unicode__(self):
return self.label
class Meta:
verbose_name = _("notice type")
verbose_name_plural = _("notice types")
# if this gets updated, the create() method below needs to be as well...
NOTICE_MEDIA = (
("1", _("Email")),
)
# how spam-sensitive is the medium
NOTICE_MEDIA_DEFAULTS = {
"1": 2 # email
}
class NoticeSetting(models.Model):
"""
Indicates, for a given user, whether to send notifications
of a given type to a given medium.
"""
user = models.ForeignKey(User, verbose_name=_("user"))
notice_type = models.ForeignKey(NoticeType, verbose_name=_("notice type"))
medium = models.CharField(_("medium"), max_length=1, choices=NOTICE_MEDIA)
send = models.BooleanField(_("send"))
class Meta:
verbose_name = _("notice setting")
verbose_name_plural = _("notice settings")
unique_together = ("user", "notice_type", "medium")
def get_notification_setting(user, notice_type, medium):
try:
return NoticeSetting.objects.get(user=user, notice_type=notice_type, medium=medium)
except NoticeSetting.DoesNotExist:
default = (NOTICE_MEDIA_DEFAULTS[medium] <= notice_type.default)
setting = NoticeSetting(user=user, notice_type=notice_type, medium=medium, send=default)
setting.save()
return setting
def should_send(user, notice_type, medium):
return get_notification_setting(user, notice_type, medium).send
class NoticeManager(models.Manager):
def notices_for(self, user, archived=False, unseen=None, on_site=None, sent=False):
"""
returns Notice objects for the given user.
        If archived=False, it only includes notices that are not archived.
If archived=True, it returns all notices for that user.
If unseen=None, it includes all notices.
If unseen=True, return only unseen notices.
If unseen=False, return only seen notices.
"""
if sent:
lookup_kwargs = {"sender": user}
else:
lookup_kwargs = {"recipient": user}
qs = self.filter(**lookup_kwargs)
if not archived:
            qs = qs.filter(archived=archived)
if unseen is not None:
qs = qs.filter(unseen=unseen)
if on_site is not None:
qs = qs.filter(on_site=on_site)
return qs
def unseen_count_for(self, recipient, **kwargs):
"""
returns the number of unseen notices for the given user but does not
mark them seen
"""
return self.notices_for(recipient, unseen=True, **kwargs).count()
def received(self, recipient, **kwargs):
"""
        returns notices the given recipient has received.
"""
kwargs["sent"] = False
return self.notices_for(recipient, **kwargs)
def sent(self, sender, **kwargs):
"""
returns notices the given sender has sent
"""
kwargs["sent"] = True
return self.notices_for(sender, **kwargs)
class Notice(models.Model):
recipient = models.ForeignKey(User, related_name="recieved_notices", verbose_name=_("recipient"))
sender = models.ForeignKey(User, null=True, related_name="sent_notices", verbose_name=_("sender"))
message = models.TextField(_("message"))
notice_type = models.ForeignKey(NoticeType, verbose_name=_("notice type"))
added = models.DateTimeField(_("added"), auto_now_add=True)
unseen = models.BooleanField(_("unseen"), default=True)
archived = models.BooleanField(_("archived"), default=False)
on_site = models.BooleanField(_("on site"))
objects = NoticeManager()
def __unicode__(self):
return self.message
def archive(self):
self.archived = True
self.save()
def is_unseen(self):
"""
returns value of self.unseen but also changes it to false.
Use this in a template to mark an unseen notice differently the first
time it is shown.
"""
unseen = self.unseen
if unseen:
self.unseen = False
self.save()
return unseen
class Meta:
ordering = ["-added"]
verbose_name = _("notice")
verbose_name_plural = _("notices")
def get_absolute_url(self):
return reverse("notification_notice", args=[str(self.pk)])
class NoticeQueueBatch(models.Model):
"""
A queued notice.
Denormalized data for a notice.
"""
pickled_data = models.TextField()
def create_notice_type(label, display, description, default=2, verbosity=1):
"""
Creates a new NoticeType.
    This is intended to be used by other apps as a post_syncdb management step.
"""
try:
notice_type = NoticeType.objects.get(label=label)
updated = False
if display != notice_type.display:
notice_type.display = display
updated = True
if description != notice_type.description:
notice_type.description = description
updated = True
if default != notice_type.default:
notice_type.default = default
updated = True
if updated:
notice_type.save()
if verbosity > 1:
print "Updated %s NoticeType" % label
except NoticeType.DoesNotExist:
NoticeType(label=label, display=display, description=description, default=default).save()
if verbosity > 1:
print "Created %s NoticeType" % label
def get_notification_language(user):
"""
Returns site-specific notification language for this user. Raises
LanguageStoreNotAvailable if this site does not use translated
notifications.
"""
if getattr(settings, "NOTIFICATION_LANGUAGE_MODULE", False):
try:
app_label, model_name = settings.NOTIFICATION_LANGUAGE_MODULE.split(".")
model = models.get_model(app_label, model_name)
language_model = model._default_manager.get(user__id__exact=user.id)
if hasattr(language_model, "language"):
return language_model.language
except (ImportError, ImproperlyConfigured, model.DoesNotExist):
raise LanguageStoreNotAvailable
raise LanguageStoreNotAvailable
def get_formatted_messages(formats, label, context):
"""
Returns a dictionary with the format identifier as the key. The values are
are fully rendered templates with the given context.
"""
format_templates = {}
for format in formats:
# conditionally turn off autoescaping for .txt extensions in format
if format.endswith(".txt"):
context.autoescape = False
else:
context.autoescape = True
format_templates[format] = render_to_string((
"notification/%s/%s" % (label, format),
"notification/%s" % format), context_instance=context)
return format_templates
def send_now(users, label, extra_context=None, on_site=True, sender=None, from_email=settings.DEFAULT_FROM_EMAIL):
"""
Creates a new notice.
This is intended to be how other apps create new notices.
    notification.send(user, "friends_invite_sent", {
        "spam": "eggs",
        "foo": "bar",
    })
You can pass in on_site=False to prevent the notice emitted from being
displayed on the site.
"""
if extra_context is None:
extra_context = {}
notice_type = NoticeType.objects.get(label=label)
protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
current_site = Site.objects.get_current()
notices_url = u"%s://%s%s" % (
protocol,
unicode(current_site),
reverse("notification_notices"),
)
current_language = get_language()
formats = (
"short.txt",
"full.txt",
"notice.html",
"full.html",
) # TODO make formats configurable
for user in users:
recipients = []
# get user language for user from language store defined in
# NOTIFICATION_LANGUAGE_MODULE setting
try:
language = get_notification_language(user)
except LanguageStoreNotAvailable:
language = None
if language is not None:
# activate the user's language
activate(language)
# update context with user specific translations
context = Context({
"recipient": user,
"sender": sender,
"notice": ugettext(notice_type.display),
"notices_url": notices_url,
"current_site": current_site,
})
context.update(extra_context)
# get prerendered format messages
messages = get_formatted_messages(formats, label, context)
# Strip newlines from subject
subject = "".join(render_to_string("notification/email_subject.txt", {
"message": messages["short.txt"],
}, context).splitlines())
body = render_to_string("notification/email_body.txt", {
"message": messages["full.txt"],
}, context)
notice = Notice.objects.create(recipient=user, message=messages["notice.html"],
notice_type=notice_type, on_site=on_site, sender=sender)
if should_send(user, notice_type, "1") and user.email and user.is_active: # Email
recipients.append(user.email)
send_mail(subject, body, from_email, recipients)
# reset environment to original language
activate(current_language)
def send(*args, **kwargs):
"""
A basic interface around both queue and send_now. This honors a global
flag NOTIFICATION_QUEUE_ALL that helps determine whether all calls should
be queued or not. A per call ``queue`` or ``now`` keyword argument can be
used to always override the default global behavior.
"""
queue_flag = kwargs.pop("queue", False)
now_flag = kwargs.pop("now", False)
assert not (queue_flag and now_flag), "'queue' and 'now' cannot both be True."
if queue_flag:
return queue(*args, **kwargs)
elif now_flag:
return send_now(*args, **kwargs)
else:
if QUEUE_ALL:
return queue(*args, **kwargs)
else:
return send_now(*args, **kwargs)
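# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# The notice label and extra context below are taken from the send_now docstring
# example; any registered NoticeType label works the same way.
#
#   send([user], "friends_invite_sent", {"spam": "eggs"})              # honors NOTIFICATION_QUEUE_ALL
#   send([user], "friends_invite_sent", {"spam": "eggs"}, now=True)    # force immediate delivery
#   send([user], "friends_invite_sent", {"spam": "eggs"}, queue=True)  # force deferral to NoticeQueueBatch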
def queue(users, label, extra_context=None, on_site=True, sender=None, from_email=settings.DEFAULT_FROM_EMAIL):
"""
Queue the notification in NoticeQueueBatch. This allows for large amounts
of user notifications to be deferred to a seperate process running outside
the webserver.
"""
if extra_context is None:
extra_context = {}
if isinstance(users, QuerySet):
users = [row["pk"] for row in users.values("pk")]
else:
users = [user.pk for user in users]
notices = []
for user in users:
notices.append((user, label, extra_context, on_site, sender,from_email))
NoticeQueueBatch(pickled_data=pickle.dumps(notices).encode("base64")).save()
class ObservedItemManager(models.Manager):
def all_for(self, observed, signal):
"""
Returns all ObservedItems for an observed object,
to be sent when a signal is emited.
"""
content_type = ContentType.objects.get_for_model(observed)
observed_items = self.filter(content_type=content_type, object_id=observed.id, signal=signal)
return observed_items
def get_for(self, observed, observer, signal):
content_type = ContentType.objects.get_for_model(observed)
observed_item = self.get(content_type=content_type, object_id=observed.id, user=observer, signal=signal)
return observed_item
class ObservedItem(models.Model):
user = models.ForeignKey(User, verbose_name=_("user"))
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
observed_object = generic.GenericForeignKey("content_type", "object_id")
notice_type = models.ForeignKey(NoticeType, verbose_name=_("notice type"))
added = models.DateTimeField(_("added"), auto_now_add=True)
# the signal that will be listened to send the notice
signal = models.TextField(verbose_name=_("signal"))
objects = ObservedItemManager()
class Meta:
ordering = ["-added"]
verbose_name = _("observed item")
verbose_name_plural = _("observed items")
def send_notice(self, extra_context=None):
if extra_context is None:
extra_context = {}
extra_context.update({"observed": self.observed_object})
send([self.user], self.notice_type.label, extra_context)
def observe(observed, observer, notice_type_label, signal="post_save"):
"""
Create a new ObservedItem.
To be used by applications to register a user as an observer for some object.
"""
notice_type = NoticeType.objects.get(label=notice_type_label)
observed_item = ObservedItem(
user=observer, observed_object=observed,
notice_type=notice_type, signal=signal
)
observed_item.save()
return observed_item
def stop_observing(observed, observer, signal="post_save"):
"""
Remove an observed item.
"""
observed_item = ObservedItem.objects.get_for(observed, observer, signal)
observed_item.delete()
def send_observation_notices_for(observed, signal="post_save", extra_context=None):
"""
Send a notice for each registered user about an observed object.
"""
if extra_context is None:
extra_context = {}
observed_items = ObservedItem.objects.all_for(observed, signal)
for observed_item in observed_items:
observed_item.send_notice(extra_context)
return observed_items
def is_observing(observed, observer, signal="post_save"):
if isinstance(observer, AnonymousUser):
return False
try:
observed_items = ObservedItem.objects.get_for(observed, observer, signal)
return True
except ObservedItem.DoesNotExist:
return False
except ObservedItem.MultipleObjectsReturned:
return True
def handle_observations(sender, instance, *args, **kw):
send_observation_notices_for(instance)
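# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# Typical wiring for the observation API above, assuming a hypothetical "blog"
# app with a Post model and a "post_updated" notice type:
#
#   from django.db.models.signals import post_save
#   from blog.models import Post
#
#   create_notice_type("post_updated", "Post updated", "a post you observe changed")
#   observe(post, request.user, "post_updated")          # start watching one post
#   post_save.connect(handle_observations, sender=Post)  # notify observers on every save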
| mit | 8,039,311,497,318,216,000 | 32.597849 | 114 | 0.640594 | false |
laslabs/vertical-medical | sale_medical_prescription/tests/test_medical_sale_wizard.py | 1 | 7593 | # -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License GPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
import mock
from . import wizard_test_setup
NEXT_SALE = 'odoo.addons.sale_medical_prescription.wizards.' \
'medical_sale_wizard.MedicalSaleWizard._get_next_sale_wizard'
class TestMedicalSaleWizard(wizard_test_setup.WizardTestSetup):
def test_compute_default_session(self):
""" Test rx lines properly extracted from context """
exp = [self.rx_line_7.id, self.rx_line_8.id]
res = sorted(self.wizard_2.prescription_line_ids.ids)
self.assertEquals(
res, exp,
)
def test_compute_default_pharmacy_single_rx_line(self):
""" Test default pharmacy extracted from single rx_line context """
exp = self.rx_line_7.prescription_order_id.partner_id
res = self.wizard_1.pharmacy_id
self.assertEquals(
res, exp,
)
def test_compute_default_pharmacy_multiple_rx_lines(self):
""" Test default pharmacy extracted from multiple rx_lines context """
exp = self.rx_order_8.partner_id
res = self.wizard_3.pharmacy_id
self.assertEquals(
res, exp,
)
def test_action_create_sale_wizards_orders(self):
""" Test sale order fields properly populated from wizard """
patient = self.rx_order_7.patient_id
partner = patient.partner_id
rx_order_names = self.rx_order_7.name
exp_keys = {
'partner_id': partner,
'patient_id': patient,
'pricelist_id': self.env.ref('product.list0'),
'partner_invoice_id': partner,
'partner_shipping_id': partner,
'prescription_order_ids': self.rx_order_7,
'pharmacy_id': self.rx_order_7.partner_id,
'client_order_ref': rx_order_names,
'date_order': self.order_date,
'origin': rx_order_names,
'user_id': self.env.user,
'company_id': self.env.user.company_id,
}
self.wizard_2.action_create_sale_wizards()
for key in exp_keys:
exp = exp_keys[key]
res = getattr(self.wizard_2.sale_wizard_ids[0], key)
self.assertEquals(
res, exp,
'\rKey: %s \rGot: %s \rExpected: %s' % (
key, res, exp
)
)
def test_action_create_sale_wizards_order_line(self):
""" Test order line fields properly populated from wizard """
simv_1 = self.env.ref(
'sale_medical_prescription.product_product_simv_1'
)
exp_keys = {
'product_id': simv_1,
'product_uom': simv_1.uom_id,
'product_uom_qty': self.rx_line_7.qty,
'price_unit': simv_1.list_price,
'prescription_order_line_id': self.rx_line_7,
}
self.wizard_1.action_create_sale_wizards()
for key in exp_keys:
exp = exp_keys[key]
res = getattr(self.wizard_1.sale_wizard_ids[0].order_line[0], key)
self.assertEquals(
res, exp,
'\rKey: %s \rGot: %s \rExpected: %s' % (
key, res, exp
)
)
def test_action_create_sale_wizards_wizard_start(self):
""" Test wizard state properly set to start after calling action """
self.wizard_1.action_create_sale_wizards()
self.assertEquals(
'start', self.wizard_1.state,
)
def test_get_next_sale_wizard_not_in_states(self):
""" Test returns False if none of sale_wizard_ids in only_states """
self.wizard_1.action_create_sale_wizards()
self.wizard_1.sale_wizard_ids[0].state = 'done'
self.assertFalse(
self.wizard_1._get_next_sale_wizard(['new'])
)
def test_get_next_sale_wizard(self):
""" Test next wizard attrs correctly returned """
self.wizard_1.action_create_sale_wizards()
res_next_sale = self.wizard_1._get_next_sale_wizard()
wizard = self.env.ref(
'sale_medical_prescription.medical_sale_temp_view_form'
)
action = self.env.ref(
'sale_medical_prescription.medical_sale_temp_action'
)
order_line = self.wizard_1.sale_wizard_ids[0]
context = self.wizard_1._context.copy()
context['active_id'] = order_line.id
exp_keys = {
'name': action.name,
'help': action.help,
'type': action.type,
'target': 'new',
'context': context,
'res_model': action.res_model,
'res_id': order_line.id,
}
self.assertEquals(
res_next_sale['views'][0][0],
wizard.id,
)
for key in exp_keys:
res = res_next_sale[key]
exp = exp_keys[key]
self.assertEquals(
res, exp,
'\rKey: %s \rGot: %s \rExpected: %s' % (
key, res, exp
)
)
@mock.patch(NEXT_SALE)
def test_action_next_wizard_no_action(self, next_sale):
""" Test wizard state properly set to done after calling action """
next_sale.return_value = False
self.wizard_1.action_create_sale_wizards()
self.wizard_1.action_next_wizard()
self.assertEquals(
'done', self.wizard_1.state,
)
def test_action_rx_sale_conversions_sale_orders(self):
""" Test real sale orders created properly """
self.wizard_2.action_create_sale_wizards()
self.wizard_2.action_next_wizard()
res_action = self.wizard_2.action_rx_sale_conversions()
sale_orders = self.env['sale.order'].browse(res_action['res_ids'])
self.assertEquals(
1, len(sale_orders),
)
def test_action_rx_sale_conversions_sale_order_line(self):
""" Test real sale order lines created properly """
self.wizard_2.action_create_sale_wizards()
self.wizard_2.action_next_wizard()
res_action = self.wizard_2.action_rx_sale_conversions()
sale_orders = self.env['sale.order'].browse(res_action['res_ids'])
self.assertEquals(
2, len(sale_orders[0].order_line),
)
def test_action_rx_sale_conversions_return_attrs(self):
""" Test dictionary returned is correct """
self.wizard_1.action_create_sale_wizards()
self.wizard_1.action_next_wizard()
res_action = self.wizard_1.action_rx_sale_conversions()
form = self.env.ref('sale.view_order_form')
tree = self.env.ref('sale.view_quotation_tree')
action = self.env.ref('sale.action_quotations')
context = self.wizard_1._context.copy()
exp_keys = {
'name': action.name,
'help': action.help,
'type': action.type,
'view_mode': 'tree',
'view_id': tree.id,
'target': 'current',
'context': context,
'res_model': action.res_model,
}
self.assertEquals(
[tree.id, form.id],
[
res_action['views'][0][0],
res_action['views'][1][0],
],
)
for key in exp_keys:
res = res_action[key]
exp = exp_keys[key]
self.assertEquals(
res, exp,
'\rKey: %s \rGot: %s \rExpected: %s' % (
key, res, exp
)
)
| agpl-3.0 | -1,537,561,629,924,999,200 | 35.330144 | 78 | 0.546424 | false |
jsspencer/toy_fci | lattice_propogation.py | 1 | 3133 | #!/usr/bin/env python
'''
lattice_propogation
===================
Script which uses the FCIQMC algorithm without annihilation to propagate
positive and negative psips using Hamiltonians defined in both Hartree product
and Slater determinant bases for two spinless fermions on a `4\\times4` lattice
with periodic boundary conditions. The Hamiltonian operator is:
.. math::
H = - \sum_{<ij>} t \left( c^\dagger_j c_i + c^\dagger_i c_j - u n_i n_j \\right).
This is an example where the evolution of an FCIQMC calculation is
step-by-step identical in both first- and second-quantized basis sets.
This is an independent implementation of work originally performed by Michael
Kolodrubetz and Bryan Clark (Princeton).
'''
import lattice_fci
import numpy
if __name__ == '__main__':
N = 2
L = 4
sys = lattice_fci.LatticeModel(N, L)
tau = 0.1
print('# Fermions on a Lattice')
print('# =====================')
print('''#
# Use a simple lattice Hamiltonian as an illustration of when the FCIQMC sign
# problem is identical in both Hartree product and Slater determinant bases.
#
# Number of fermions: %i
# Lattice dimensions: %i
#
# Propogation data format: N labels of the single-particle functions in
# a many-fermion basis function followed by the weight of the positive and
# negative particles on that basis function.
#
# No annihilation is performed.
#''' % (sys.nfermions, sys.L)
)
init_pos_basis = (lattice_fci.LatticeSite(0, 0, sys.L), lattice_fci.LatticeSite(0, 1, sys.L))
init_neg_basis = (lattice_fci.LatticeSite(0, 1, sys.L), lattice_fci.LatticeSite(0, 0, sys.L))
(hartree_products, determinants) = lattice_fci.init_lattice_basis(sys.nfermions, sys.L)
hartree_hamil = lattice_fci.HartreeLatticeHamiltonian(sys, hartree_products, tau)
(val, vec) = hartree_hamil.eigh()
print('# Lowest eigenvalues in a Hartree product basis: %s.' % ', '.join('%f' % v for v in val[:10]))
det_hamil = lattice_fci.DeterminantLatticeHamiltonian(sys, determinants, tau)
(val, vec) = det_hamil.eigh()
print('# Lowest eigenvalues in Slater determinant basis: %s.' % ', '.join('%f' %v for v in val[:5]))
print("#")
for (hamil, label) in ((hartree_hamil, 'Hartree product'), (det_hamil, 'Slater determinant')):
print("# %s" % "Propogating Hamiltonian in %s basis" % (label))
print("# %s" % ("-"*len("Propogating Hamiltonian in %s basis" % (label))))
print("#")
pos = numpy.zeros(len(hamil.basis))
neg = numpy.zeros(len(hamil.basis))
for (indx, bfn) in enumerate(hamil.basis):
if bfn == init_pos_basis:
pos[indx] = 1
elif bfn == init_neg_basis:
neg[indx] = 1
t = 0
for tfinal in (0, 1, 2, 8):
while abs(t - tfinal) > 1.e-10:
t += tau
(pos, neg) = hamil.propogate(pos, neg)
print("# %s" % "tau=%.2f" % (t))
print("# %s" % ("^"*len("tau=%.2f" % (t))))
print("#")
lattice_fci.print_two_fermion_wfn(hamil.basis, pos, neg, L)
print('\n\n\n')
| bsd-3-clause | -6,894,005,543,195,005,000 | 36.297619 | 105 | 0.621449 | false |
Jet-Streaming/gyp | test/variables/commands/gyptest-commands.py | 1 | 1247 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Test variable expansion of '<!()' syntax commands.
"""
import os
import TestGyp
test = TestGyp.TestGyp(format='gypd')
expect = test.read('commands.gyp.stdout').replace('\r', '')
test.run_gyp('commands.gyp',
'--debug', 'variables',
stdout=expect, ignore_line_numbers=True)
# Verify the commands.gypd against the checked-in expected contents.
#
# Normally, we should canonicalize line endings in the expected
# contents file setting the Subversion svn:eol-style to native,
# but that would still fail if multiple systems are sharing a single
# workspace on a network-mounted file system. Consequently, we
# massage the Windows line endings ('\r\n') in the output to the
# checked-in UNIX endings ('\n').
contents = test.read('commands.gypd').replace('\r', '')
expect = test.read('commands.gypd.golden').replace('\r', '')
if not test.match(contents, expect):
print "Unexpected contents of `commands.gypd'"
test.diff(expect, contents, 'commands.gypd ')
test.fail_test()
test.pass_test()
| bsd-3-clause | -8,167,964,090,152,755,000 | 29.974359 | 72 | 0.685646 | false |
skosukhin/spack | var/spack/repos/builtin/packages/font-adobe-utopia-100dpi/package.py | 1 | 2145 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class FontAdobeUtopia100dpi(Package):
"""X.org adobe-utopia-100dpi font."""
homepage = "http://cgit.freedesktop.org/xorg/font/adobe-utopia-100dpi"
url = "https://www.x.org/archive/individual/font/font-adobe-utopia-100dpi-1.0.4.tar.gz"
version('1.0.4', '128416eccd59b850f77a9b803681da3c')
depends_on('font-util')
depends_on('fontconfig', type='build')
depends_on('mkfontdir', type='build')
depends_on('bdftopcf', type='build')
depends_on('[email protected]:', type='build')
depends_on('util-macros', type='build')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('install')
# `make install` copies the files to the font-util installation.
# Create a fake directory to convince Spack that we actually
# installed something.
mkdir(prefix.lib)
| lgpl-2.1 | 6,558,359,492,020,549,000 | 39.471698 | 96 | 0.661538 | false |
asifhj/Python_SOAP_OSSJ_SAP_Fusion_Kafka_Spark_HBase | srKbLink.py | 1 | 7686 | __author__ = 'asifj'
import requests
from pymongo import MongoClient
import json
import csv
import traceback
import logging
from tabulate import tabulate
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.DEBUG
)
class HBase:
def __init__(self):
self.url = "http://172.22.147.248:8092/api/"
pass
def get_case_by_case_id(self, document, row):
print "API URL: "+self.url+"case-manager/cases/"+str(document['caseId'])
r = requests.get(self.url+"case-manager/cases/"+str(document['caseId']))
print "CaseID: "+str(document['caseId'])
print "Response: "+str(r.status_code)
#keys = len(document.keys())
#print "Keys: "+str(keys)
row.append(r.status_code)
status = 0
if r.status_code==200:
response = json.loads(r.text)
table = []
if not (str(document['caseId']).strip() == "" if response['srId'] is None else str(response['srId']).strip()):
print "Incorrect value for 'caseId'!"
status = 1
document_kbLinks_len = len(document['link'])
response_kbLinks_len = 0
if type(document['link']) is dict:
print "kBLinks in document is not an array!"
document_kbLinks_len = 1
document['link'] = [document['link']]
if response['kbLinks'] is not None:
response_kbLinks_len = len(response['kbLinks'])
else:
response_kbLinks_len = 0
print "Number of kbLinks in document: "+str(document_kbLinks_len)
print "Number of kbLinks in API response: "+str(response_kbLinks_len)
if document_kbLinks_len==0:
print "No kbLinks found in document!"
row.append("No kbLinks found in document!")
print "Kafka: "+str(json.dumps(document['link'], sort_keys=True))
print "API: "+str(json.dumps(response['kbLinks'], sort_keys=True))
return row
if response_kbLinks_len==0 and document_kbLinks_len>0:
print "No kbLinks found in API response but present in document."
row.append("No kbLinks found in API response but present in document.")
print "Kafka: "+str(json.dumps(document['link'], sort_keys=True))
print "API: "+str(json.dumps(response['kbLinks'], sort_keys=True))
return row
for doc_link in document['link']:
match_level = 0
found = 0
match_location = 0
counter = 0
old_match_level = 0
match_data = ""
for resp in response['kbLinks']:
match_level = 0
if doc_link['kbId'] == ("" if resp['kbId'] is None else resp['kbId']):
match_level += 1
if doc_link['status'] == ("" if resp['status'] is None else resp['status']):
match_level += 1
if doc_link['description'] == ("" if resp['description'] is None else resp['description']):
match_level += 1
if doc_link['internalId'] == ("" if resp['internalId'] is None else resp['internalId']):
match_level += 1
if doc_link['url'] == ("" if resp['url'] is None else resp['url']):
match_level += 1
if doc_link['kbDate'] == ("" if resp['kbDate'] is None else str(resp['kbDate']).replace("-", "").replace(":", "").replace(" ", "")):
match_level += 1
if doc_link['dataSource'] == ("" if resp['dataSource'] is None else resp['dataSource']):
match_level += 1
if doc_link['sourceVisibility'] == ("" if resp['srcVisiblity'] is None else resp['srcVisiblity']):
match_level += 1
if doc_link['integrated'] == ("" if resp['kbFlag'] is None else resp['kbFlag']):
match_level += 1
if doc_link['srVisibility'] == ("" if resp['srVisibility'] is None else resp['srVisibility']):
if match_level >= 9:
found = 1
match_level += 1
match_location = counter
match_data = resp
break;
if match_level >= old_match_level:
match_location = counter
old_match_level = match_level
match_data = resp
counter += 1
if found == 0:
print "************************************************"
print "Data Mismatch, max number of values matched is "+str(old_match_level)
print "Kafka ==> "+str(json.dumps(doc_link, sort_keys=True))
print "API ==> "+str(json.dumps(match_data, sort_keys=True))
tmp = ["", ""]
tmp.append("Incorrect value for 'kbLinks'!")
table.append(tmp)
status = 1
print "************************************************"
else:
print "Data matched, highest level of match is "+str(match_level)
print "Kafka ==> "+str(json.dumps(doc_link, sort_keys=True))
print "API ==> "+str(json.dumps(match_data, sort_keys=True))
tmp = ["", ""]
tmp.append("Match found for 'kbLinks'!")
table.append(tmp)
if status == 0:
print "Match Found"
row.append("Match Found")
else:
print "\nCompared JSONs"
print "Kafka: "+str(json.dumps(document['link'], sort_keys=True))
print "API: "+str(json.dumps(response['kbLinks'], sort_keys=True))
print tabulate(table, headers=["Kafka", "API", "Status"], tablefmt="rst")
else:
print "No Match Found in Hadoop."
row.append("No Match Found in Hadoop.")
return row
client = MongoClient('10.219.48.134', 27017)
#client = MongoClient('192.168.56.101', 27017)
db = client['SAPEvent']
collection = db['srKbLink']
api = HBase()
document_no = 0
#documents = collection.find({})
documents = collection.find({ 'caseId': '2015-1117-T-0021'})
ofile = open('srKbLink.csv', "wb")
writer = csv.writer(ofile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
row = ["SNo", "CaseID", "KafkaJSON", "APIResponse", "Status"]
writer.writerow(row)
for document in documents:
row = []
document_no += 1
row.append(document_no)
row.append(document['caseId'])
row.append(str(document).replace("\n", ""))
print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print "Document No: "+str(document_no)
try:
row = api.get_case_by_case_id(document, row)
except Exception:
print Exception.message
print(traceback.format_exc())
writer.writerow(row)
print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print "\n\n"
ofile.close()
| apache-2.0 | -7,725,995,754,706,033,000 | 45.73913 | 152 | 0.47606 | false |
strogo/djpcms | tests/regression/autocomplete/tests.py | 1 | 1443 | from djpcms.test import TestCase
from djpcms.views import appsite, appview
from regression.autocomplete.models import Strategy
# RULE 1 import forms from djpcms
from djpcms import forms
class TestForm(forms.Form):
strategy = forms.ModelChoiceField(Strategy.objects.all())
class TestFormMulti(forms.Form):
strategy = forms.ModelMultipleChoiceField(Strategy.objects.all())
class ApplicationWithAutocomplete(appsite.ModelApplication):
# RULE 2 the search_fields list
search_fields = ['name','description']
# RULE 3 the autocomplete view
autocomplete = appview.AutocompleteView(regex = 'autocompletetest',
display = 'name')
# RULE 4 register as usual
appurls = ApplicationWithAutocomplete('/strategies/', Strategy),
class TestAutocomplete(TestCase):
'''Autocomplete functionalities. Autocomplete widgets are implemented
in :mod:`djpcms.utils.html.autocomplete`.'''
appurls = 'regression.autocomplete.tests'
def testModelChoiceField(self):
f = TestForm()
html = f.as_table()
self.assertFalse('<select' in html)
self.assertTrue('href="/strategies/autocompletetest/"' in html)
def testModelMultipleChoiceField(self):
f = TestFormMulti()
html = f.as_table()
self.assertFalse('<select' in html)
self.assertTrue('href="/strategies/autocompletetest/"' in html) | bsd-3-clause | 4,268,121,878,232,398,300 | 31.818182 | 73 | 0.688843 | false |
RevansChen/online-judge | Codewars/6kyu/financing-a-purchase/Python/test.py | 1 | 1050 | # Python - 3.6.0
test.assert_equals(amort(7.4, 10215, 24, 20), 'num_payment 20 c 459 princ 445 int 14 balance 1809')
test.assert_equals(amort(7.9, 107090, 48, 41), 'num_payment 41 c 2609 princ 2476 int 133 balance 17794')
test.assert_equals(amort(6.8, 105097, 36, 4), 'num_payment 4 c 3235 princ 2685 int 550 balance 94447')
test.assert_equals(amort(3.8, 48603, 24, 10), 'num_payment 10 c 2106 princ 2009 int 98 balance 28799')
test.assert_equals(amort(1.9, 182840, 48, 18), 'num_payment 18 c 3959 princ 3769 int 189 balance 115897')
test.assert_equals(amort(1.9, 19121, 48, 2), 'num_payment 2 c 414 princ 384 int 30 balance 18353')
test.assert_equals(amort(2.2, 112630, 60, 11), 'num_payment 11 c 1984 princ 1810 int 174 balance 92897')
test.assert_equals(amort(5.6, 133555, 60, 53), 'num_payment 53 c 2557 princ 2464 int 93 balance 17571')
test.assert_equals(amort(9.8, 67932, 60, 34), 'num_payment 34 c 1437 princ 1153 int 283 balance 33532')
test.assert_equals(amort(3.7, 64760, 36, 24), 'num_payment 24 c 1903 princ 1829 int 75 balance 22389')
| mit | -6,679,267,894,839,368,000 | 86.5 | 105 | 0.724762 | false |
yhoogstrate/dr-disco | setup.py | 1 | 1885 | #!/usr/bin/env python
# *- coding: utf-8 -*-
# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 textwidth=79:
"""
Installer of Dr. Disco
[License: GNU General Public License v3 (GPLv3)]
This file is part of Dr. Disco.
FuMa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
FuMa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import drdisco
from setuptools import setup
def get_requirements():
with open('requirements.txt', 'r') as fh:
content = fh.read().strip().split()
return content
setup(name="dr-disco",
scripts=['bin/dr-disco'],
packages=["drdisco"],
test_suite="tests",
tests_require=['nose2', 'pytest', 'pytest-cov'],
setup_requires=['scipy', 'numpy'],
      install_requires=get_requirements(),
version=drdisco.__version__,
description="Makes discordant RNA-Seq alignments healthy, and tries to interpret intronic break points",
author=drdisco.__author__,
url=drdisco.__homepage__,
keywords=["rna-seq", "intronic", "break point"],
classifiers=[
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
| gpl-3.0 | -3,885,011,239,471,985,000 | 33.907407 | 110 | 0.677984 | false |
maas/maas | src/provisioningserver/import_images/cleanup.py | 1 | 1824 | # Copyright 2014-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Clean up old snapshot directories."""
import os
import shutil
def list_old_snapshots(storage):
"""List of snapshot directories that are no longer in use."""
current_dir = os.path.join(storage, "current")
if os.path.exists(current_dir):
current_snapshot = os.path.basename(os.readlink(current_dir))
else:
current_snapshot = None
return [
os.path.join(storage, directory)
for directory in os.listdir(storage)
if directory.startswith("snapshot-") and directory != current_snapshot
]
def cleanup_snapshots(storage):
"""Remove old snapshot directories."""
old_snapshots = list_old_snapshots(storage)
for snapshot in old_snapshots:
shutil.rmtree(snapshot)
def list_unused_cache_files(storage):
"""List of cache files that are no longer being referenced by snapshots."""
cache_dir = os.path.join(storage, "cache")
if os.path.exists(cache_dir):
cache_files = [
os.path.join(cache_dir, filename)
for filename in os.listdir(cache_dir)
if os.path.isfile(os.path.join(cache_dir, filename))
]
else:
cache_files = []
return [
cache_file
for cache_file in cache_files
if os.stat(cache_file).st_nlink == 1
]
def cleanup_cache(storage):
"""Remove files that are no longer being referenced by snapshots."""
cache_files = list_unused_cache_files(storage)
for cache_file in cache_files:
os.remove(cache_file)
def cleanup_snapshots_and_cache(storage):
"""Remove old snapshot directories and old cache files."""
cleanup_snapshots(storage)
cleanup_cache(storage)
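# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# `storage` is the image storage directory that contains the `cache/`, `current/`
# and `snapshot-*` entries; the path below is only an assumption for illustration.
#
#   cleanup_snapshots_and_cache("/var/lib/maas/boot-resources")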
| agpl-3.0 | -8,726,739,808,957,960,000 | 29.4 | 79 | 0.663377 | false |
Signiant/maestro-core | maestro/tools/path.py | 1 | 3383 | import os, platform, re, shutil
def get_tree_size(path = '.'):
"""
get_tree_size will return the total size of a directory tree
"""
if not os.path.exists(path):
raise OSError("Path " + str(path) + " does not exist!")
total_size = 0
for dirpath, dirnames, filenames in os.walk(str(path)):
for f in filenames:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size
def get_case_insensitive_path(path = '.'):
"""
    get_case_insensitive_path will check for the existence of a path on a case-sensitive file system, regardless of the casing of the input path. Returns the absolute path (with correct casing) if found, or None.
"""
if os.path.exists(path):
return path
elif platform.system() == "Windows":
return None
path_elements = full_split(path)
path_root = None
drive, path = os.path.splitdrive(path)
if not drive:
if not path.startswith("/"):
path_root = os.path.abspath(os.path.normpath("./"))
else:
path_root = os.path.abspath(os.path.normpath("/"))
else:
path_root = os.path.abspath(os.path.normpath(drive))
if not os.path.exists(path_root):
raise OSError("Unable to locate path root: " + str(path_root))
#Build the full path, also used for error messages
full_path = path_root
for element in path_elements:
if not element or element == "/" or element == ".":
continue
found = False
for directory in os.listdir(full_path):
if element.lower() == directory.lower():
full_path = os.path.join(full_path,directory)
found = True
break
if found is False:
return None
return full_path
# Credit: Gian Marco Gherardi
# http://stackoverflow.com/questions/6260149/os-symlink-support-in-windows
def symlink(source, link_name):
import os
os_symlink = getattr(os, "symlink", None)
if callable(os_symlink):
os_symlink(source, link_name)
else:
import ctypes
csl = ctypes.windll.kernel32.CreateSymbolicLinkW
csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
csl.restype = ctypes.c_ubyte
flags = 1 if os.path.isdir(source) else 0
if csl(link_name, source, flags) == 0:
raise ctypes.WinError()
def purge(pattern, path, match_directories = False):
for root, dirs, files in os.walk(path):
if match_directories is True:
for dir in filter(lambda x: re.match(pattern, x), dirs):
shutil.rmtree(os.path.join(root,dir))
for file in filter(lambda x: re.match(pattern, x), files):
os.remove(os.path.join(root, file))
# Credit: John Machin
# http://stackoverflow.com/questions/4579908/cross-platform-splitting-of-path-in-python
def full_split(path, debug=False):
"""
    full_split will split Windows and UNIX paths into separate elements
"""
parts = []
while True:
newpath, tail = os.path.split(path)
if debug: print (repr(path), (newpath, tail))
if newpath == path:
assert not tail
if path: parts.append(path)
break
parts.append(tail)
path = newpath
parts.reverse()
return parts
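# --- Illustrative usage sketch; not part of the original module. ---
# The example paths are arbitrary and chosen only to demonstrate the helpers.
if __name__ == "__main__":
    print(full_split("/usr/local/bin/python"))   # ['/', 'usr', 'local', 'bin', 'python']
    print(get_tree_size("."))                    # total bytes under the current directory
    # Correctly-cased absolute path on case-sensitive filesystems, or None if not found.
    print(get_case_insensitive_path("./readme.md"))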
| mit | -5,302,980,098,306,267,000 | 33.876289 | 212 | 0.606562 | false |
takeshixx/webfixy | libfixy/deobfuscation.py | 1 | 1230 | import demjson
values = {
'uff000b': 'policy',
'sfe0009': 'skin',
's1': 'user',
's2': 'manualURL',
's11': 'arch',
's15': 'boardname',
's16': 'version',
's17': 'board',
's2c': 'displayname',
'M1': 'prefs',
'Uff0001': 'path',
'Uff0002': '',
'uff0007': 'cmd',
'ufe0001': 'safeModeid',
'u1': 'sqcaps' # uptime?
}
cmds = {
0xfe0003: '', # newObject
0xfe0005: '', # newObject
0xfe0006: 'removeObject',
0xfe0007: 'moveObjectAfter',
0xfe0008: 'next',
0xfe0010: 'fetch',
0xfe0012: 'subscribe',
0xfe0013: 'unsubscribe',
0xfe0014: 'logout',
0xfe000e: 'safePrefs'
}
errors = {
0xfe0002: 'feature is not implemented',
0xfe0003: 'feature is not implemented',
0xfe0011: 'object doesn\'t exist',
0xfe0004: 'object doesn\'t exist',
0xfe0007: 'object already exists',
0xfe0009: 'not permitted',
0xfe0012: 'busy',
0xfe000d: 'timeout'
}
def print_message(msg):
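    # Despite the name, nothing is printed: the obfuscated keys and command codes
    # in the decoded JSON message are translated to readable names and the
    # resulting dict is returned.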
msg = demjson.decode(msg)
r = dict()
for k, v in msg.items():
if v in list(cmds.keys()) and cmds[v]:
v = cmds[v]
if k in values.keys():
r[values[k]] = v
else:
r[k] = v
    return r
| gpl-3.0 | 1,142,369,460,716,871,200 | 20.982143 | 46 | 0.543089 | false |
our-city-app/oca-backend | src/rogerthat/pages/login.py | 1 | 11605 | # -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
import base64
import json
import logging
import os
import urllib
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp import template
from mcfw.properties import azzert
from rogerthat.bizz import session
from rogerthat.bizz.job import hookup_with_default_services
from rogerthat.bizz.limit import clear_rate_login
from rogerthat.bizz.profile import update_password_hash, create_user_profile
from rogerthat.bizz.registration import get_headers_for_consent, save_tos_consent
from rogerthat.bizz.session import create_session
from rogerthat.bizz.user import calculate_secure_url_digest, update_user_profile_language_from_headers
from rogerthat.dal.profile import get_service_or_user_profile
from rogerthat.exceptions import ServiceExpiredException
from rogerthat.exceptions.login import AlreadyUsedUrlException, ExpiredUrlException, InvalidUrlException
from rogerthat.models import UserProfile, ServiceProfile
from rogerthat.pages.legal import get_legal_language, get_version_content, DOC_TERMS_SERVICE, \
get_current_document_version, DOC_TERMS
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.templates import get_languages_from_header, JINJA_ENVIRONMENT
from rogerthat.utils import urlencode, now, channel
from rogerthat.utils.cookie import set_cookie
from rogerthat.utils.crypto import decrypt, sha256_hex
_BASE_DIR = os.path.dirname(__file__)
class SessionHandler(webapp.RequestHandler):
def redirect(self, url, permanent=False):
return super(SessionHandler, self).redirect(str(url), permanent)
def start_session(self, user, cont=None):
try:
secret, _ = create_session(user)
except ServiceExpiredException:
return self.redirect('/service_disabled')
server_settings = get_server_settings()
set_cookie(self.response, server_settings.cookieSessionName, secret)
if not cont:
cont = self.request.GET.get("continue", "/")
if cont:
self.redirect(cont)
else:
self.redirect("/")
def stop_session(self):
current_session = users.get_current_session()
session.drop_session(current_session)
server_settings = get_server_settings()
set_cookie(self.response, server_settings.cookieSessionName, current_session.parent_session_secret or "")
self.redirect("/")
class LoginHandler(webapp.RequestHandler):
def get(self):
self.redirect('/customers/signin')
class SetPasswordHandler(SessionHandler):
def return_error(self, reason="Invalid url received."):
path = os.path.join(_BASE_DIR, 'error.html')
self.response.out.write(template.render(path, {"reason": reason, "hide_header": True}))
def parse_data(self, email, data):
user = users.User(email)
data = base64.decodestring(data)
data = decrypt(user, data)
data = json.loads(data)
azzert(data["d"] == calculate_secure_url_digest(data))
return data, user
def parse_and_validate_data(self, email, data):
if not email or not data:
raise InvalidUrlException()
try:
data, user = self.parse_data(email, data)
except UnicodeEncodeError:
logging.warn("Could not decipher url!\ndata: %s\nemail: %s", data, email, exc_info=True)
raise InvalidUrlException()
except:
logging.exception("Could not decipher url!\ndata: %s\nemail: %s", data, email)
raise InvalidUrlException()
now_ = now()
timestamp = data["t"]
if not (now_ < timestamp < now_ + 5 * 24 * 3600):
raise ExpiredUrlException(action=data["a"])
profile = get_service_or_user_profile(user)
if profile and profile.lastUsedMgmtTimestamp + 5 * 24 * 3600 > timestamp:
raise AlreadyUsedUrlException(action=data["a"])
return data
def get(self):
email = self.request.get("email")
data = self.request.get("data")
try:
parsed_data = self.parse_and_validate_data(email, data)
except ExpiredUrlException as e:
return self.return_error("The %s link has expired." % e.action)
except AlreadyUsedUrlException as e:
return self.return_error("You cannot use the %s link more than once." % e.action)
except InvalidUrlException:
return self.return_error()
path = os.path.join(_BASE_DIR, 'setpassword.html')
self.response.out.write(template.render(path, {
'name': parsed_data['n'],
'hide_header': True,
'data': data,
'email': email,
'action': parsed_data['a']
}))
def post(self):
email = self.request.get("email", None)
password = self.request.get("password", None)
data = self.request.get("data", None)
if not (email and password and data):
return self.redirect("/")
try:
data, user = self.parse_data(email, data)
except:
logging.exception("Could not decypher url!")
return self.redirect("/")
now_ = now()
language_header = self.request.headers.get('Accept-Language', None)
language = get_languages_from_header(language_header)[0] if language_header else None
passwordHash = sha256_hex(password)
profile = get_service_or_user_profile(user)
if not profile:
profile = create_user_profile(user, data['n'], language) # todo communities set community_id
update_password_hash(profile, passwordHash, now_)
else:
def update():
p = db.get(profile.key())
if isinstance(profile, UserProfile) and not p.language:
p.language = language
p.passwordHash = passwordHash
p.lastUsedMgmtTimestamp = now_
p.put()
return p
profile = db.run_in_transaction(update)
if isinstance(profile, UserProfile):
hookup_with_default_services.schedule(user)
self.start_session(user, data["c"])
class ResetPasswordHandler(webapp.RequestHandler):
def get(self):
cont = self.request.GET.get("continue", "/")
email = self.request.GET.get("email", "")
path = os.path.join(_BASE_DIR, 'resetpassword.html')
self.response.out.write(template.render(path, {"continue": cont, "hide_header": True, "email": email}))
class AuthenticationRequiredHandler(webapp.RequestHandler):
def get(self):
path = "/login"
cont = self.request.GET.get("continue", None)
if cont:
path += "?" + urlencode((("continue", cont),))
self.redirect(path)
class TermsAndConditionsHandler(webapp.RequestHandler):
def get_doc_and_lang(self, user):
profile = get_service_or_user_profile(user)
if isinstance(profile, ServiceProfile):
if profile.solution:
return None, None
doc_type = DOC_TERMS_SERVICE
language = get_legal_language(profile.defaultLanguage)
else:
doc_type = DOC_TERMS
language = get_legal_language(profile.language)
return doc_type, language
def get(self):
user = users.get_current_user()
doc_type, language = self.get_doc_and_lang(user)
if not doc_type and not language:
self.redirect('/')
return
version = get_current_document_version(doc_type)
self.response.out.write(JINJA_ENVIRONMENT.get_template('terms_and_conditions.html').render({
'user': user,
'tac': get_version_content(language, doc_type, version),
'language': language,
'version': version,
'logout_url': users.create_logout_url('/'),
}))
def post(self):
user = users.get_current_user()
if not user:
self.redirect('/logout')
return
doc, lang = self.get_doc_and_lang(user)
if not doc and not lang:
self.redirect('/')
return
version = long(self.request.get('version')) or get_current_document_version(doc)
profile = get_service_or_user_profile(user)
profile.tos_version = version
profile.put()
save_tos_consent(user, get_headers_for_consent(self.request), version, None)
self.redirect('/')
class LogoutHandler(SessionHandler):
def get(self):
user = users.get_current_user()
self.stop_session()
channel.send_message(user, u'rogerthat.system.logout')
cont = self.request.get('continue')
if cont:
self.redirect('/%s' % cont)
class AutoLogin(webapp.RequestHandler):
def parse_data(self, email, data):
user = users.User(email)
data = base64.decodestring(data)
data = decrypt(user, data)
data = json.loads(data)
azzert(data["d"] == calculate_secure_url_digest(data))
return data, user
def get(self):
email = self.request.get("email", None)
data = self.request.get("data", None)
service_identity = self.request.get("si", None)
user = users.get_current_user()
if user:
users.clear_user()
channel.send_message(user, u'rogerthat.system.logout')
if not email or not data:
logging.warn("not al params received for email: %s and data: %s" % (email, data))
self.redirect("/")
return
try:
data, _ = self.parse_data(email, data)
except:
logging.warn("Could not decipher url! email: %s and data: %s" % (email, data), exc_info=True)
self.redirect("/")
return
user = users.User(email)
profile = get_service_or_user_profile(user)
if not profile:
logging.warn("profile not found for email: %s" % email)
self.redirect("/")
return
try:
secret, _ = create_session(user, service_identity=service_identity)
except ServiceExpiredException:
return self.redirect('/service_disabled')
server_settings = get_server_settings()
set_cookie(self.response, server_settings.cookieSessionName, secret)
clear_rate_login(user)
update_user_profile_language_from_headers(profile, self.response.headers)
params = self.request.GET
redirect_url = '/'
if params:
params = dict((k, v.decode('utf8')) for k, v in params.iteritems())
del params['email']
del params['data']
if "si" in params:
del params['si']
redirect_url = "%s?%s" % (redirect_url, urllib.urlencode(params))
logging.info("Redirecting to url: %s" % redirect_url)
self.redirect(redirect_url)
| apache-2.0 | 5,234,816,009,816,448,000 | 35.152648 | 113 | 0.627574 | false |
googleapis/python-irm | google/cloud/irm_v1alpha2/gapic/transports/incident_service_grpc_transport.py | 1 | 20473 | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.cloud.irm_v1alpha2.proto import incidents_service_pb2_grpc
class IncidentServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.cloud.irm.v1alpha2 IncidentService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
def __init__(
self, channel=None, credentials=None, address="irm.googleapis.com:443"
):
"""Instantiate the transport class.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
address (str): The address where the service is hosted.
"""
# If both `channel` and `credentials` are specified, raise an
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
"The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
if channel is None:
channel = self.create_channel(
address=address,
credentials=credentials,
options={
"grpc.max_send_message_length": -1,
"grpc.max_receive_message_length": -1,
}.items(),
)
self._channel = channel
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
"incident_service_stub": incidents_service_pb2_grpc.IncidentServiceStub(
channel
),
}
@classmethod
def create_channel(
cls, address="irm.googleapis.com:443", credentials=None, **kwargs
):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
kwargs (dict): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
)
@property
def channel(self):
"""The gRPC channel used by the transport.
Returns:
grpc.Channel: A gRPC channel object.
"""
return self._channel
@property
def delete_artifact(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.delete_artifact`.
Deletes an existing artifact.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].DeleteArtifact
@property
def request_incident_role_handover(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.request_incident_role_handover`.
Starts a role handover. The proposed assignee will receive an email
notifying them of the assignment. This will fail if a role handover is
already pending.
Handover to an oncall ladder is not permitted. Use
CreateIncidentRoleAssignment instead.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].RequestIncidentRoleHandover
@property
def confirm_incident_role_handover(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.confirm_incident_role_handover`.
Confirms a role handover. This will fail if the 'proposed_assignee'
field of the IncidentRoleAssignment is not equal to the 'new_assignee'
field of the request. If the caller is not the new_assignee,
ForceIncidentRoleHandover should be used instead.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ConfirmIncidentRoleHandover
@property
def force_incident_role_handover(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.force_incident_role_handover`.
Forces a role handover. This will fail if the 'proposed_assignee'
field of the IncidentRoleAssignment is not equal to the 'new_assignee'
field of the request. If the caller is the new_assignee,
ConfirmIncidentRoleHandover should be used instead.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ForceIncidentRoleHandover
@property
def create_incident(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_incident`.
Creates a new incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateIncident
@property
def get_incident(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.get_incident`.
Returns an incident by name.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].GetIncident
@property
def search_incidents(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.search_incidents`.
Returns a list of incidents.
Incidents are ordered by start time, with the most recent incidents first.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].SearchIncidents
@property
def update_incident(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.update_incident`.
Updates an existing incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].UpdateIncident
@property
def search_similar_incidents(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.search_similar_incidents`.
Returns a list of incidents that are "similar" to the specified incident
or signal. This functionality is provided on a best-effort basis and the
definition of "similar" is subject to change.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].SearchSimilarIncidents
@property
def create_annotation(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_annotation`.
Creates an annotation on an existing incident. Only 'text/plain' and
'text/markdown' annotations can be created via this method.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateAnnotation
@property
def list_annotations(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.list_annotations`.
Lists annotations that are part of an incident. No assumptions should be
made on the content-type of the annotation returned.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ListAnnotations
@property
def create_tag(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_tag`.
Creates a tag on an existing incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateTag
@property
def delete_tag(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.delete_tag`.
Deletes an existing tag.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].DeleteTag
@property
def list_tags(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.list_tags`.
Lists tags that are part of an incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ListTags
@property
def create_signal(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_signal`.
Creates a new signal.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateSignal
@property
def search_signals(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.search_signals`.
Lists signals that are part of an incident.
Signals are returned in reverse chronological order.
Note that search should not be relied on for critical functionality. It
has lower availability guarantees and might fail to return valid results.
Returned results might include stale or extraneous entries.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].SearchSignals
@property
def lookup_signal(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.lookup_signal`.
Finds a signal by other unique IDs.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].LookupSignal
@property
def get_signal(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.get_signal`.
Returns a signal by name.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].GetSignal
@property
def update_signal(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.update_signal`.
Updates an existing signal (for example, to assign/unassign it to an
incident).
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].UpdateSignal
@property
def escalate_incident(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.escalate_incident`.
Escalates an incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].EscalateIncident
@property
def create_artifact(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_artifact`.
Creates a new artifact.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateArtifact
@property
def list_artifacts(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.list_artifacts`.
Returns a list of artifacts for an incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ListArtifacts
@property
def update_artifact(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.update_artifact`.
Updates an existing artifact.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].UpdateArtifact
@property
def send_shift_handoff(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.send_shift_handoff`.
Sends a summary of the shift for oncall handoff.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].SendShiftHandoff
@property
def create_subscription(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_subscription`.
Creates a new subscription.
This will fail if:
a. there are too many (50) subscriptions in the incident already
b. a subscription using the given channel already exists
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateSubscription
@property
def update_subscription(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.update_subscription`.
Updates a subscription.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].UpdateSubscription
@property
def list_subscriptions(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.list_subscriptions`.
Returns a list of subscriptions for an incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ListSubscriptions
@property
def delete_subscription(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.delete_subscription`.
Deletes an existing subscription.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].DeleteSubscription
@property
def create_incident_role_assignment(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.create_incident_role_assignment`.
Creates a role assignment on an existing incident. Normally, the user field
will be set when assigning a role to oneself, and the next field will be
set when proposing another user as the assignee. Setting the next field
directly to a user other than oneself is equivalent to proposing and
force-assigning the role to the user.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CreateIncidentRoleAssignment
@property
def delete_incident_role_assignment(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.delete_incident_role_assignment`.
Deletes an existing role assignment.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].DeleteIncidentRoleAssignment
@property
def list_incident_role_assignments(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.list_incident_role_assignments`.
Lists role assignments that are part of an incident.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].ListIncidentRoleAssignments
@property
def cancel_incident_role_handover(self):
"""Return the gRPC stub for :meth:`IncidentServiceClient.cancel_incident_role_handover`.
Cancels a role handover. This will fail if the 'proposed_assignee'
field of the IncidentRoleAssignment is not equal to the 'new_assignee'
field of the request.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["incident_service_stub"].CancelIncidentRoleHandover
| apache-2.0 | -3,519,468,932,222,391,300 | 35.888288 | 98 | 0.643921 | false |
CLVsol/odoo_cl_addons | cl_person/person_address/cl_person_address.py | 1 | 2726 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp.osv import fields, osv
from datetime import *
class cl_person_address(osv.Model):
_name = 'cl_person.address'
_columns = {
'address_id': fields.many2one('cl_address', 'Address', required=False),
'person_id': fields.many2one('cl_person', string='Person', help='Person'),
'sign_in_date': fields.datetime("Sign in date", required=False),
'sign_out_date': fields.datetime("Sign out date", required=False),
'notes': fields.text(string='Notes'),
'active': fields.boolean(
'Active',
help="If unchecked, it will allow you to hide the person address without removing it."
),
}
_order = "sign_in_date desc"
_defaults = {
'sign_in_date': lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'active': 1,
}
class cl_person(osv.osv):
_inherit = 'cl_person'
_columns = {
'person_address_ids': fields.one2many(
'cl_person.address',
'person_id',
'Person Addresses'
),
}
# class cl_address(osv.osv):
# _inherit = 'cl_address'
# _columns = {
# 'person_address_ids': fields.one2many(
# 'cl_person.address',
# 'address_id',
# 'Person Addresses'
# ),
# }
| agpl-3.0 | -7,330,202,957,231,574,000 | 39.088235 | 98 | 0.469552 | false |
Ezhil-Language-Foundation/open-tamil | solthiruthi/resources.py | 1 | 1405 | ## -*- coding: utf-8 -*-
## This file is part of Open-Tamil project.
## (C) 2015,2020 Muthiah Annamalai
##
from __future__ import print_function
import os
def _make_dict_with_path(srcfiles):
return dict([(srcfile.split(u".txt")[0], mk_path(srcfile)) for srcfile in srcfiles])
def get_data_dir():
dirname, filename = os.path.split(os.path.abspath(__file__))
return os.path.sep.join([dirname, u"data"])
def get_data_dictionaries():
srcfiles = {
"tamilvu": "tamilvu_dictionary_words.txt",
"projmad": "proj-madurai-040415.txt",
"wikipedia": "wikipedia_full_text_032915.txt",
"english": "english_dictionary_words.txt",
"parallel": "parallel_dictionary.txt",
"vatamozhi": "monierwilliams_dictionary_words.txt",
}
for k, v in srcfiles.items():
srcfiles[k] = mk_path(v)
return srcfiles
def get_data_categories():
# add new elements to end
srcfiles = [
"peyargal.txt",
"capitals-n-countries.txt",
"maligaiporul.txt",
"mooligaigal.txt",
"nagarangal.txt",
"palam.txt",
"vilangugal.txt",
"TamilStopWords.txt",
]
return _make_dict_with_path(srcfiles)
DATADIR = get_data_dir()
def mk_path(srcfile):
return os.path.sep.join([DATADIR, srcfile])
CATEGORY_DATA_FILES = get_data_categories()
DICTIONARY_DATA_FILES = get_data_dictionaries()
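# --- Illustrative usage sketch; not part of the original module. ---
if __name__ == "__main__":
    # Each mapping goes from a category/dictionary name to an absolute path under data/.
    print(CATEGORY_DATA_FILES["peyargal"])
    print(DICTIONARY_DATA_FILES["tamilvu"])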
| mit | 4,160,694,511,781,497,300 | 23.649123 | 88 | 0.627046 | false |
tysonholub/twilio-python | tests/integration/sync/v1/service/sync_list/test_sync_list_permission.py | 1 | 8015 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class SyncListPermissionTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions(identity="identity").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions/identity',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "identity",
"read": true,
"write": true,
"manage": true,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Permissions/identity"
}
'''
))
actual = self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions(identity="identity").fetch()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions(identity="identity").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions/identity',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions(identity="identity").delete()
self.assertTrue(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions.list()
self.holodeck.assert_has_request(Request(
'get',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"permissions": [],
"meta": {
"first_page_url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/sidOrUniqueName/Permissions?PageSize=50&Page=0",
"key": "permissions",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/sidOrUniqueName/Permissions?PageSize=50&Page=0"
}
}
'''
))
actual = self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions.list()
self.assertIsNotNone(actual)
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"permissions": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "identity",
"read": true,
"write": true,
"manage": true,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Permissions/identity"
}
],
"meta": {
"first_page_url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/sidOrUniqueName/Permissions?PageSize=50&Page=0",
"key": "permissions",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/sidOrUniqueName/Permissions?PageSize=50&Page=0"
}
}
'''
))
actual = self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions(identity="identity").update(read=True, write=True, manage=True)
values = {'Read': True, 'Write': True, 'Manage': True, }
self.holodeck.assert_has_request(Request(
'post',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions/identity',
data=values,
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "identity",
"read": true,
"write": true,
"manage": true,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Permissions/identity"
}
'''
))
actual = self.client.sync.v1.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_permissions(identity="identity").update(read=True, write=True, manage=True)
self.assertIsNotNone(actual)
| mit | 440,342,085,809,121,860 | 41.407407 | 165 | 0.560823 | false |
pinterest/kingpin | kingpin/metaconfig/metaconfig_utils.py | 1 | 19020 | #!/usr/bin/python
#
# Copyright 2016 Pinterest, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import simplejson
from kingpin.kazoo_utils import KazooClientManager
from kingpin.config_utils import ZKBaseConfigManager
log = logging.getLogger(__name__)
ZK_DOWNLOAD_DATA_PREFIX = None
METACONFIG_ZK_PATH_FORMAT = '/metaconfig/metaconfig/{}'
DEPENDENCY_ZK_PATH_FORMAT = '/metaconfig/dependency/{}'
ZK_DOWNLOAD_DATA_SERVERSET_FORMAT = 'zk_download_data.py -f /var/serverset/{} -p {} -m serverset'
ZK_DOWNLOAD_DATA_CONFIGV2_FORMAT = 'zk_download_data.py -f /var/config/{} --from-s3 {} -m config -p {}'
class MetaConfigManager(object):
"""
A manager which handles all dependencies and manageddata / serversets via MetaConfigs.
MetaConfig: Configurations which tell zk_update_monitor how to update / download serverset or configurations.
One MetaConfig example (in json):
[
{
"config_section_name": "config.manageddata.spam.test_grey_list",
"zk_path": "/config/manageddata/spam/test_grey_list",
"command": "zk_download_data.py -f /var/config/config.manageddata.spam.test_grey_list --from-s3 /data/config/manageddata/spam/test_grey_list -m config -p /config/manageddata/spam/test_grey_list",
"type": "config",
"max_wait_in_secs": 0
}
]
The manager has the following capabilities:
1. Create / update a MetaConfig and archive the history to S3 using ZKConfigManager.
2. Create a new dependency ZK Node
3. Add a metaconfig to existing dependency
4. Some templating : Auto-generating the Zk_download_data command for serverset / configv1 / configv2
5. Convert a .conf metaconfig file into json formatted metaconfig
"""
def __init__(self, zk_hosts, aws_keyfile, s3_bucket, s3_endpoint="s3.amazonaws.com"):
"""
To initialize the MetaConfigManager.
Args:
zk_hosts: The zookeeper hosts that MetaConfig manager need to put dependencies to.
aws_keyfile: The aws key file which contains aws access and secret keys.
s3_bucket: the s3 bucket that stores the metaconfigs.
s3_endpoint: the s3 endpoint which stores the metaconfigs.
"""
self.zk_hosts = zk_hosts
self.aws_keyfile = aws_keyfile
self.s3_bucket = s3_bucket
self.s3_endpoint = s3_endpoint
self._kazoo_client().ensure_path("/metaconfig/metaconfig")
self._kazoo_client().ensure_path("/metaconfig/dependency")
def _kazoo_client(self):
# Make the connection timeout long so the executive shell creating the
# metaconfigs will not timeout.
kazoo_client = KazooClientManager(self.zk_hosts,
start_timeout=200.0,
session_timeout=200.0).get_client()
if not kazoo_client:
KazooClientManager(self.zk_hosts)._reconnect()
kazoo_client = KazooClientManager(self.zk_hosts).get_client()
return kazoo_client
def _construct_absolute_dependency_leaf_node(self, source, destination):
destination_zk_path = DEPENDENCY_ZK_PATH_FORMAT.format(destination)
return "{}/{}".format(destination_zk_path, source)
def get_metaconfig_content(self, metaconfig_name):
"""
Get the JSON format of MetaConfig from S3.
Args:
metaconfig_name: the metaconfig's name.
Returns:
the content of metaconfig in json string
"""
metaconfig_zk_path = METACONFIG_ZK_PATH_FORMAT.format(metaconfig_name)
if not self._kazoo_client().exists(metaconfig_zk_path):
return ""
return ZKBaseConfigManager(self.zk_hosts, metaconfig_zk_path,
self.aws_keyfile, self.s3_bucket,
s3_endpoint=self.s3_endpoint).get_data()
def add_to_dependency(self, destination_dependency, new_member):
"""
Add a metaconfig / dependency to another dependency.
The metaconfig / dependency should already exist.
Args:
destination_dependency:
                The destination dependency to be added to.
                Should always end with ".dep".
new_member: the source dependency / metaconfig to add to destination_dependency.
If a dependency, should end with .dep
Returns:
            An exception is thrown if the operation fails.
"""
destination_zk_path = DEPENDENCY_ZK_PATH_FORMAT.format(destination_dependency)
self._kazoo_client().ensure_path(destination_zk_path)
# Ensure source is a valid existing metaconfig / dependency
if new_member.endswith(".dep"):
source_zk_path = DEPENDENCY_ZK_PATH_FORMAT.format(new_member)
else:
source_zk_path = METACONFIG_ZK_PATH_FORMAT.format(new_member)
if not self._kazoo_client().exists(source_zk_path):
raise Exception("The source MetaConfig/Dep does not exist")
znode_path_added_to_dependency = self._construct_absolute_dependency_leaf_node(new_member,
destination_dependency)
if self._kazoo_client().exists(znode_path_added_to_dependency):
raise Exception("The dependency has already been added")
# Create a children node to that dependency node
try:
returned_path = self._kazoo_client().create(znode_path_added_to_dependency)
if returned_path == znode_path_added_to_dependency:
return
else:
raise Exception("Failed to add dependency because the returned path doesn't equal to desired path."
"Desired: %s, Actual Returned: %s", znode_path_added_to_dependency, returned_path)
except Exception as e:
raise Exception("Failed to add dependency: %s", e)
def create_dependency(self, dependency_name):
"""
Create a new dependency for metaconfigs.
Args:
            dependency_name: must end with ".dep"
return:
            An exception is thrown if the operation fails.
"""
if not dependency_name.endswith(".dep"):
raise Exception("The dpendency name should be ended with '.dep'")
zk_path = DEPENDENCY_ZK_PATH_FORMAT.format(dependency_name)
self._kazoo_client().ensure_path(zk_path)
def dependency_exists(self, dependency_name):
"""
Check if dependency already exists.
Args:
            dependency_name: must end with ".dep"
Returns:
            True if it exists, False if not.
"""
dependency_zk_path = DEPENDENCY_ZK_PATH_FORMAT.format(dependency_name)
if self._kazoo_client().exists(dependency_zk_path):
return True
else:
return False
def is_member_of_dependency(self, dependency_name, metaconfig_name):
"""
        Check if the metaconfig has already been added to the dependency.
Args:
dependency_name: the dependency name
metaconfig_name: the metaconfig name
Returns:
            True if already added, False if not.
"""
znode_path_added_to_dependency = self._construct_absolute_dependency_leaf_node(metaconfig_name, dependency_name)
if self._kazoo_client().exists(znode_path_added_to_dependency):
return True
else:
return False
def remove_dependency(self, destination_dependency, member_to_remove):
"""
Remove a metaconfig / dependency from another dependency.
Args:
destination_dependency:
                The destination dependency to be removed from.
                Should always end with ".dep".
member_to_remove:
the member under that dependency to remove.
Returns:
            An exception is thrown if the operation fails.
"""
destination_zk_path = DEPENDENCY_ZK_PATH_FORMAT.format(destination_dependency)
if not self._kazoo_client().exists(destination_zk_path):
raise Exception("The dependency to delete from doesn't exist")
znode_path_deleted_from_dependency = self._construct_absolute_dependency_leaf_node(
member_to_remove, destination_dependency)
if not self._kazoo_client().exists(znode_path_deleted_from_dependency):
raise Exception("The dependency relation doesn't exist")
try:
self._kazoo_client().delete(znode_path_deleted_from_dependency)
except Exception as e:
raise Exception("Failed to remove dependency: %s", e)
def get_dependency_members(self, dependency_name):
"""
Get a list of names of dependency members of a dependency
Args:
dependency_name: the name of dependency (should end with .dep)
Returns:
a list of dependency member names
"""
dependency_path = DEPENDENCY_ZK_PATH_FORMAT.format(dependency_name)
return self._kazoo_client().get_children(dependency_path)
def update_metaconfig(self, name, content):
"""
        Put Metaconfig in S3 and create corresponding ZK nodes.
We will reuse the logic in config_utils to guarantee the writes to both S3 and ZkNode.
In this case, because we don't sync MetaConfig to local files, we will always turn force_update to True
so the change will be populated to Zookeeper and S3.
We will also enable audit history for all changes related to MetaConfig contents.
        If the MetaConfig is not in ZK, we will force-create it; if it is in ZK, we will update it.
Args:
name: the name of config. For example, discovery.serverset.prod
content: the content of the MetaConfig, should be in JSON format.
Returns:
            True if successfully updated, otherwise an exception is thrown.
"""
metaconfig_zk_path = METACONFIG_ZK_PATH_FORMAT.format(name)
try:
return ZKBaseConfigManager(self.zk_hosts, metaconfig_zk_path,
self.aws_keyfile, self.s3_bucket,
s3_endpoint=self.s3_endpoint).update_zk(None, content, force_update=True)
except Exception as e:
raise Exception("Failed to create MetaConfig: %s", e)
def metaconfig_exists(self, name):
"""
Check if the metaconfig already exists.
Args:
name: the name of metaconfig.
Returns:
True if exists, False if not.
"""
metaconfig_zk_path = METACONFIG_ZK_PATH_FORMAT.format(name)
if self._kazoo_client().exists(metaconfig_zk_path):
return True
else:
return False
def create_default_metaconfig_for_config(self, name, config_zk_path):
"""
A shortcut to create a metaconfig for config using the default template.
Also create the zk node for the config according to the zk_path passed in.
Args:
name: the name of config. For example,
config.manageddata.admin.decider
config_zk_path: the zookeeper path mapping to this config
Returns:
throw exception if creation failed. Otherwise return the metaconfig body.
"""
self._kazoo_client().ensure_path(config_zk_path)
download_command = self.construct_zk_download_data_command_for_config(config_zk_path)
metaconfig_content = self.construct_metaconfig_with_single_section(
name, config_zk_path, download_command)
try:
self.update_metaconfig(name, metaconfig_content)
except:
# Fail to create. Delete the path
self.remove_metaconfig(name)
raise
return metaconfig_content
def create_default_metaconfig_for_serverset(self, name, serverset_zk_path):
"""
A shortcut to create a serverset using the default template.
Also create the zk node for the serverset according to the zk_path passed in.
Args:
name: the name of serverset. For example, /discovery/dataservices/prod
serverset_zk_path: the zookeeper path mapping to this serverset
Returns:
throw exception if creation failed. Otherwise return the metaconfig body
"""
self._kazoo_client().ensure_path(serverset_zk_path)
download_command = self.construct_zk_download_data_command_for_serverset(serverset_zk_path)
metaconfig_content = self.construct_metaconfig_with_single_section(
name, serverset_zk_path, download_command, is_serverset=True)
self.update_metaconfig(name, metaconfig_content)
return metaconfig_content
def update_metaconfig_with_dict(self, name, metaconfig_content_dict):
"""
Update the metaconfig content with the python dict format.
Args:
name: the name of config. For example, discovery.serverset.prod
content: the content of the MetaConfig, should be in dict format.
Returns:
            True if successfully updated, otherwise an exception is thrown.
"""
content = simplejson.dumps(metaconfig_content_dict)
        return self.update_metaconfig(name, content)
def _construct_single_metaconfig_section(self, config_section_name, zk_path,
command, max_wait_in_secs=0,
is_serverset=False):
"""
Used to construct the json content of a MetaConfig section.
A MetaConfig can have different sections, which is an array of multiple section dict in JSON format.
Args:
config_section_name: this is for backward compatibility. When we use old version of MetaConfig
as a file on the disk, this is used for python config parser to get metaconfig body. ZUM also
keeps it.
zk_path: the zookeeper path of the config (NOTE: not the metaconfig)
command: the command which tells zk_download_data.py how to download the data to local disk.
max_wait_in_secs: seconds that ZUM will delay the download of configs once get an update. Usually 0.
is_serverset: whether this MetaConfig is for serverset? False if it is for ConfigV1 /ConfigV2.
Returns:
A dict of single metaconfig section.
"""
content_dict = {"config_section_name": config_section_name,
"zk_path": zk_path,
"max_wait_in_secs": max_wait_in_secs,
"command": command}
content_dict['type'] = "serverset" if is_serverset else 'config'
return content_dict
def construct_metaconfig_with_single_section(self, config_section_name, zk_path, command,
max_wait_in_secs=0, is_serverset=False):
"""
Ideally a metaconfig should have only one section.
This method is used for creating a metaconfig with only one section.
Args:
config_section_name: this is for backward compatibility. When we use old version of MetaConfig
as a file on the disk, this is used for python config parser to get metaconfig body. ZUM also
keeps it.
zk_path: the zookeeper path of the config (NOTE: not the metaconfig)
command: the command which tells zk_download_data.py how to download the data to local disk.
max_wait_in_secs: seconds that ZUM will delay the download of configs once get an update. Usually 0.
is_serverset: whether this MetaConfig is for serverset? False if it is for ConfigV1 /ConfigV2.
Returns:
A json blob of metaconfig content.
"""
metaconfig = [self._construct_single_metaconfig_section(
config_section_name, zk_path, command, max_wait_in_secs=max_wait_in_secs,
is_serverset=is_serverset)]
return simplejson.dumps(metaconfig)
# some utils
def construct_zk_download_data_command_for_serverset(self, serverset_zk_path, serverset_file_name=None):
if not serverset_zk_path.startswith("/discovery/"):
raise Exception("The serverset zk path should start with "
"'/discovery/'. For example: /discovery/userservice/prod")
if not serverset_file_name:
serverset_file_name = ".".join(serverset_zk_path.split("/")[1:])
return ZK_DOWNLOAD_DATA_SERVERSET_FORMAT.format(serverset_file_name, serverset_zk_path)
def construct_zk_download_data_command_for_config(self, config_zk_path,
config_local_filename=None,
s3_path=None):
if not config_zk_path.startswith("/config/"):
raise Exception("The configv2 zk path should start with '/config/', "
"for example /config/manageddata/growth/popular_nux_topic_list "
"or /config/services/pinlater")
if not config_local_filename:
config_local_filename = ".".join(config_zk_path.split("/")[1:])
if not s3_path:
s3_path = "/data{}".format(config_zk_path)
return ZK_DOWNLOAD_DATA_CONFIGV2_FORMAT.format(config_local_filename, s3_path, config_zk_path)
def remove_metaconfig(self, metaconfig_name):
metaconfig_path = METACONFIG_ZK_PATH_FORMAT.format(metaconfig_name)
metaconfig_lock_path = metaconfig_path + ".lock"
transaction = self._kazoo_client().transaction()
transaction.delete(metaconfig_path)
transaction.delete(metaconfig_lock_path)
transaction.commit()
def get_all_metaconfigs(self):
parent_node = "/metaconfig/metaconfig"
children = self._kazoo_client().get_children(parent_node)
return [child for child in children if not child.endswith(".lock")]
def get_all_dependencies(self):
parent_node = "/metaconfig/dependency"
children = self._kazoo_client().get_children(parent_node)
        return [child for child in children if not child.endswith(".lock")]
| apache-2.0 | 1,019,133,969,854,918,900 | 43.441589 | 211 | 0.628549 | false |
Akylas/CouchPotatoServer | couchpotato/core/plugins/manage/main.py | 1 | 2849 | from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent, fireEventAsync
from couchpotato.core.helpers.request import jsonified, getParam
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
import os
import time
log = CPLog(__name__)
class Manage(Plugin):
def __init__(self):
fireEvent('scheduler.interval', identifier = 'manage.update_library', handle = self.updateLibrary, hours = 2)
addEvent('manage.update', self.updateLibrary)
addApiView('manage.update', self.updateLibraryView, docs = {
'desc': 'Update the library by scanning for new movies',
'params': {
'full': {'desc': 'Do a full update or just recently changed/added movies.'},
}
})
if not Env.get('dev'):
def updateLibrary():
self.updateLibrary(full = False)
addEvent('app.load', updateLibrary)
def updateLibraryView(self):
full = getParam('full', default = 1)
fireEventAsync('manage.update', full = True if full == '1' else False)
return jsonified({
'success': True
})
def updateLibrary(self, full = True):
last_update = float(Env.prop('manage.last_update', default = 0))
if self.isDisabled() or (last_update > time.time() - 20):
return
directories = self.directories()
added_identifiers = []
for directory in directories:
if not os.path.isdir(directory):
if len(directory) > 0:
log.error('Directory doesn\'t exist: %s', directory)
continue
log.info('Updating manage library: %s', directory)
identifiers = fireEvent('scanner.folder', folder = directory, newer_than = last_update if not full else 0, single = True)
if identifiers:
added_identifiers.extend(identifiers)
# Break if CP wants to shut down
if self.shuttingDown():
break
# If cleanup option is enabled, remove offline files from database
if self.conf('cleanup') and full and not self.shuttingDown():
# Get movies with done status
total_movies, done_movies = fireEvent('movie.list', status = 'done', single = True)
for done_movie in done_movies:
if done_movie['library']['identifier'] not in added_identifiers:
fireEvent('movie.delete', movie_id = done_movie['id'], delete_from = 'all')
Env.prop('manage.last_update', time.time())
def directories(self):
try:
return [x.strip() for x in self.conf('library', default = '').split('::')]
except:
return []
| gpl-3.0 | -3,737,406,707,848,226,300 | 33.325301 | 133 | 0.599158 | false |
Amir-Hijack/Acc-Hijack | src/facebookLib.py | 1 | 1769 | # -*- coding: utf-8 -*-
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import sys
reload(sys)
sys.setdefaultencoding('utf8')
''' facebookLib.py - Facebook bruteforce, separate from mainLib to prevent any errors.
Comprises a username checking method and the actual bruteforce method. '''
R = '\033[31m' # red
W = '\033[0m' # white (normal)
G = '\033[32m' # green
O = '\033[33m' # orange
def facebookCheck(username):
try:
driver = webdriver.Firefox()
driver.get("https://www.facebook.com/" + username)
assert (("Sorry, this page isn't available.") not in driver.page_source)
driver.close()
except AssertionError:
return 1
def facebookBruteforce(username, wordlist, delay):
driver = webdriver.Firefox()
driver.get("https://mbasic.facebook.com/login")
wordlist = open(wordlist, 'r')
for i in wordlist.readlines():
password = i.strip("\n")
try:
elem = driver.find_element_by_name("email")
elem.clear()
elem.send_keys(username)
elem = driver.find_element_by_name("pass")
elem.clear()
elem.send_keys(password)
elem.send_keys(Keys.RETURN)
print O + "[*] Username: %s | [*] Password: %s | Incorrect!\n" % (username, password) + W
sleep(delay)
assert (("Welcome to Facebook") in driver.title)
except AssertionError:
print G + "[*] Username: %s | [*] Password found: %s\n" % (username, password) + W
sys.exit(0)
except Exception, e:
print R + "[!] OOPs, something went wrong. Did you terminate the connection? [!]" + W
sys.exit(1)
| gpl-3.0 | -8,152,327,688,107,481,000 | 35.102041 | 101 | 0.595252 | false |
menpo/serializablecallable | serializablecallable/base.py | 1 | 6281 | import inspect
import importlib
import sys
from collections import namedtuple, Hashable
from functools import partial
try:
from unittest.mock import MagicMock, patch # Python 3
except ImportError:
from mock import MagicMock, patch # Python 2
from inspect import getargspec
SerializedCallable = namedtuple('SerializedCallable',
['name', 'source', 'modules'])
is_py2 = sys.version_info.major == 2
is_py3 = sys.version_info.major == 3
def serialize_callable_and_test(c, modules):
# save the callable down
serialized_c = serialize_callable(c, modules)
# attempt to re-serialize
deserialize_callable(*serialized_c)
if serialized_c.source is not None:
# test the callable source
namespace = namespace_for_modules(modules)
mock_namespace = {k: MagicMock() for k in namespace
if not (k.startswith('__') and k.endswith('__'))}
# mock namespace means the function has access to the desired
# namespace only, but everything in there is a MagicMock instance
mock_c_rebuilt = deserialize_callable_in_namespace(
serialized_c.name, serialized_c.source, mock_namespace)
test_callable(mock_c_rebuilt)
return serialized_c
def test_callable(c):
nargs = len(getargspec(c).args)
args = [MagicMock() for _ in range(nargs)]
# first we need to get a handle on the builtin import method.
if is_py2:
import __builtin__
import_string = '__builtin__.__import__'
elif is_py3:
import builtins as __builtin__
import_string = 'builtins.__import__'
# make sure we keep this, otherwise we won't be able to fix our patching!
orig_import = __builtin__.__import__
def import_mock(name, *args):
# we are in our callable and we are attempting to import something.
# we want to confirm that the item exists, so we flip back to real
# importing and import the object. Note that it's critical that we flip
# this back as recursive importing would otherwise be damaged by the
# patch.
__builtin__.__import__ = orig_import
orig_import(name)
# the import was successful! Now give a mock back instead of the real
# thing.
__builtin__.__import__ = import_mock
# and just vend a MagicMock back..
return MagicMock()
# invoke our callable with import patched.
with patch(import_string, side_effect=import_mock):
c(*args)
# to make ourselves feel better, never leave this function without checking
# that we have restored order to the universe!
assert __builtin__.__import__ == orig_import
def serialize_callable(c, modules):
# build the namespace mapping {name: callable}
name_to_callable = {n: s for n, s in namespace_for_modules(modules).items()
if callable(s) and isinstance(s, Hashable)}
module_names = [module_to_str(m) for m in modules]
# build the inverse mapping for callables {callable: name}
callable_to_name = {s: n for n, s in name_to_callable.items()}
# see if c is in the module namespace
name = callable_to_name.get(c)
if name is not None:
# c is directly in the namespace - easy to serialize.
return SerializedCallable(name, None, module_names)
elif hasattr(c, 'h5_source'):
# c is a novel function that has its own source attached.
return SerializedCallable(c.__name__, c.h5_source, module_names)
elif isinstance(c, partial):
# Special case: c is a partially applied function (that isn't directly
# in the namespace of the modules)
# currently not supported, could be added
raise ValueError("Partial function serialization is not yet supported")
else:
# c is a novel function and needs to be introspected for its
# definition
source = extract_source(c)
return SerializedCallable(c.__name__, source, module_names)
def extract_source(c):
source = inspect.getsource(c)
lines = source.splitlines()
l = lines[0]
# find any leading whitespace on the function and strip it
leading_space = len(l) - len(l.lstrip())
return '\n'.join([l[leading_space:] for l in lines])
def deserialize_callable(name, source, modules):
namespace = namespace_for_modules([str_to_module(m) for m in modules])
return deserialize_callable_in_namespace(name, source, namespace)
def deserialize_callable_in_namespace(name, source, namespace):
if source is None:
# must be directly in namespace
return namespace[name]
else:
# exec the callable in this namespace
return safe_exec(source, namespace, name)
def str_to_module(module_str):
return importlib.import_module(module_str)
def module_to_str(module):
return module.__name__
def namespace_for_module(module):
return dict(inspect.getmembers(module))
def namespace_for_modules(modules):
namespace = {}
for m in modules:
namespace.update(namespace_for_module(m))
return namespace
def safe_exec(source, namespace, name):
r"""
Execs a function definition in a certain namespace, returning the
function.
"""
namespace = namespace.copy()
exec(source, namespace)
f = namespace[name]
f.h5_source = source
return f
# rough idea of partial support, difficult though.
# Python 3.3+ makes this trivial:
# https://docs.python.org/3/library/inspect.html#inspect.signature
#
# def source_for_partial(p):
# arg_str = [str(a) for a in p.args]
# kwarg_str = ['{}={}'.format(*i) for i in p.keywords.items()]
# args = ', '.join([p.func.__name__] + arg_str + kwarg_str)
# return "partial({})".format(args)
class SerializableCallable(object):
def __init__(self, callable, modules):
self.callable = callable
self.modules = modules
def __getstate__(self):
serialized_c = serialize_callable_and_test(self.callable, self.modules)
return dict(serialized_c._asdict())
def __setstate__(self, state):
self.callable = deserialize_callable(state['name'], state['source'],
state['modules'])
self.modules = state['modules']
| bsd-3-clause | -4,669,458,925,515,064,000 | 34.286517 | 79 | 0.651329 | false |
syllog1sm/TextBlob | text/blob.py | 1 | 29687 | # -*- coding: utf-8 -*-
'''Wrappers for various units of text.'''
from __future__ import unicode_literals
import sys
import json
import string as pystring
from collections import defaultdict
from .packages import nltk
from .decorators import cached_property
from .utils import lowerstrip, PUNCTUATION_REGEX
from .inflect import singularize as _singularize, pluralize as _pluralize
from .mixins import ComparableMixin
from .compat import (string_types, unicode, basestring,
python_2_unicode_compatible, u)
from .np_extractors import BaseNPExtractor, FastNPExtractor
from .taggers import BaseTagger, PatternTagger
from .tokenizers import BaseTokenizer, WordTokenizer, SentenceTokenizer
from .sentiments import BaseSentimentAnalyzer, PatternAnalyzer
from .parsers import BaseParser, PatternParser
from .translate import Translator
from .en import suggest
from .exceptions import MissingCorpusException
class Word(unicode):
'''A simple word representation.'''
translator = Translator()
def __new__(cls, string, pos_tag=None):
'''Return a new instance of the class. It is necessary to override
this method in order to handle the extra pos_tag argument in the
constructor.
'''
return super(Word, cls).__new__(cls, string)
def __init__(self, string, pos_tag=None):
self.string = string
self.pos_tag = pos_tag
def __repr__(self):
return repr(self.string)
def __str__(self):
return self.string
def singularize(self):
'''Return the singular version of the word as a string.'''
return Word(_singularize(self.string))
def pluralize(self):
'''Return the plural version of the word as a string.'''
return Word(_pluralize(self.string))
def translate(self, from_lang=None, to="en"):
'''Translate the word to another language using Google's
Translate API.
.. versionadded:: 0.5.0
'''
if from_lang is None:
from_lang = self.translator.detect(self.string)
return self.translator.translate(self.string,
from_lang=from_lang, to_lang=to)
def detect_language(self):
'''Detect the word's language using Google's Translate API.
.. versionadded:: 0.5.0
'''
return self.translator.detect(self.string)
def spellcheck(self):
'''Return a list of (word, confidence) tuples of spelling corrections.
Based on: Peter Norvig, "How to Write a Spelling Corrector"
(http://norvig.com/spell-correct.html) as implemented in the pattern
library.
.. versionadded:: 0.6.0
'''
return suggest(self.string)
def correct(self):
'''Correct the spelling of the word. Returns the word with the highest
confidence using the spelling corrector.
.. versionadded:: 0.6.0
'''
return Word(self.spellcheck()[0][0])
@cached_property
def lemma(self):
'''Return the lemma for a word using WordNet's morphy function.'''
lemmatizer = nltk.stem.WordNetLemmatizer()
try:
return lemmatizer.lemmatize(self.string)
except LookupError as err:
print(err)
raise MissingCorpusException()
class WordList(list):
'''A list-like collection of words.'''
def __init__(self, collection):
'''Initialize a WordList. Takes a collection of strings as
its only argument.
'''
self._collection = [Word(w) for w in collection]
super(WordList, self).__init__(self._collection)
def __repr__(self):
'''Returns a string representation for debugging.'''
class_name = self.__class__.__name__
# String representation of words
strings = [unicode(w) for w in self._collection]
if len(self) > 60:
return '{cls}({beginning}...{end})'.format(cls=class_name,
beginning=strings[:3],
end=strings[-3:])
else:
return '{cls}({lst})'.format(cls=class_name, lst=strings)
def __getitem__(self, key):
'''Returns a string at the given index.'''
if isinstance(key, slice):
return self.__class__(self._collection[key])
else:
return self._collection[key]
def __getslice__(self, i, j):
# This is included for Python 2.* compatibility
return self.__class__(self._collection[i:j])
def __iter__(self):
return iter(self._collection)
def count(self, strg, case_sensitive=False, *args, **kwargs):
"""Get the count of a word or phrase `s` within this WordList.
:param strg: The string to count.
:param case_sensitive: A boolean, whether or not the search is case-sensitive.
"""
if not case_sensitive:
return [word.lower() for word in self].count(strg.lower(), *args,
**kwargs)
return self._collection.count(strg, *args, **kwargs)
def append(self, obj):
'''Append an object to the end. If the object is a string, appends a
``Word`` object.
'''
if isinstance(obj, basestring):
return self._collection.append(Word(obj))
else:
return self._collection.append(obj)
def extend(self, iterable):
'''Extend WordList by appending elements from ``iterable``. If an element
is a string, appends a ``Word`` object.
'''
[self._collection.append(Word(e) if isinstance(e, basestring) else e)
for e in iterable]
return self
def upper(self):
'''Return a new WordList with each word upper-cased.'''
return self.__class__([word.upper() for word in self])
def lower(self):
'''Return a new WordList with each word lower-cased.'''
return self.__class__([word.lower() for word in self])
def singularize(self):
'''Return the singular version of each word in this WordList.'''
return self.__class__([word.singularize() for word in self])
def pluralize(self):
'''Return the plural version of each word in this WordList.'''
return self.__class__([word.pluralize() for word in self])
def lemmatize(self):
'''Return the lemma of each word in this WordList.'''
return self.__class__([word.lemma for word in self])
def _validated_param(obj, name, base_class, default, base_class_name=None):
'''Validates a parameter passed to __init__. Makes sure that obj is
of the correct class. Returns obj if it's not None, otherwise falls back to default.
:param obj: The object passed in.
:param name: The name of the parameter.
:param base_class: The class that obj must inherit from.
:param default: The default object to fall back upon if obj is None.
'''
base_class_name = base_class_name if base_class_name else base_class.__name__
if obj is not None and not isinstance(obj, base_class):
raise ValueError("{name} must be an instance of {cls}"
.format(name=name, cls=base_class_name))
return obj if obj else default
def _initialize_models(obj, tokenizer, pos_tagger,
np_extractor, analyzer, parser, classifier):
"""Common initialization between BaseBlob and Blobber classes."""
# tokenizer may be a textblob or an NLTK tokenizer
obj.tokenizer = _validated_param(tokenizer, "tokenizer",
base_class=(BaseTokenizer, nltk.tokenize.api.TokenizerI),
default=BaseBlob.tokenizer,
base_class_name="BaseTokenizer")
obj.np_extractor = _validated_param(np_extractor, "np_extractor",
base_class=BaseNPExtractor,
default=BaseBlob.np_extractor)
obj.pos_tagger = _validated_param(pos_tagger, "pos_tagger",
BaseTagger, BaseBlob.pos_tagger)
obj.analyzer = _validated_param(analyzer, "analyzer",
BaseSentimentAnalyzer, BaseBlob.analyzer)
obj.parser = _validated_param(parser, "parser", BaseParser, BaseBlob.parser)
obj.classifier = classifier
@python_2_unicode_compatible
class BaseBlob(ComparableMixin):
'''An abstract base class that all text.blob classes will inherit from.
Includes words, POS tag, NP, and word count properties. Also includes
basic dunder and string methods for making objects like Python strings.
:param text: A string.
:param tokenizer: (optional) A tokenizer instance. If ``None``,
defaults to :class:`WordTokenizer() <text.tokenizers.WordTokenizer>`.
:param np_extractor: (optional) An NPExtractor instance. If ``None``,
defaults to :class:`FastNPExtractor() <text.np_extractors.FastNPExtractor>`.
:param pos_tagger: (optional) A Tagger instance. If ``None``,
defaults to :class:`PatternTagger <text.taggers.PatternTagger>`.
:param analyzer: (optional) A sentiment analyzer. If ``None``,
defaults to :class:`PatternAnalyzer <text.sentiments.PatternAnalyzer>`.
:param parser: A parser. If ``None``, defaults to
:class:`PatternParser <text.parsers.PatternParser>`.
:param classifier: A classifier.
.. versionchanged:: 0.6.0
``clean_html`` parameter deprecated, as it was in NLTK.
'''
np_extractor = FastNPExtractor()
pos_tagger = PatternTagger()
tokenizer = WordTokenizer()
translator = Translator()
analyzer = PatternAnalyzer()
parser = PatternParser()
def __init__(self, text, tokenizer=None,
pos_tagger=None, np_extractor=None, analyzer=None,
parser=None, classifier=None, clean_html=False):
if type(text) not in string_types:
raise TypeError('The `text` argument passed to `__init__(text)` '
'must be a string, not {0}'.format(type(text)))
if clean_html:
raise NotImplementedError("clean_html has been deprecated. "
"To remove HTML markup, use BeautifulSoup's "
"get_text() function")
self.raw = self.string = text
self.stripped = lowerstrip(self.raw, all=True)
_initialize_models(self, tokenizer, pos_tagger, np_extractor, analyzer, parser, classifier)
@cached_property
def words(self):
'''Return a list of word tokens. This excludes punctuation characters.
If you want to include punctuation characters, access the ``tokens``
property.
'''
return WordList(WordTokenizer().itokenize(self.raw, include_punc=False))
@cached_property
def tokens(self):
'''Return a list of tokens, using this blob's tokenizer object
(defaults to :class:`WordTokenizer <text.tokenizers.WordTokenizer>`).
'''
return WordList(self.tokenizer.tokenize(self.raw))
def tokenize(self, tokenizer=None):
'''Return a list of tokens, using ``tokenizer``.
:param tokenizer: (optional) A tokenizer object. If None, defaults to
this blob's default tokenizer.
'''
t = tokenizer if tokenizer is not None else self.tokenizer
return WordList(t.tokenize(self.raw))
def parse(self, parser=None):
'''Parse the text.
:param parser: (optional) A parser instance. If ``None``, defaults to
this blob's default parser.
.. versionadded:: 0.6.0
'''
p = parser if parser is not None else self.parser
return p.parse(self.raw)
def classify(self):
'''Classify the blob using the blob's ``classifier``.'''
if self.classifier is None:
raise NameError("This blob has no classfier. Train one first!")
return self.classifier.classify(self.raw)
@cached_property
def sentiment(self):
'''Return a tuple of the form (polarity, subjectivity) where polarity
is a float within the range [-1.0, 1.0] and subjectivity is a float
within the range [0.0, 1.0] where 0.0 is very objective and 1.0 is
very subjective.
:rtype: tuple
'''
return self.analyzer.analyze(self.raw)
@cached_property
def polarity(self):
'''Return the polarity score as a float within the range [-1.0, 1.0]
:rtype: float
'''
return PatternAnalyzer().analyze(self.raw)[0]
@cached_property
def subjectivity(self):
'''Return the subjectivity score as a float within the range [0.0, 1.0]
where 0.0 is very objective and 1.0 is very subjective.
:rtype: float
'''
return PatternAnalyzer().analyze(self.raw)[1]
@cached_property
def noun_phrases(self):
'''Returns a list of noun phrases for this blob.'''
return WordList([phrase.strip().lower()
for phrase in self.np_extractor.extract(self.raw)
if len(phrase) > 1])
@cached_property
def pos_tags(self):
'''Returns a list of tuples of the form (word, POS tag).
Example:
::
[('At', 'IN'), ('eight', 'CD'), ("o'clock", 'JJ'), ('on', 'IN'),
('Thursday', 'NNP'), ('morning', 'NN')]
:rtype: list of tuples
'''
return [(Word(word, pos_tag=t), unicode(t))
for word, t in self.pos_tagger.tag(self.raw)
if not PUNCTUATION_REGEX.match(unicode(t))]
tags = pos_tags
@cached_property
def word_counts(self):
'''Dictionary of word frequencies in this text.
'''
counts = defaultdict(int)
stripped_words = [lowerstrip(word) for word in self.words]
for word in stripped_words:
counts[word] += 1
return counts
@cached_property
def np_counts(self):
'''Dictionary of noun phrase frequencies in this text.
'''
counts = defaultdict(int)
for phrase in self.noun_phrases:
counts[phrase] += 1
return counts
def ngrams(self, n=3):
'''Return a list of n-grams (tuples of n successive words) for this
blob.
'''
if n <= 0:
return []
grams = [WordList(self.words[i:i+n])
for i in range(len(self.words) - n + 1)]
return grams
def translate(self, from_lang=None, to="en"):
'''Translate the blob to another language.
Uses the Google Translate API. Returns a new TextBlob.
Requires an internet connection.
Usage:
::
>>> b = TextBlob("Simple is better than complex")
>>> b.translate(to="es")
TextBlob('Lo simple es mejor que complejo')
Language code reference:
https://developers.google.com/translate/v2/using_rest#language-params
.. versionadded:: 0.5.0.
:param from_lang: Language to translate from. If ``None``, will attempt
to detect the language.
:param to: Language to translate to.
:rtype: BaseBlob
'''
if from_lang is None:
from_lang = self.translator.detect(self.string)
return self.__class__(self.translator.translate(self.raw,
from_lang=from_lang, to_lang=to))
def detect_language(self):
'''Detect the blob's language using the Google Translate API.
Requires an internet connection.
Usage:
::
>>> b = TextBlob("bonjour")
>>> b.detect_language()
u'fr'
Language code reference:
https://developers.google.com/translate/v2/using_rest#language-params
.. versionadded:: 0.5.0
:rtype: str
'''
return self.translator.detect(self.raw)
def correct(self):
'''Attempt to correct the spelling of a blob.
.. versionadded:: 0.6.0
:rtype: BaseBlob
'''
tok = WordTokenizer()
corrected = (Word(w).correct() for w in tok.tokenize(self.raw, include_punc=True))
# Separate each token with a space unless the token is a punctuation mark
ret = ''
for i, word in enumerate(corrected):
# Avoid an extra space at the beginning
if word in pystring.punctuation or i == 0:
ret = ''.join([ret, word])
else:
ret = ' '.join([ret, word])
return self.__class__(ret)
def __repr__(self):
'''Returns a string representation for debugging.'''
class_name = self.__class__.__name__
return "{cls}({text})".format(cls=class_name,
text=repr(self.raw))
def __len__(self):
'''Returns the length of the raw text.'''
return len(self.raw)
def __str__(self):
'''Returns a string representation used in print statements
or str(my_blob).'''
return self.raw
def __unicode__(self):
'''Returns the unicode representation of the blob.'''
return u(self.raw)
def __iter__(self):
'''Makes the object iterable as if it were a string,
iterating through the raw string's characters.
'''
return iter(self.raw)
def _cmpkey(self):
'''Key used by ComparableMixin to implement all rich comparison
operators.
'''
return self.raw
def __eq__(self, other):
'''Equality comparator. Blobs are equal to blobs with the same
text and also to their string counterparts.
'''
if type(other) in string_types:
return self.raw == other
else:
return super(BaseBlob, self).__eq__(other)
def __hash__(self):
return hash(self._cmpkey())
def __getitem__(self, index):
'''Returns a substring. If index is an integer, returns a Python
string of a single character. If a range is given, e.g. `blob[3:5]`,
a new instance of the class is returned.
'''
if isinstance(index, int):
return self.raw[index] # Just return a single character
else:
# Return a new blob object
return self.__class__(self.raw[index])
def __add__(self, other):
'''Concatenates two text objects the same way Python strings are
concatenated.
Arguments:
- `other`: a string or a text object
'''
if type(other) in string_types:
return TextBlob(self.raw + other)
elif isinstance(other, BaseBlob):
return TextBlob(self.raw + other.raw)
else:
raise TypeError('Operands must be either strings or {0} objects'
.format(self.__class__.__name__))
def __contains__(self, sub):
'''Implements the `in` keyword like a Python string.'''
return sub in self.raw
def find(self, sub, start=0, end=sys.maxsize):
'''Behaves like the built-in str.find() method. Returns an integer,
the index of the first occurrence of the substring argument sub in the
sub-string given by [start:end].
'''
return self.raw.find(sub, start, end)
def rfind(self, sub, start=0, end=sys.maxsize):
'''Behaves like the built-in str.rfind() method. Returns an integer,
the index of the last (right-most) occurrence of the substring argument
sub in the sub-sequence given by [start:end].
'''
return self.raw.rfind(sub, start, end)
def index(self, sub, start=0, end=sys.maxsize):
'''Like blob.find() but raise ValueError when the substring
is not found.
'''
return self.raw.index(sub, start, end)
def startswith(self, prefix, start=0, end=sys.maxsize):
"""Returns True if the blob starts with the given prefix."""
return self.raw.startswith(prefix, start, end)
def endswith(self, suffix, start=0, end=sys.maxsize):
"""Returns True if the blob ends with the given suffix."""
return self.raw.endswith(suffix, start, end)
# PEP8 aliases
starts_with = startswith
ends_with = endswith
def title(self):
"""Returns a blob object with the text in title-case."""
return self.__class__(self.raw.title())
def format(self, *args, **kwargs):
"""Perform a string formatting operation, like the built-in
`str.format(*args, **kwargs)`. Returns a blob object.
"""
return self.__class__(self.raw.format(*args, **kwargs))
def split(self, sep=None, maxsplit=sys.maxsize):
"""Behaves like the built-in str.split() except returns a
WordList.
"""
return WordList(self.raw.split(sep, maxsplit))
def strip(self, chars=None):
"""Behaves like the built-in str.strip([chars]) method. Returns
an object with leading and trailing whitespace removed.
"""
return self.__class__(self.raw.strip(chars))
def upper(self):
"""Like str.upper(), returns new object with all upper-cased characters.
"""
return self.__class__(self.raw.upper())
def lower(self):
"""Like str.lower(), returns new object with all lower-cased characters.
"""
return self.__class__(self.raw.lower())
def join(self, iterable):
"""Behaves like the built-in `str.join(iterable)` method, except
returns a blob object.
Returns a blob which is the concatenation of the strings or blobs
in the iterable.
"""
return self.__class__(self.raw.join(iterable))
def replace(self, old, new, count=sys.maxsize):
"""Return a new blob object with all the occurence of `old` replaced
by `new`.
"""
return self.__class__(self.raw.replace(old, new, count))
class TextBlob(BaseBlob):
"""A general text block, meant for larger bodies of text (esp. those
containing sentences). Inherits from :class:`BaseBlob <BaseBlob>`.
:param text: A string.
:param tokenizer: (optional) A tokenizer instance. If ``None``, defaults to
:class:`WordTokenizer() <text.tokenizers.WordTokenizer>`.
:param np_extractor: (optional) An NPExtractor instance. If ``None``,
defaults to :class:`FastNPExtractor() <text.np_extractors.FastNPExtractor>`.
:param pos_tagger: (optional) A Tagger instance. If ``None``, defaults to
:class:`PatternTagger <text.taggers.PatternTagger>`.
:param analyzer: (optional) A sentiment analyzer. If ``None``, defaults to
:class:`PatternAnalyzer <text.sentiments.PatternAnalyzer>`.
:param classifier: (optional) A classifier.
"""
@cached_property
def sentences(self):
'''Return list of :class:`Sentence <Sentence>` objects.'''
return self._create_sentence_objects()
@cached_property
def words(self):
'''Return a list of word tokens. This excludes punctuation characters.
If you want to include punctuation characters, access the ``tokens``
property.
'''
# NLTK's word tokenizer expects sentences as input, so tokenize the
# blob into sentences before tokenizing to words
words = []
for sent in self.sentences:
words.extend(WordTokenizer().tokenize(sent.raw, include_punc=False))
return WordList(words)
@property
def raw_sentences(self):
'''List of strings, the raw sentences in the blob.'''
return [sentence.raw for sentence in self.sentences]
@property
def serialized(self):
'''Returns a list of each sentence's dict representation.'''
return [sentence.dict for sentence in self.sentences]
def to_json(self, *args, **kwargs):
'''Return a json representation (str) of this blob.
Takes the same arguments as json.dumps.
.. versionadded:: 0.5.1
'''
return json.dumps(self.serialized, *args, **kwargs)
@property
def json(self):
'''The json representation of this blob.
.. versionchanged:: 0.5.1
Made ``json`` a property instead of a method to restore backwards
compatibility that was broken after version 0.4.0.
'''
return self.to_json()
def _create_sentence_objects(self):
'''Returns a list of Sentence objects given
a list of sentence strings. Attempts to handle sentences that
have more than one punctuation mark at the end of the sentence.
Examples: "An ellipses is no problem..." or "This is awesome!!!"
'''
sent_tokenizer = SentenceTokenizer()
sentence_objects = []
sentences = sent_tokenizer.itokenize(self.raw)
char_index = 0 # Keeps track of character index within the blob
for sent in sentences:
# Compute the start and end indices of the sentence
# within the blob
start_index = self.raw.index(sent, char_index)
char_index += len(sent)
end_index = start_index + len(sent)
# Sentences share the same models as their parent blob
s = Sentence(sent, start_index=start_index, end_index=end_index,
tokenizer=self.tokenizer, np_extractor=self.np_extractor,
pos_tagger=self.pos_tagger, analyzer=self.analyzer,
parser=self.parser, classifier=self.classifier)
sentence_objects.append(s)
return sentence_objects
class Sentence(BaseBlob):
'''A sentence within a TextBlob. Inherits from :class:`BaseBlob <BaseBlob>`.
:param sentence: A string, the raw sentence.
:param start_index: An int, the index where this sentence begins
in a TextBlob. If not given, defaults to 0.
:param end_index: An int, the index where this sentence ends in
a TextBlob. If not given, defaults to the
length of the sentence - 1.
'''
def __init__(self, sentence, start_index=0, end_index=None, *args, **kwargs):
super(Sentence, self).__init__(sentence, *args, **kwargs)
self.start = self.start_index = start_index
self.end = self.end_index = end_index if end_index else len(sentence) - 1
@property
def dict(self):
'''The dict representation of this sentence.'''
return {
'raw': self.raw,
'start_index': self.start_index,
'end_index': self.end_index,
'stripped': self.stripped,
'noun_phrases': self.noun_phrases,
'polarity': self.polarity,
'subjectivity': self.subjectivity,
}
class Blobber(object):
'''A factory for TextBlobs that all share the same tagger,
tokenizer, parser, classifier, and np_extractor.
Usage:
>>> from text.blob import Blobber
>>> from text.taggers import NLTKTagger
>>> from text.tokenizers import SentenceTokenizer
>>> tb = Blobber(pos_tagger=NLTKTagger(), tokenizer=SentenceTokenizer())
>>> blob1 = tb("This is one blob.")
>>> blob2 = tb("This blob has the same tagger and tokenizer.")
>>> blob1.pos_tagger is blob2.pos_tagger
True
:param tokenizer: (optional) A tokenizer instance. If ``None``,
defaults to :class:`WordTokenizer() <text.tokenizers.WordTokenizer>`.
:param np_extractor: (optional) An NPExtractor instance. If ``None``,
defaults to :class:`FastNPExtractor() <text.np_extractors.FastNPExtractor>`.
:param pos_tagger: (optional) A Tagger instance. If ``None``,
defaults to :class:`PatternTagger <text.taggers.PatternTagger>`.
:param analyzer: (optional) A sentiment analyzer. If ``None``,
defaults to :class:`PatternAnalyzer <text.sentiments.PatternAnalyzer>`.
:param parser: A parser. If ``None``, defaults to
:class:`PatternParser <text.parsers.PatternParser>`.
:param classifier: A classifier.
.. versionadded:: 0.4.0
'''
np_extractor = FastNPExtractor()
pos_tagger = PatternTagger()
tokenizer = WordTokenizer()
analyzer = PatternAnalyzer()
parser = PatternParser()
def __init__(self, tokenizer=None, pos_tagger=None, np_extractor=None,
analyzer=None, parser=None, classifier=None):
_initialize_models(self, tokenizer, pos_tagger, np_extractor, analyzer,
parser, classifier)
def __call__(self, text):
'''Return a new TextBlob object with this Blobber's ``np_extractor``,
``pos_tagger``, and ``tokenizer``.
:returns: A new TextBlob.
'''
return TextBlob(text, tokenizer=self.tokenizer, pos_tagger=self.pos_tagger,
np_extractor=self.np_extractor, analyzer=self.analyzer,
classifier=self.classifier)
def __repr__(self):
classifier_name = self.classifier.__class__.__name__ + "()" if self.classifier else "None"
return ("Blobber(tokenizer={0}(), pos_tagger={1}(), "
"np_extractor={2}(), analyzer={3}(), parser={4}(), classifier={5})")\
.format(self.tokenizer.__class__.__name__,
self.pos_tagger.__class__.__name__,
self.np_extractor.__class__.__name__,
self.analyzer.__class__.__name__,
self.parser.__class__.__name__,
classifier_name)
__str__ = __repr__
| mit | 800,950,500,900,574,200 | 36.295226 | 99 | 0.600936 | false |
b-jesch/service.fritzbox.callmonitor | resources/lib/PhoneBooks/pyicloud/services/reminders.py | 1 | 3587 | from __future__ import absolute_import
from datetime import datetime
import time
import uuid
import json
from tzlocal import get_localzone
class RemindersService(object):
def __init__(self, service_root, session, params):
self.session = session
self.params = params
self._service_root = service_root
self.lists = {}
self.collections = {}
self.refresh()
def refresh(self):
params_reminders = dict(self.params)
params_reminders.update({
'clientVersion': '4.0',
'lang': 'en-us',
'usertz': get_localzone().zone
})
# Open reminders
req = self.session.get(
self._service_root + '/rd/startup',
params=params_reminders
)
startup = req.json()
self.lists = {}
self.collections = {}
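# Group the returned reminders under their parent collection, keyed by collection title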
for collection in startup['Collections']:
temp = []
self.collections[collection['title']] = {
'guid': collection['guid'],
'ctag': collection['ctag']
}
for reminder in startup['Reminders']:
if reminder['pGuid'] != collection['guid']:
continue
if 'dueDate' in reminder:
if reminder['dueDate']:
due = datetime(
reminder['dueDate'][1],
reminder['dueDate'][2], reminder['dueDate'][3],
reminder['dueDate'][4], reminder['dueDate'][5]
)
else:
due = None
else:
due = None
if reminder['description']:
desc = reminder['description']
else:
desc = ""
temp.append({
"title": reminder['title'],
"desc": desc,
"due": due
})
self.lists[collection['title']] = temp
def post(self, title, description="", collection=None):
pguid = 'tasks'
if collection:
if collection in self.collections:
pguid = self.collections[collection]['guid']
params_reminders = dict(self.params)
params_reminders.update({
'clientVersion': '4.0',
'lang': 'en-us',
'usertz': get_localzone().zone
})
req = self.session.post(
self._service_root + '/rd/reminders/tasks',
data=json.dumps({
"Reminders": {
'title': title,
"description": description,
"pGuid": pguid,
"etag": None,
"order": None,
"priority": 0,
"recurrence": None,
"alarms": [],
"startDate": None,
"startDateTz": None,
"startDateIsAllDay": False,
"completedDate": None,
"dueDate": None,
"dueDateIsAllDay": False,
"lastModifiedDate": None,
"createdDate": None,
"isFamily": None,
"createdDateExtended": int(time.time()*1000),
"guid": str(uuid.uuid4())
},
"ClientState": {"Collections": self.collections.values()}
}),
params=params_reminders)
return req.ok
| gpl-2.0 | 6,290,168,076,146,102,000 | 31.609091 | 75 | 0.441595 | false |
praekelt/vumi-go | go/vumitools/tests/test_contact.py | 1 | 13126 | # -*- coding: utf-8 -*-
"""Tests for go.vumitools.contact."""
from twisted.internet.defer import inlineCallbacks
from vumi.tests.helpers import VumiTestCase
from go.vumitools.tests.utils import model_eq
from go.vumitools.contact import (
ContactStore, ContactError, ContactNotFoundError)
from go.vumitools.opt_out import OptOutStore
from go.vumitools.tests.helpers import VumiApiHelper
class TestContactStore(VumiTestCase):
@inlineCallbacks
def setUp(self):
self.vumi_helper = yield self.add_helper(VumiApiHelper())
self.user_helper = yield self.vumi_helper.make_user(u'user')
user_account = yield self.user_helper.get_user_account()
self.store = ContactStore.from_user_account(user_account)
self.alt_user_helper = yield self.vumi_helper.make_user(u'other_user')
alt_user_account = yield self.alt_user_helper.get_user_account()
self.store_alt = ContactStore.from_user_account(alt_user_account)
def assert_models_equal(self, m1, m2):
self.assertTrue(model_eq(m1, m2),
"Models not equal:\na: %r\nb: %r" % (m1, m2))
def assert_models_not_equal(self, m1, m2):
self.assertFalse(model_eq(m1, m2),
"Models unexpectedly equal:\na: %r\nb: %r" % (m1, m2))
@inlineCallbacks
def test_get_contact_by_key(self):
contact = yield self.store.new_contact(
name=u'J Random', surname=u'Person', msisdn=u'27831234567')
self.assert_models_equal(
contact, (yield self.store.get_contact_by_key(contact.key)))
def test_get_contact_by_key_for_nonexistent_contact(self):
return self.assertFailure(
self.store.get_contact_by_key(u'123'), ContactNotFoundError)
@inlineCallbacks
def test_new_group(self):
self.assertEqual(None, (yield self.store.get_group(u'group1')))
group = yield self.store.new_group(u'group1')
self.assertEqual(u'group1', group.name)
dbgroup = yield self.store.get_group(group.key)
self.assertEqual(u'group1', dbgroup.name)
self.assert_models_equal(group, dbgroup)
@inlineCallbacks
def test_list_groups(self):
self.assertEqual([], (yield self.store.list_groups()))
group1 = yield self.store.new_group(u'group1')
group2 = yield self.store.new_group(u'group2')
sgroup1 = yield self.store.new_smart_group(u'sgroup1', u'surname:"a"')
sgroup2 = yield self.store.new_smart_group(u'sgroup2', u'surname:"a"')
[g1, g2, sg1, sg2] = yield self.store.list_groups()
self.assert_models_equal(group1, g1)
self.assert_models_equal(group2, g2)
self.assert_models_equal(sgroup1, sg1)
self.assert_models_equal(sgroup2, sg2)
@inlineCallbacks
def test_list_smart_groups(self):
self.assertEqual([], (yield self.store.list_smart_groups()))
yield self.store.new_group(u'group1')
yield self.store.new_group(u'group2')
sgroup1 = yield self.store.new_smart_group(u'sgroup1', u'surname:"a"')
sgroup2 = yield self.store.new_smart_group(u'sgroup2', u'surname:"a"')
[sg1, sg2] = yield self.store.list_smart_groups()
self.assert_models_equal(sgroup1, sg1)
self.assert_models_equal(sgroup2, sg2)
@inlineCallbacks
def test_list_static_groups(self):
self.assertEqual([], (yield self.store.list_static_groups()))
group1 = yield self.store.new_group(u'group1')
group2 = yield self.store.new_group(u'group2')
yield self.store.new_smart_group(u'sgroup1', u'surname:"a"')
yield self.store.new_smart_group(u'sgroup2', u'surname:"a"')
[g1, g2] = yield self.store.list_static_groups()
self.assert_models_equal(group1, g1)
self.assert_models_equal(group2, g2)
@inlineCallbacks
def test_per_user_groups(self):
group = yield self.store.new_group(u'group1')
dbgroup = yield self.store.get_group(group.key)
self.assertNotEqual(None, dbgroup)
self.assertEqual(None, (yield self.store_alt.get_group(group.key)))
group_alt = yield self.store_alt.new_group(u'group1')
dbgroup_alt = yield self.store_alt.get_group(group_alt.key)
self.assert_models_equal(group, dbgroup)
self.assert_models_equal(group_alt, dbgroup_alt)
self.assert_models_not_equal(group, group_alt)
@inlineCallbacks
def test_new_contact(self):
contact = yield self.store.new_contact(
name=u'J Random', surname=u'Person', msisdn=u'27831234567')
self.assertEqual(u'J Random', contact.name)
self.assertEqual(u'Person', contact.surname)
self.assertEqual(u'27831234567', contact.msisdn)
dbcontact = yield self.store.get_contact_by_key(contact.key)
self.assert_models_equal(contact, dbcontact)
@inlineCallbacks
def test_update_contact(self):
contact = yield self.store.new_contact(
name=u'J Random', surname=u'Person', msisdn=u'27831234567')
contact.add_to_group(u'group-a')
contact.add_to_group(u'group-b')
yield contact.save()
updated_contact = yield self.store.update_contact(
contact.key, surname=u'Jackal', groups=['group-a', u'group-c'])
dbcontact = yield self.store.get_contact_by_key(contact.key)
self.assertEqual(u'J Random', updated_contact.name)
self.assertEqual(u'Jackal', updated_contact.surname)
self.assertEqual(u'27831234567', updated_contact.msisdn)
self.assertEqual([u'group-a', u'group-b', u'group-c'],
updated_contact.groups.keys())
self.assert_models_equal(dbcontact, updated_contact)
def test_update_contact_for_nonexistent_contact(self):
return self.assertFailure(
self.store.update_contact('123124'), ContactNotFoundError)
@inlineCallbacks
def test_add_contact_to_group(self):
contact = yield self.store.new_contact(
name=u'J Random', surname=u'Person', msisdn=u'27831234567')
group1 = yield self.store.new_group(u'group1')
group2 = yield self.store.new_group(u'group2')
self.assertEqual([], contact.groups.keys())
contact.add_to_group(group1)
self.assertEqual([group1.key], contact.groups.keys())
contact.add_to_group(group2.key)
self.assertEqual([group1.key, group2.key], contact.groups.keys())
yield contact.save()
dbcontact = yield self.store.get_contact_by_key(contact.key)
self.assert_models_equal(contact, dbcontact)
group1 = yield self.store.get_group(group1.key)
group2 = yield self.store.get_group(group2.key)
contact_keys_page_g1 = yield group1.backlinks.contact_keys()
contact_keys_page_g2 = yield group2.backlinks.contact_keys()
self.assertEqual([contact.key], list(contact_keys_page_g1))
self.assertEqual([contact.key], list(contact_keys_page_g2))
@inlineCallbacks
def test_check_for_opted_out_contact(self):
contact1 = yield self.store.new_contact(
name=u'J Random', surname=u'Person', msisdn=u'27831234567')
contact2 = yield self.store.new_contact(
name=u'J Random', surname=u'Person', msisdn=u'27830000000')
# Opt out the first contact
user_account = yield self.user_helper.get_user_account()
optout_store = OptOutStore.from_user_account(user_account)
yield optout_store.new_opt_out(u'msisdn', contact1.msisdn, {
'message_id': u'the-message-id'
})
self.assertTrue((yield self.store.contact_has_opted_out(contact1)))
self.assertFalse((yield self.store.contact_has_opted_out(contact2)))
@inlineCallbacks
def test_count_contacts_for_static_group(self):
group = yield self.store.new_group(u'test group')
for i in range(2):
yield self.store.new_contact(
name=u'Contact', surname=u'%d' % i, msisdn=u'12345',
groups=[group])
count = yield self.store.count_contacts_for_group(group)
self.assertEqual(count, 2)
@inlineCallbacks
def test_count_contacts_for_smart_group(self):
group = yield self.store.new_smart_group(u'test group',
u'surname:"Foo 1"')
for i in range(2):
yield self.store.new_contact(
name=u'Contact', surname=u'Foo %d' % i, msisdn=u'12345')
count = yield self.store.count_contacts_for_group(group)
self.assertEqual(count, 1)
@inlineCallbacks
def test_new_contact_for_addr(self):
@inlineCallbacks
def check_new_contact_for_addr(deliv_class, addr, **kw):
contact = yield self.store.new_contact_for_addr(deliv_class, addr)
self.assertEqual(
contact.user_account.key, self.user_helper.account_key)
for field, expected_value in kw.iteritems():
self.assertEqual(getattr(contact, field), expected_value)
yield check_new_contact_for_addr('sms', u'+27831234567',
msisdn=u'+27831234567')
yield check_new_contact_for_addr('ussd', u'+27831234567',
msisdn=u'+27831234567')
yield check_new_contact_for_addr('gtalk', u'[email protected]',
gtalk_id=u'[email protected]',
msisdn=u'unknown')
yield check_new_contact_for_addr('twitter', u'random',
twitter_handle=u'random',
msisdn=u'unknown')
yield check_new_contact_for_addr('mxit', u'mxit',
mxit_id=u'mxit',
msisdn=u'unknown')
yield check_new_contact_for_addr('wechat', u'wechat',
wechat_id=u'wechat',
msisdn=u'unknown')
@inlineCallbacks
def test_contact_for_addr(self):
@inlineCallbacks
def check_contact_for_addr(delivery_class, addr, expected_contact):
contact = yield self.store.contact_for_addr(delivery_class, addr)
self.assert_models_equal(expected_contact, contact)
contact = yield self.store.new_contact(
name=u'A Random',
surname=u'Person',
msisdn=u'+27831234567',
gtalk_id=u'[email protected]',
twitter_handle=u'random',
mxit_id=u'mxit',
wechat_id=u'wechat')
yield check_contact_for_addr('sms', u'+27831234567', contact)
yield check_contact_for_addr('ussd', u'+27831234567', contact)
yield check_contact_for_addr('gtalk', u'[email protected]', contact)
yield check_contact_for_addr('twitter', u'random', contact)
yield check_contact_for_addr('mxit', u'mxit', contact)
yield check_contact_for_addr('wechat', u'wechat', contact)
yield check_contact_for_addr('voice', u'+27831234567', contact)
def test_contact_for_addr_for_unsupported_transports(self):
return self.assertFailure(
self.store.contact_for_addr('bad_transport_type', u'234234'),
ContactError)
def test_contact_for_addr_for_nonexistent_contacts(self):
return self.assertFailure(
self.store.contact_for_addr('sms', u'27831234567', create=False),
ContactNotFoundError)
@inlineCallbacks
def test_contact_for_addr_for_contact_creation(self):
@inlineCallbacks
def check_contact_for_addr(deliv_class, addr, **kw):
contact = yield self.store.contact_for_addr(deliv_class, addr)
self.assertEqual(
contact.user_account.key, self.user_helper.account_key)
for field, expected_value in kw.iteritems():
self.assertEqual(getattr(contact, field), expected_value)
yield check_contact_for_addr('sms', u'+27831234567',
msisdn=u'+27831234567')
yield check_contact_for_addr('ussd', u'+27831234567',
msisdn=u'+27831234567')
yield check_contact_for_addr('gtalk', u'[email protected]',
gtalk_id=u'[email protected]',
msisdn=u'unknown')
yield check_contact_for_addr('twitter', u'random',
twitter_handle=u'random',
msisdn=u'unknown')
yield check_contact_for_addr('mxit', u'mxit',
mxit_id=u'mxit',
msisdn=u'unknown')
yield check_contact_for_addr('wechat', u'wechat',
wechat_id=u'wechat',
msisdn=u'unknown')
yield check_contact_for_addr('voice', u'+27831234567',
msisdn=u'+27831234567')
| bsd-3-clause | 865,351,161,753,732,900 | 42.320132 | 79 | 0.605516 | false |
carsongee/formunculous | formunculous/urls.py | 1 | 4732 | # This file is part of formunculous.
#
# formunculous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# formunculous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with formunculous. If not, see <http://www.gnu.org/licenses/>.
# Copyright 2009-2013 Carson Gee
from django.conf.urls import patterns, include, url
from formunculous.views import builder
urlpatterns = patterns('',
# General views
url(r'^$', 'formunculous.views.apply.index', name="formunculous-index"),
url(r'^accounts/login/$', 'django.contrib.auth.views.login',
name='formunculous-login',),
url(r'^logout/$', 'formunculous.views.apply.logout_view',
name='formunculous-logout',),
# Apply views
url(r'^forms/(?P<slug>[-\w]+)/?$',
'formunculous.views.apply.apply',
name="formunculous-apply"
),
url(r'^confirm/(?P<slug>[-\w]+)/(?P<app>\d+)/$',
'formunculous.views.apply.confirm',
name="formunculous-confirm"
),
url(r'^thankyou/(?P<slug>[-\w]+)/(?P<app>\d+)/$',
'formunculous.views.apply.thankyou',
name="formunculous-thankyou"
),
url(r'^submit/(?P<slug>[-\w]+)/(?P<app>\d+)/$',
'formunculous.views.apply.submit',
name="formunculous-submit"),
url(r'^history/$',
'formunculous.views.apply.history',
name="formunculous-apply-history"
),
# Reviewers views
(r'^review/comments/', include('django.contrib.comments.urls')),
url(r'^review/(?P<slug>[-\w]+)/$',
'formunculous.views.reviewer.index',
name="reviewer-index"
),
url(r'^review/(?P<slug>[-\w]+)/incomplete/?$',
'formunculous.views.reviewer.index_incomplete',
name="reviewer-index-incomplete"
),
url(r'^review/(?P<slug>[-\w]+)/statistics/?$',
'formunculous.views.reviewer.statistics',
name="reviewer-statistics"
),
url(r'^review/(?P<slug>[-\w]+)/response-vs-time/?$',
'formunculous.views.reviewer.response_over_time',
name="reviewer-stats-response-vs-time"
),
url(r'^review/(?P<slug>[-\w]+)/field-pie/(?P<field>\d+)/?$',
'formunculous.views.reviewer.field_pie',
name="reviewer-stats-field-pie"
),
url(r'^review/(?P<slug>[-\w]+)/delete/$',
'formunculous.views.reviewer.delete',
name="reviewer-delete"
),
url(r'^review/(?P<slug>[-\w]+)/export/$',
'formunculous.views.reviewer.export_csv',
name="reviewer-export"
),
url(r'^review/(?P<slug>[-\w]+)/export_zip/$',
'formunculous.views.reviewer.export_zip',
name="reviewer-export-zip"
),
url(r'^review/(?P<slug>[-\w]+)/(?P<app>\d+)/$',
'formunculous.views.reviewer.application',
name="reviewer-application"),
# Builder views
url(r'^builder/add/$',
builder.AddAppDef.as_view(),
name="builder-add-ad"
),
url(r'^builder/edit/(?P<slug>[-\w]+)/$',
builder.ModifyAppDef.as_view(),
name="builder-edit-ad"
),
url(r'^builder/fields/(?P<slug>[-\w]+)/$',
builder.ModifyFields.as_view(),
name="builder-edit-fields"
),
url(r'^builder/add/field/(?P<slug>[-\w]+)/$',
builder.AddFieldForm.as_view(),
name="builder-add-field"
),
url(r'^builder/add/dropdown/$',
builder.AddModifyDropDown.as_view(),
name="builder-add-dropdown"
),
url(r'^builder/delete/$',
builder.DeleteAppDef.as_view(),
name="builder-delete-ad"
),
url(r'^builder/copy/$',
builder.CopyAppDef.as_view(),
name="builder-copy-ad"
),
url(r'^builder/preview/$',
builder.PreviewAppDef.as_view(),
name="builder-preview-ad"
),
url(r'^builder/subform/add/$',
builder.AddSubAppDef.as_view(),
name="builder-add-subapp"
),
url(r'^builder/subform/change/$',
builder.ChangeSubAppDef.as_view(),
name="builder-change-subapp"
),
url(r'^builder/?$',
builder.Index.as_view(),
name="builder-index"
),
# File static server view
url(r'^storage/(?P<ad_slug>[-\w]+)/(?P<app>\d+)/(?P<field_slug>[-\w]+)/(?P<file>.+)$',
'formunculous.views.apply.file_view',
name = "storage_view"
)
)
| gpl-3.0 | -2,882,577,813,195,735,000 | 32.560284 | 90 | 0.591716 | false |
ardi69/pyload-0.4.10 | pyload/plugin/hoster/MyfastfileCom.py | 1 | 1074 | # -*- coding: utf-8 -*-
from pyload.utils import json_loads
from pyload.plugin.internal.MultiHoster import MultiHoster
class MyfastfileCom(MultiHoster):
__name = "MyfastfileCom"
__type = "hoster"
__version = "0.08"
__pattern = r'http://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/dl/'
__config = [("use_premium", "bool", "Use premium account if available", True)]
__description = """Myfastfile.com multi-hoster plugin"""
__license = "GPLv3"
__authors = [("stickell", "[email protected]")]
def setup(self):
self.chunkLimit = -1
def handle_premium(self, pyfile):
self.html = self.load('http://myfastfile.com/api.php',
get={'user': self.user, 'pass': self.account.getAccountData(self.user)['password'],
'link': pyfile.url})
self.logDebug("JSON data: " + self.html)
self.html = json_loads(self.html)
if self.html['status'] != 'ok':
self.fail(_("Unable to unrestrict link"))
self.link = self.html['link']
| gpl-3.0 | -1,776,470,991,979,768,300 | 30.588235 | 108 | 0.564246 | false |
DedMemez/ODS-August-2017 | cogdominium/DistCogdoCrane.py | 1 | 35160 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.cogdominium.DistCogdoCrane
from panda3d.physics import ActorNode, PhysicalNode, PhysicsCollisionHandler, PhysicsObject
from panda3d.direct import SmoothMover
from panda3d.core import BitMask32, CollideMask, CollisionHandler, CollisionHandlerEvent, CollisionInvSphere, CollisionNode, CollisionSphere, CollisionTube, GeomNode, Light, Mat4, NodePath, NodePathCollection, Point3, RopeNode, TextNode, Texture, TextureStage, VBase3, VBase4, Vec3, lookAt
from direct.gui.DirectGui import *
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import *
from direct.fsm import FSM
from direct.distributed import DistributedObject
from direct.showutil import Rope
from direct.showbase import PythonUtil
from direct.task import Task
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPGlobals
from toontown.cogdominium import CogdoCraneGameConsts as GameConsts
import random
class DistCogdoCrane(DistributedObject.DistributedObject, FSM.FSM):
notify = DirectNotifyGlobal.directNotify.newCategory('DistCogdoCrane')
firstMagnetBit = 21
craneMinY = 8
craneMaxY = 25
armMinH = -45
armMaxH = 45
shadowOffset = 7
emptySlideSpeed = 10
emptyRotateSpeed = 20
lookAtPoint = Point3(0.3, 0, 0.1)
lookAtUp = Vec3(0, -1, 0)
neutralStickHinge = VBase3(0, 90, 0)
def __init__(self, cr):
DistributedObject.DistributedObject.__init__(self, cr)
FSM.FSM.__init__(self, 'DistCogdoCrane')
self.craneGame = None
self.index = None
self.avId = 0
self.cableLength = 20
self.numLinks = 3
self.initialArmPosition = (0, 20, 0)
self.slideSpeed = self.emptySlideSpeed
self.rotateSpeed = self.emptyRotateSpeed
self.changeSeq = 0
self.lastChangeSeq = 0
self.moveSound = None
self.links = []
self.activeLinks = []
self.collisions = NodePathCollection()
self.physicsActivated = 0
self.snifferActivated = 0
self.magnetOn = 0
self.root = NodePath('root')
self.hinge = self.root.attachNewNode('hinge')
self.hinge.setPos(0, -17.6, 38.5)
self.controls = self.root.attachNewNode('controls')
self.controls.setPos(0, -4.9, 0)
self.arm = self.hinge.attachNewNode('arm')
self.crane = self.arm.attachNewNode('crane')
self.cable = self.hinge.attachNewNode('cable')
self.topLink = self.crane.attachNewNode('topLink')
self.topLink.setPos(0, 0, -1)
self.shadow = None
self.p0 = Point3(0, 0, 0)
self.v1 = Vec3(1, 1, 1)
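# Smoothers interpolate the networked crane arm and cable link positions between broadcasts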
self.armSmoother = SmoothMover()
self.armSmoother.setSmoothMode(SmoothMover.SMOn)
self.linkSmoothers = []
self.smoothStarted = 0
self.__broadcastPeriod = 0.2
self.cable.node().setFinal(1)
self.crane.setPos(*self.initialArmPosition)
self.heldObject = None
self.craneAdviceLabel = None
self.magnetAdviceLabel = None
self.atLimitSfx = loader.loadSfx('phase_4/audio/sfx/MG_cannon_adjust.ogg')
self.magnetOnSfx = loader.loadSfx('phase_10/audio/sfx/CBHQ_CFO_magnet_on.ogg')
self.magnetLoopSfx = loader.loadSfx('phase_10/audio/sfx/CBHQ_CFO_magnet_loop.ogg')
self.magnetSoundInterval = Parallel(SoundInterval(self.magnetOnSfx), Sequence(Wait(0.5), Func(base.playSfx, self.magnetLoopSfx, looping=1)))
self.craneMoveSfx = loader.loadSfx('phase_9/audio/sfx/CHQ_FACT_elevator_up_down.ogg')
self.fadeTrack = None
return
def announceGenerate(self):
DistributedObject.DistributedObject.announceGenerate(self)
self.name = 'crane-%s' % self.doId
self.root.setName(self.name)
self.root.setPosHpr(*GameConsts.CranePosHprs[self.index])
self.rotateLinkName = self.uniqueName('rotateLink')
self.snifferEvent = self.uniqueName('sniffer')
self.triggerName = self.uniqueName('trigger')
self.triggerEvent = 'enter%s' % self.triggerName
self.shadowName = self.uniqueName('shadow')
self.flickerName = self.uniqueName('flicker')
self.smoothName = self.uniqueName('craneSmooth')
self.posHprBroadcastName = self.uniqueName('craneBroadcast')
self.craneAdviceName = self.uniqueName('craneAdvice')
self.magnetAdviceName = self.uniqueName('magnetAdvice')
self.controlModel = self.craneGame.controls.copyTo(self.controls)
self.cc = NodePath('cc')
column = self.controlModel.find('**/column')
column.getChildren().reparentTo(self.cc)
self.cc.reparentTo(column)
self.stickHinge = self.cc.attachNewNode('stickHinge')
self.stick = self.craneGame.stick.copyTo(self.stickHinge)
self.stickHinge.setHpr(self.neutralStickHinge)
self.stick.setHpr(0, -90, 0)
self.stick.flattenLight()
self.bottom = self.controlModel.find('**/bottom')
self.bottom.wrtReparentTo(self.cc)
self.bottomPos = self.bottom.getPos()
cs = CollisionSphere(0, -5, -2, 3)
cs.setTangible(0)
cn = CollisionNode(self.triggerName)
cn.addSolid(cs)
cn.setIntoCollideMask(OTPGlobals.WallBitmask)
self.trigger = self.root.attachNewNode(cn)
self.trigger.stash()
cs = CollisionTube(0, 2.7, 0, 0, 2.7, 3, 1.2)
cn = CollisionNode('tube')
cn.addSolid(cs)
cn.setIntoCollideMask(OTPGlobals.WallBitmask)
self.tube = self.controlModel.attachNewNode(cn)
cs = CollisionSphere(0, 0, 2, 3)
cn = CollisionNode('safetyBubble')
cn.addSolid(cs)
cn.setIntoCollideMask(ToontownGlobals.PieBitmask)
self.controls.attachNewNode(cn)
arm = self.craneGame.craneArm.copyTo(self.crane)
self.craneGame.cranes[self.index] = self
def disable(self):
DistributedObject.DistributedObject.disable(self)
del self.craneGame.cranes[self.index]
self.cleanup()
def cleanup(self):
if self.state != 'Off':
self.demand('Off')
self.craneGame = None
return
def accomodateToon(self, toon):
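# Rescale the control model to the toon's size, snap the joystick to its right hand, and return a lerp interval into that pose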
origScale = self.controlModel.getSz()
origCcPos = self.cc.getPos()
origBottomPos = self.bottom.getPos()
origStickHingeHpr = self.stickHinge.getHpr()
scale = toon.getGeomNode().getChild(0).getSz(render)
self.controlModel.setScale(scale)
self.cc.setPos(0, 0, 0)
toon.setPosHpr(self.controls, 0, 0, 0, 0, 0, 0)
toon.pose('leverNeutral', 0)
toon.update()
pos = toon.rightHand.getPos(self.cc)
self.cc.setPos(pos[0], pos[1], pos[2] - 1)
self.bottom.setZ(toon, 0.0)
self.bottom.setPos(self.bottomPos[0], self.bottomPos[1], self.bottom.getZ())
self.stickHinge.lookAt(toon.rightHand, self.lookAtPoint, self.lookAtUp)
lerpTime = 0.5
return Parallel(self.controlModel.scaleInterval(lerpTime, scale, origScale, blendType='easeInOut'), self.cc.posInterval(lerpTime, self.cc.getPos(), origCcPos, blendType='easeInOut'), self.bottom.posInterval(lerpTime, self.bottom.getPos(), origBottomPos, blendType='easeInOut'), self.stickHinge.quatInterval(lerpTime, self.stickHinge.getHpr(), origStickHingeHpr, blendType='easeInOut'))
def getRestoreScaleInterval(self):
lerpTime = 1
return Parallel(self.controlModel.scaleInterval(lerpTime, 1, blendType='easeInOut'), self.cc.posInterval(lerpTime, Point3(0, 0, 0), blendType='easeInOut'), self.bottom.posInterval(lerpTime, self.bottomPos, blendType='easeInOut'), self.stickHinge.quatInterval(lerpTime, self.neutralStickHinge, blendType='easeInOut'))
def makeToonGrabInterval(self, toon):
origPos = toon.getPos()
origHpr = toon.getHpr()
a = self.accomodateToon(toon)
newPos = toon.getPos()
newHpr = toon.getHpr()
origHpr.setX(PythonUtil.fitSrcAngle2Dest(origHpr[0], newHpr[0]))
toon.setPosHpr(origPos, origHpr)
walkTime = 0.2
reach = ActorInterval(toon, 'leverReach')
if reach.getDuration() < walkTime:
reach = Sequence(ActorInterval(toon, 'walk', loop=1, duration=walkTime - reach.getDuration()), reach)
i = Sequence(Parallel(toon.posInterval(walkTime, newPos, origPos), toon.hprInterval(walkTime, newHpr, origHpr), reach), Func(self.startWatchJoystick, toon))
i = Parallel(i, a)
return i
def __toonPlayWithCallback(self, animName, numFrames):
duration = numFrames / 24.0
self.toon.play(animName)
taskMgr.doMethodLater(duration, self.__toonPlayCallback, self.uniqueName('toonPlay'))
def __toonPlayCallback(self, task):
if self.changeSeq == self.lastChangeSeq:
self.__toonPlayWithCallback('leverNeutral', 40)
else:
self.__toonPlayWithCallback('leverPull', 40)
self.lastChangeSeq = self.changeSeq
def startWatchJoystick(self, toon):
self.toon = toon
taskMgr.add(self.__watchJoystick, self.uniqueName('watchJoystick'))
self.__toonPlayWithCallback('leverNeutral', 40)
self.accept(toon.uniqueName('disable'), self.__handleUnexpectedExit, extraArgs=[toon.doId])
def stopWatchJoystick(self):
taskMgr.remove(self.uniqueName('toonPlay'))
taskMgr.remove(self.uniqueName('watchJoystick'))
if self.toon:
self.ignore(self.toon.uniqueName('disable'))
self.toon = None
return
def __watchJoystick(self, task):
self.toon.setPosHpr(self.controls, 0, 0, 0, 0, 0, 0)
self.toon.update()
self.stickHinge.lookAt(self.toon.rightHand, self.lookAtPoint, self.lookAtUp)
return Task.cont
def __handleUnexpectedExit(self, toonId):
self.notify.warning('%s: unexpected exit for %s' % (self.doId, toonId))
if self.toon and self.toon.doId == toonId:
self.stopWatchJoystick()
def __activatePhysics(self):
if not self.physicsActivated:
for an, anp, cnp in self.activeLinks:
self.craneGame.physicsMgr.attachPhysicalNode(an)
base.cTrav.addCollider(cnp, self.handler)
self.collisions.unstash()
self.physicsActivated = 1
def __deactivatePhysics(self):
if self.physicsActivated:
for an, anp, cnp in self.activeLinks:
self.craneGame.physicsMgr.removePhysicalNode(an)
base.cTrav.removeCollider(cnp)
self.collisions.stash()
self.physicsActivated = 0
def __straightenCable(self):
for linkNum in xrange(self.numLinks):
an, anp, cnp = self.activeLinks[linkNum]
an.getPhysicsObject().setVelocity(0, 0, 0)
z = float(linkNum + 1) / float(self.numLinks) * self.cableLength
anp.setPos(self.crane.getPos(self.cable))
anp.setZ(-z)
def setCableLength(self, length):
self.cableLength = length
linkWidth = float(length) / float(self.numLinks)
self.shell.setRadius(linkWidth + 1)
def setupCable(self):
activated = self.physicsActivated
self.clearCable()
self.handler = PhysicsCollisionHandler()
self.handler.setStaticFrictionCoef(0.1)
self.handler.setDynamicFrictionCoef(GameConsts.Settings.EmptyFrictionCoef.get())
linkWidth = float(self.cableLength) / float(self.numLinks)
self.shell = CollisionInvSphere(0, 0, 0, linkWidth + 1)
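        # The inverted sphere is attached at each link's parent and is collided
        # with from the inside by the next link, so no link can drift farther
        # than roughly one link-length from its parent; this is what holds the
        # simulated cable together.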
self.links = []
self.links.append((self.topLink, Point3(0, 0, 0)))
anchor = self.topLink
for linkNum in xrange(self.numLinks):
anchor = self.__makeLink(anchor, linkNum)
self.collisions.stash()
self.bottomLink = self.links[-1][0]
self.middleLink = self.links[-2][0]
self.magnet = self.bottomLink.attachNewNode('magnet')
self.wiggleMagnet = self.magnet.attachNewNode('wiggleMagnet')
taskMgr.add(self.__rotateMagnet, self.rotateLinkName)
magnetModel = self.craneGame.magnet.copyTo(self.wiggleMagnet)
magnetModel.setHpr(90, 45, 90)
self.gripper = magnetModel.attachNewNode('gripper')
self.gripper.setPos(0, 0, -4)
cn = CollisionNode('sniffer')
self.sniffer = magnetModel.attachNewNode(cn)
self.sniffer.stash()
cs = CollisionSphere(0, 0, -10, 6)
cs.setTangible(0)
cn.addSolid(cs)
cn.setIntoCollideMask(BitMask32(0))
cn.setFromCollideMask(ToontownGlobals.CashbotBossObjectBitmask)
self.snifferHandler = CollisionHandlerEvent()
self.snifferHandler.addInPattern(self.snifferEvent)
self.snifferHandler.addAgainPattern(self.snifferEvent)
rope = self.makeSpline()
rope.reparentTo(self.cable)
rope.setTexture(self.craneGame.cableTex)
ts = TextureStage.getDefault()
rope.setTexScale(ts, 0.15, 0.13)
rope.setTexOffset(ts, 0.83, 0.01)
if activated:
self.__activatePhysics()
def clearCable(self):
self.__deactivatePhysics()
taskMgr.remove(self.rotateLinkName)
self.links = []
self.activeLinks = []
self.linkSmoothers = []
self.collisions.clear()
self.cable.getChildren().detach()
self.topLink.getChildren().detach()
self.gripper = None
return
def makeSpline(self):
rope = Rope.Rope()
rope.setup(min(len(self.links), 4), self.links)
rope.curve.normalizeKnots()
rn = rope.ropeNode
rn.setRenderMode(RopeNode.RMTube)
rn.setNumSlices(3)
rn.setTubeUp(Vec3(0, -1, 0))
rn.setUvMode(RopeNode.UVParametric)
rn.setUvDirection(1)
rn.setThickness(0.5)
return rope
def startShadow(self):
self.shadow = self.craneGame.geomRoot.attachNewNode('%s-shadow' % self.name)
self.shadow.setColor(1, 1, 1, 0.3)
self.shadow.setDepthWrite(0)
self.shadow.setTransparency(1)
self.shadow.setBin('shadow', 0)
self.shadow.node().setFinal(1)
self.magnetShadow = loader.loadModel('phase_3/models/props/drop_shadow')
self.magnetShadow.reparentTo(self.shadow)
self.craneShadow = loader.loadModel('phase_3/models/props/square_drop_shadow')
self.craneShadow.setScale(0.5, 4, 1)
self.craneShadow.setPos(0, -12, 0)
self.craneShadow.flattenLight()
self.craneShadow.reparentTo(self.shadow)
taskMgr.add(self.__followShadow, self.shadowName)
rope = self.makeSpline()
rope.reparentTo(self.shadow)
rope.setColor(1, 1, 1, 0.2)
tex = self.craneShadow.findTexture('*')
rope.setTexture(tex)
rn = rope.ropeNode
rn.setRenderMode(RopeNode.RMTape)
rn.setNumSubdiv(6)
rn.setThickness(0.8)
rn.setTubeUp(Vec3(0, 0, 1))
rn.setMatrix(Mat4.translateMat(0, 0, self.shadowOffset) * Mat4.scaleMat(1, 1, 0.01))
def stopShadow(self):
if self.shadow:
self.shadow.removeNode()
self.shadow = None
self.magnetShadow = None
self.craneShadow = None
taskMgr.remove(self.shadowName)
return
def __followShadow(self, task):
p = self.magnet.getPos(self.craneGame.geomRoot)
self.magnetShadow.setPos(p[0], p[1], self.shadowOffset)
self.craneShadow.setPosHpr(self.crane, 0, 0, 0, 0, 0, 0)
self.craneShadow.setZ(self.shadowOffset)
return Task.cont
def __makeLink(self, anchor, linkNum):
an = ActorNode('link%s' % linkNum)
an.getPhysicsObject().setMass(GameConsts.Settings.RopeLinkMass.get())
anp = NodePath(an)
cn = CollisionNode('cn')
sphere = CollisionSphere(0, 0, 0, 1)
cn.addSolid(sphere)
cnp = anp.attachNewNode(cn)
self.handler.addCollider(cnp, anp)
self.activeLinks.append((an, anp, cnp))
self.linkSmoothers.append(SmoothMover())
anp.reparentTo(self.cable)
z = float(linkNum + 1) / float(self.numLinks) * self.cableLength
anp.setPos(self.crane.getPos())
anp.setZ(-z)
mask = BitMask32.bit(self.firstMagnetBit + linkNum)
cn.setFromCollideMask(mask)
cn.setIntoCollideMask(BitMask32(0))
shellNode = CollisionNode('shell%s' % linkNum)
shellNode.addSolid(self.shell)
shellNP = anchor.attachNewNode(shellNode)
shellNode.setIntoCollideMask(mask)
self.collisions.addPath(shellNP)
self.collisions.addPath(cnp)
self.links.append((anp, Point3(0, 0, 0)))
return anp
def __rotateMagnet(self, task):
self.magnet.lookAt(self.middleLink, self.p0, self.v1)
return Task.cont
def __enableControlInterface(self):
gui = loader.loadModel('phase_3.5/models/gui/avatar_panel_gui')
self.craneKey = base.getKey('pickup')
self.accept(self.craneKey, self.__controlPressed)
self.accept(self.craneKey + '-up', self.__controlReleased)
self.accept('InputState-forward', self.__upArrow)
self.accept('InputState-reverse', self.__downArrow)
self.accept('InputState-turnLeft', self.__leftArrow)
self.accept('InputState-turnRight', self.__rightArrow)
taskMgr.add(self.__watchControls, 'watchCraneControls')
taskMgr.doMethodLater(5, self.__displayCraneAdvice, self.craneAdviceName)
taskMgr.doMethodLater(10, self.__displayMagnetAdvice, self.magnetAdviceName)
NametagGlobals.setOnscreenChatForced(1)
self.arrowVert = 0
self.arrowHorz = 0
def __disableControlInterface(self):
self.__turnOffMagnet()
self.__cleanupCraneAdvice()
self.__cleanupMagnetAdvice()
self.ignore('escape')
self.ignore(self.craneKey)
self.ignore(self.craneKey + '-up')
self.ignore('InputState-forward')
self.ignore('InputState-reverse')
self.ignore('InputState-turnLeft')
self.ignore('InputState-turnRight')
self.arrowVert = 0
self.arrowHorz = 0
NametagGlobals.setOnscreenChatForced(0)
taskMgr.remove('watchCraneControls')
self.__setMoveSound(None)
return
def __displayCraneAdvice(self, task):
if self.craneAdviceLabel == None:
self.craneAdviceLabel = DirectLabel(text=TTLocalizer.CashbotCraneAdvice, text_fg=VBase4(1, 1, 1, 1), text_align=TextNode.ACenter, relief=None, pos=(0, 0, 0.69), scale=0.1)
return
def __cleanupCraneAdvice(self):
if self.craneAdviceLabel:
self.craneAdviceLabel.destroy()
self.craneAdviceLabel = None
taskMgr.remove(self.craneAdviceName)
return
def __displayMagnetAdvice(self, task):
if self.magnetAdviceLabel == None:
self.magnetAdviceLabel = DirectLabel(text=TTLocalizer.CashbotMagnetAdvice, text_fg=VBase4(1, 1, 1, 1), text_align=TextNode.ACenter, relief=None, pos=(0, 0, 0.55), scale=0.1)
return
def __cleanupMagnetAdvice(self):
if self.magnetAdviceLabel:
self.magnetAdviceLabel.destroy()
self.magnetAdviceLabel = None
taskMgr.remove(self.magnetAdviceName)
return
def __watchControls(self, task):
if self.arrowHorz or self.arrowVert:
self.__moveCraneArcHinge(self.arrowHorz, self.arrowVert)
else:
self.__setMoveSound(None)
return Task.cont
def __incrementChangeSeq(self):
self.changeSeq = self.changeSeq + 1 & 255
def __controlPressed(self):
self.__cleanupMagnetAdvice()
self.__turnOnMagnet()
def __controlReleased(self):
self.__turnOffMagnet()
def __turnOnMagnet(self):
if not self.magnetOn:
self.__incrementChangeSeq()
self.magnetOn = 1
if not self.heldObject:
self.__activateSniffer()
def __turnOffMagnet(self):
if self.magnetOn:
self.magnetOn = 0
self.__deactivateSniffer()
self.releaseObject()
def __upArrow(self, pressed):
self.__incrementChangeSeq()
self.__cleanupCraneAdvice()
if pressed:
self.arrowVert = 1
elif self.arrowVert > 0:
self.arrowVert = 0
def __downArrow(self, pressed):
self.__incrementChangeSeq()
self.__cleanupCraneAdvice()
if pressed:
self.arrowVert = -1
elif self.arrowVert < 0:
self.arrowVert = 0
def __rightArrow(self, pressed):
self.__incrementChangeSeq()
self.__cleanupCraneAdvice()
if pressed:
self.arrowHorz = 1
elif self.arrowHorz > 0:
self.arrowHorz = 0
def __leftArrow(self, pressed):
self.__incrementChangeSeq()
self.__cleanupCraneAdvice()
if pressed:
self.arrowHorz = -1
elif self.arrowHorz < 0:
self.arrowHorz = 0
def __moveCraneArcHinge(self, xd, yd):
dt = globalClock.getDt()
h = self.arm.getH() - xd * self.rotateSpeed * dt
limitH = max(min(h, self.armMaxH), self.armMinH)
self.arm.setH(limitH)
y = self.crane.getY() + yd * self.slideSpeed * dt
limitY = max(min(y, self.craneMaxY), self.craneMinY)
atLimit = limitH != h or limitY != y
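        # If the stick pushed the crane past its range of motion, jiggle it
        # slightly with small sinusoidal x/z offsets and switch to the
        # "at limit" sound; otherwise move normally and play the regular
        # crane movement sound.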
if atLimit:
now = globalClock.getFrameTime()
x = math.sin(now * 79) * 0.05
z = math.sin(now * 70) * 0.02
self.crane.setPos(x, limitY, z)
self.__setMoveSound(self.atLimitSfx)
else:
self.crane.setPos(0, limitY, 0)
self.__setMoveSound(self.craneMoveSfx)
def __setMoveSound(self, sfx):
if sfx != self.moveSound:
if self.moveSound:
self.moveSound.stop()
self.moveSound = sfx
if self.moveSound:
base.playSfx(self.moveSound, looping=1, volume=0.5)
def __activateSniffer(self):
if not self.snifferActivated:
self.sniffer.unstash()
base.cTrav.addCollider(self.sniffer, self.snifferHandler)
self.accept(self.snifferEvent, self.__sniffedSomething)
self.startFlicker()
self.snifferActivated = 1
def __deactivateSniffer(self):
if self.snifferActivated:
base.cTrav.removeCollider(self.sniffer)
self.sniffer.stash()
self.ignore(self.snifferEvent)
self.stopFlicker()
self.snifferActivated = 0
def startFlicker(self):
self.magnetSoundInterval.start()
self.lightning = []
for i in xrange(4):
t = float(i) / 3.0 - 0.5
l = self.craneGame.lightning.copyTo(self.gripper)
l.setScale(random.choice([1, -1]), 1, 5)
l.setZ(random.uniform(-5, -5.5))
l.flattenLight()
l.setTwoSided(1)
l.setBillboardAxis()
l.setScale(random.uniform(0.5, 1.0))
if t < 0:
l.setX(t - 0.7)
else:
l.setX(t + 0.7)
l.setR(-20 * t)
l.setP(random.uniform(-20, 20))
self.lightning.append(l)
taskMgr.add(self.__flickerLightning, self.flickerName)
def stopFlicker(self):
self.magnetSoundInterval.finish()
self.magnetLoopSfx.stop()
taskMgr.remove(self.flickerName)
for l in self.lightning:
l.detachNode()
self.lightning = None
return
def __flickerLightning(self, task):
for l in self.lightning:
if random.random() < 0.5:
l.hide()
else:
l.show()
return Task.cont
def __sniffedSomething(self, entry):
np = entry.getIntoNodePath()
doId = int(np.getNetTag('object'))
obj = base.cr.doId2do.get(doId)
if obj and obj.state != 'LocalDropped' and (obj.state != 'Dropped' or obj.craneId != self.doId):
obj.d_requestGrab()
obj.demand('LocalGrabbed', localAvatar.doId, self.doId)
def grabObject(self, obj):
if self.state == 'Off':
return
else:
if self.heldObject != None:
self.releaseObject()
self.__deactivateSniffer()
obj.wrtReparentTo(self.gripper)
if obj.lerpInterval:
obj.lerpInterval.finish()
obj.lerpInterval = Parallel(obj.posInterval(ToontownGlobals.CashbotBossToMagnetTime, Point3(*obj.grabPos)), obj.quatInterval(ToontownGlobals.CashbotBossToMagnetTime, VBase3(obj.getH(), 0, 0)), obj.toMagnetSoundInterval)
obj.lerpInterval.start()
self.heldObject = obj
self.handler.setDynamicFrictionCoef(obj.craneFrictionCoef)
self.slideSpeed = obj.craneSlideSpeed
self.rotateSpeed = obj.craneRotateSpeed
if self.avId == localAvatar.doId and not self.magnetOn:
self.releaseObject()
return
def dropObject(self, obj):
if obj.lerpInterval:
obj.lerpInterval.finish()
obj.wrtReparentTo(render)
obj.lerpInterval = Parallel(obj.quatInterval(ToontownGlobals.CashbotBossFromMagnetTime, VBase3(obj.getH(), 0, 0), blendType='easeOut'))
obj.lerpInterval.start()
p1 = self.bottomLink.node().getPhysicsObject()
v = render.getRelativeVector(self.bottomLink, p1.getVelocity())
obj.physicsObject.setVelocity(v * 1.5)
if self.heldObject == obj:
self.heldObject = None
self.handler.setDynamicFrictionCoef(GameConsts.Settings.EmptyFrictionCoef.get())
self.slideSpeed = self.emptySlideSpeed
self.rotateSpeed = self.emptyRotateSpeed
return
def releaseObject(self):
if self.heldObject:
obj = self.heldObject
obj.d_requestDrop()
if obj.state == 'Grabbed':
obj.demand('LocalDropped', localAvatar.doId, self.doId)
def __hitTrigger(self, event):
pass
def setCraneGameId(self, craneGameId):
self.craneGameId = craneGameId
self.craneGame = base.cr.doId2do[craneGameId]
def setIndex(self, index):
self.index = index
def setState(self, state, avId):
if state == 'C':
self.demand('Controlled', avId)
elif state == 'F':
self.demand('Free')
else:
self.notify.error('Invalid state from AI: %s' % state)
def d_requestControl(self):
self.sendUpdate('requestControl')
def d_requestFree(self):
self.sendUpdate('requestFree')
def b_clearSmoothing(self):
self.d_clearSmoothing()
self.clearSmoothing()
def d_clearSmoothing(self):
self.sendUpdate('clearSmoothing', [0])
def clearSmoothing(self, bogus = None):
self.armSmoother.clearPositions(1)
for smoother in self.linkSmoothers:
smoother.clearPositions(1)
def reloadPosition(self):
self.armSmoother.clearPositions(0)
self.armSmoother.setPos(self.crane.getPos())
self.armSmoother.setHpr(self.arm.getHpr())
self.armSmoother.setPhonyTimestamp()
for linkNum in xrange(self.numLinks):
smoother = self.linkSmoothers[linkNum]
an, anp, cnp = self.activeLinks[linkNum]
smoother.clearPositions(0)
smoother.setPos(anp.getPos())
smoother.setPhonyTimestamp()
def doSmoothTask(self, task):
self.armSmoother.computeAndApplySmoothPosHpr(self.crane, self.arm)
for linkNum in xrange(self.numLinks):
smoother = self.linkSmoothers[linkNum]
anp = self.activeLinks[linkNum][1]
smoother.computeAndApplySmoothPos(anp)
return Task.cont
def startSmooth(self):
if not self.smoothStarted:
taskName = self.smoothName
taskMgr.remove(taskName)
self.reloadPosition()
taskMgr.add(self.doSmoothTask, taskName)
self.smoothStarted = 1
def stopSmooth(self):
if self.smoothStarted:
taskName = self.smoothName
taskMgr.remove(taskName)
self.forceToTruePosition()
self.smoothStarted = 0
def forceToTruePosition(self):
if self.armSmoother.getLatestPosition():
self.armSmoother.applySmoothPos(self.crane)
self.armSmoother.applySmoothHpr(self.arm)
self.armSmoother.clearPositions(1)
for linkNum in xrange(self.numLinks):
smoother = self.linkSmoothers[linkNum]
an, anp, cnp = self.activeLinks[linkNum]
if smoother.getLatestPosition():
smoother.applySmoothPos(anp)
smoother.clearPositions(1)
def setCablePos(self, changeSeq, y, h, links, timestamp):
self.changeSeq = changeSeq
if self.smoothStarted:
now = globalClock.getFrameTime()
local = globalClockDelta.networkToLocalTime(timestamp, now)
self.armSmoother.setY(y)
self.armSmoother.setH(h)
self.armSmoother.setTimestamp(local)
self.armSmoother.markPosition()
for linkNum in xrange(self.numLinks):
smoother = self.linkSmoothers[linkNum]
lp = links[linkNum]
smoother.setPos(*lp)
smoother.setTimestamp(local)
smoother.markPosition()
else:
self.crane.setY(y)
self.arm.setH(h)
def d_sendCablePos(self):
timestamp = globalClockDelta.getFrameNetworkTime()
links = []
for linkNum in xrange(self.numLinks):
an, anp, cnp = self.activeLinks[linkNum]
p = anp.getPos()
links.append((p[0], p[1], p[2]))
self.sendUpdate('setCablePos', [self.changeSeq,
self.crane.getY(),
self.arm.getH(),
links,
timestamp])
def stopPosHprBroadcast(self):
taskName = self.posHprBroadcastName
taskMgr.remove(taskName)
def startPosHprBroadcast(self):
taskName = self.posHprBroadcastName
self.b_clearSmoothing()
self.d_sendCablePos()
taskMgr.remove(taskName)
taskMgr.doMethodLater(self.__broadcastPeriod, self.__posHprBroadcast, taskName)
def __posHprBroadcast(self, task):
self.d_sendCablePos()
taskName = self.posHprBroadcastName
taskMgr.doMethodLater(self.__broadcastPeriod, self.__posHprBroadcast, taskName)
return Task.done
def enterOff(self):
self.clearCable()
self.root.detachNode()
def exitOff(self):
if self.craneGame:
self.setupCable()
self.root.reparentTo(render)
def enterControlled(self, avId):
self.avId = avId
toon = base.cr.doId2do.get(avId)
if not toon:
return
self.grabTrack = self.makeToonGrabInterval(toon)
if avId == localAvatar.doId:
self.craneGame.toCraneMode()
camera.reparentTo(self.hinge)
camera.setPosHpr(0, -20, -5, 0, -20, 0)
self.tube.stash()
localAvatar.setPosHpr(self.controls, 0, 0, 0, 0, 0, 0)
localAvatar.sendCurrentPosition()
self.__activatePhysics()
self.__enableControlInterface()
self.startPosHprBroadcast()
self.startShadow()
else:
self.startSmooth()
toon.stopSmooth()
self.grabTrack = Sequence(self.grabTrack, Func(toon.startSmooth))
self.grabTrack.start()
def exitControlled(self):
self.grabTrack.finish()
del self.grabTrack
if self.toon and not self.toon.isDisabled():
self.toon.loop('neutral')
self.toon.startSmooth()
self.stopWatchJoystick()
self.stopPosHprBroadcast()
self.stopShadow()
self.stopSmooth()
if self.avId == localAvatar.doId:
self.__disableControlInterface()
self.__deactivatePhysics()
self.tube.unstash()
camera.reparentTo(base.localAvatar)
camera.setPos(base.localAvatar.cameraPositions[0][0])
camera.setHpr(0, 0, 0)
self.__straightenCable()
def enterFree(self):
if self.fadeTrack:
self.fadeTrack.finish()
self.fadeTrack = None
self.restoreScaleTrack = Sequence(Wait(6), self.getRestoreScaleInterval())
self.restoreScaleTrack.start()
if self.avId == localAvatar.doId:
self.controlModel.setAlphaScale(0.3)
self.controlModel.setTransparency(1)
taskMgr.doMethodLater(5, self.__allowDetect, self.triggerName)
self.fadeTrack = Sequence(Func(self.controlModel.setTransparency, 1), self.controlModel.colorScaleInterval(0.2, VBase4(1, 1, 1, 0.3)))
self.fadeTrack.start()
else:
self.trigger.unstash()
self.accept(self.triggerEvent, self.__hitTrigger)
self.avId = 0
return
def __allowDetect(self, task):
if self.fadeTrack:
self.fadeTrack.finish()
self.fadeTrack = Sequence(self.controlModel.colorScaleInterval(0.2, VBase4(1, 1, 1, 1)), Func(self.controlModel.clearColorScale), Func(self.controlModel.clearTransparency))
self.fadeTrack.start()
self.trigger.unstash()
self.accept(self.triggerEvent, self.__hitTrigger)
def exitFree(self):
if self.fadeTrack:
self.fadeTrack.finish()
self.fadeTrack = None
self.restoreScaleTrack.pause()
del self.restoreScaleTrack
taskMgr.remove(self.triggerName)
self.controlModel.clearColorScale()
self.controlModel.clearTransparency()
self.trigger.stash()
self.ignore(self.triggerEvent)
return
def enterMovie(self):
self.__activatePhysics()
def exitMovie(self):
self.__deactivatePhysics()
self.__straightenCable() | apache-2.0 | -2,962,020,186,007,563,000 | 38.093501 | 393 | 0.611177 | false |
wphicks/Writing3D | pyw3d/blender_actions/trigger.py | 1 | 1973 | # Copyright (C) 2016 William Hicks
#
# This file is part of Writing3D.
#
# Writing3D is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
"""Tools for enabling/disabling triggers in Blender"""
from pyw3d.names import generate_trigger_name
class TriggerEnabler(object):
"""Generate Python logic for how link should change when action first
starts, as it continues, and when it ends
:param str change: The change to be performed (one of "Start", "Stop",
"Continue", or "Start if not started")
:param int offset: A number of tabs (4 spaces) to add before Python logic
strings"""
@property
def start_string(self):
script_text = [
"trigger = scene.objects['{}']".format(self.trigger)
]
script_text.append(
"trigger['enabled'] = {}".format(self.enable)
)
try:
script_text[0] = "{}{}".format(" "*self.offset, script_text[0])
except IndexError:
return ""
return "\n{}".format(" "*self.offset).join(script_text)
@property
def continue_string(self):
return "{}pass".format(" "*self.offset)
@property
def end_string(self):
return "{}pass".format(" "*self.offset)
def __init__(self, trigger, enable, offset=0):
self.trigger = generate_trigger_name(trigger)
self.enable = enable
self.offset = offset
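# Added illustrative sketch (not part of the original module). ``my_trigger``
# is a placeholder for a trigger object accepted by generate_trigger_name();
# the strings below are what the action framework splices into the generated
# Blender game logic.
def _example_trigger_enabler(my_trigger):
    """Show the Python logic produced when enabling a trigger."""
    enabler = TriggerEnabler(my_trigger, enable=True, offset=4)
    print(enabler.start_string)     # looks up the trigger and sets 'enabled' to True
    print(enabler.continue_string)  # no-op ("pass") while the action continues
    print(enabler.end_string)       # no-op ("pass") when the action ends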
| gpl-3.0 | -6,210,813,533,540,317,000 | 33.614035 | 78 | 0.65484 | false |
Open-Power-System-Data/time_series | timeseries_scripts/imputation.py | 1 | 14280 | """
Open Power System Data
Time series Datapackage
imputation.py : fill functions for imputation of missing data.
"""
from datetime import datetime, date, timedelta
import pandas as pd
import numpy as np
import logging
logger = logging.getLogger(__name__)
logger.setLevel('INFO')
def find_nan(df, res_key, headers, patch=False):
'''
Search for missing values in a DataFrame and optionally apply further
functions on each column.
Parameters
----------
df : pandas.DataFrame
DataFrame to inspect and possibly patch
headers : list
List of strings indicating the level names of the pandas.MultiIndex
for the columns of the dataframe
patch : bool, default=False
If False, return unaltered DataFrame,
if True, return patched DataFrame
Returns
----------
patched: pandas.DataFrame
original df or df with gaps patched and marker column appended
nan_table: pandas.DataFrame
Contains detailed information about missing data
'''
nan_table = pd.DataFrame()
patched = pd.DataFrame()
# marker_col = pd.Series(np.nan, index=df.index)
if df.empty:
overview = pd.DataFrame()
return patched, nan_table, overview
else:
overview = df.describe()
# Get the frequency/length of one period of df
one_period = pd.Timedelta(res_key)
for col_name, col in df.iteritems():
overview.loc['first', col_name] = col.first_valid_index()
overview.loc['last', col_name] = col.last_valid_index()
col = col.to_frame()
message = '| {:5.5} | {:6.6} | {:10.10} | {:10.10} | {:10.10} | '.format(
res_key, *col_name[0:4])
# make an empty list of NaN blocks to use as default
nan_idx = pd.MultiIndex.from_arrays([
[0, 0, 0, 0],
['count', 'span', 'start_idx', 'till_idx']])
nan_list = pd.DataFrame(index=nan_idx, columns=col.columns)
# skip this column if it has no entries at all.
# This will also delete the column from the patched df
if col.empty:
continue
# tag all occurences of NaN in the data with True
# (but not before first or after last actual entry)
col['tag'] = (
(col.index >= col.first_valid_index()) &
(col.index <= col.last_valid_index()) &
col.isnull().transpose().values
).transpose()
# count missing values
overview.loc['nan_count', col_name] = col['tag'].sum()
# make another DF to hold info about each block
nan_blocks = pd.DataFrame()
# We filter out those timestamps where a NaN block starts
# first row of a block is where True is preceded by False in tags
nan_blocks['start_idx'] = col.index[
col['tag'] & ~
col['tag'].shift(1).fillna(False)]
        # last row of a block is where True is followed by False
nan_blocks['till_idx'] = col.index[
col['tag'] & ~
col['tag'].shift(-1).fillna(False)]
# if there are no NaNs, do nothing
if not col['tag'].any():
logger.info(message + 'column already complete')
col.drop('tag', axis=1, inplace=True)
patched_col = col
# else make a list of the NaN blocks
else:
# how long is each block
nan_blocks['span'] = (
nan_blocks['till_idx'] - nan_blocks['start_idx'] + one_period)
nan_blocks['count'] = (nan_blocks['span'] / one_period)
# sort the nan_blocks DataFrame to put longest missing block on top
nan_blocks = (nan_blocks.sort_values('count', ascending=False)
.reset_index(drop=True))
col.drop('tag', axis=1, inplace=True)
nan_list = nan_blocks.stack().to_frame()
nan_list.columns = col.columns
if patch:
patched_col, patched_blocks = choose_fill_method(
message, col, col_name, nan_blocks, df, one_period)
overview.loc['interpolated_blocks', col_name] = patched_blocks
overview.loc['interpolated_values', col_name] = patched_col.iloc[
:, 0].count() - col.iloc[:, 0].count()
else:
patched_col = col
if patched.empty:
patched = patched_col
else:
patched = patched.combine_first(patched_col)
if nan_table.empty:
nan_table = nan_list
else:
nan_table = nan_table.combine_first(nan_list)
overview.loc['nan_blocks', col_name] = nan_blocks.shape[0]
# # append the marker to the DataFrame
# marker_col = marker_col.to_frame()
# tuples = [('interpolated_values', '', '', '', '', '')]
# marker_col.columns = pd.MultiIndex.from_tuples(tuples, names=headers)
# patched = pd.concat([patched, marker_col], axis=1)
# set the level names for the output
nan_table.columns.names = headers
nan_table.columns = nan_table.columns.droplevel(['source', 'web', 'unit'])
patched.columns.names = headers
return patched, nan_table, overview
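# Added illustrative sketch (not part of the original script): typical use on
# an hourly OPSD DataFrame whose columns carry the usual multiindex levels
# (including 'source', 'web' and 'unit'); the names below are assumptions.
def _example_find_nan(df, headers):
    patched, nan_table, overview = find_nan(df, '60min', headers, patch=True)
    # nan_table lists every block of missing values per column; overview adds
    # counts of missing and interpolated values per column.
    return patched, nan_table, overview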
def choose_fill_method(
message, col, col_name, nan_blocks, df, one_period):
    '''
    Choose the appropriate function for filling a block of missing values.
    Parameters
    ----------
    message : str
        Prefix for the log messages about the column being treated
    col : pandas.DataFrame
        A column from df as a separate DataFrame
    col_name : tuple
        tuple of header levels of column to inspect
    nan_blocks : pandas.DataFrame
        DataFrame with each row representing a block of missing data in col
    df : pandas.DataFrame
        DataFrame to patch
    one_period : pandas.Timedelta
        Time resolution of df and col (15/30/60 minutes)
    Returns
    ----------
    patched_col : pandas.DataFrame
        The column with missing-data blocks of up to 2 hours interpolated
        (price columns are returned unchanged)
    patched_blocks : int
        Number of blocks that were interpolated
    '''
for i, nan_block in nan_blocks.iterrows():
j = 0
if col_name[1] == 'price':
# Do not interpolate prices
patched_col = col
# Interpolate missing value spans up to 2 hours
elif nan_block['span'] <= timedelta(hours=2):
patched_col = my_interpolate(
i, j, nan_block, col, col_name, nan_blocks,
one_period, message)
        # Guess missing value spans longer than 2 hours based on other TSOs
# (Only for German wind and solar generation data)
elif col_name[0][:2] == 'DE' and col_name[2] == 'generation_actual':
# NOT IMPLEMENTED
# patched_col = impute(nan_block, col, col_name, nan_blocks, df, one_period)
# instead:
patched_col = col
else:
j += 1
patched_col = col
patched_blocks = nan_blocks.shape[0] - j
logger.info(message + 'interpolated %s blocks', patched_blocks)
return patched_col, patched_blocks
def my_interpolate(
i, j, nan_block, col, col_name, nan_blocks, one_period, message):
'''
Interpolate one missing value block in one column as described by
nan_block.
The default pd.Series.interpolate() function does not work if
interpolation is to be restricted to periods of a certain length.
    (A limit argument can be specified, but it causes longer periods of
    missing data to be filled only partially.)
    Parameters
    ----------
    i : int
        Counter for total number of blocks of missing data
    j : int
        Counter for the number of blocks of missing data not treated by this
        function
    nan_block : pandas.Series
        Contains information on one block of missing data in col:
        count: number of missing periods in the block
        span: length of the block as a Timedelta
        start_idx: timestamp of the first missing value
        till_idx: timestamp of the last missing value
See choose_fill_method() for info on other parameters.
Returns
----------
patched_col : pandas.DataFrame
The column with all nan_blocks treated for periods up to 2:00.
'''
to_fill = slice(nan_block['start_idx'] - one_period,
nan_block['till_idx'] + one_period)
comment_now = slice(nan_block['start_idx'], nan_block['till_idx'])
col.iloc[:, 0].loc[to_fill] = col.iloc[:, 0].loc[to_fill].interpolate()
# Create a marker column to mark where data has been interpolated
col_name_str = '_'.join(
[level for level in col_name[0:3] if not level == ''])
# comment_before = marker_col.notnull()
# comment_again = comment_before.loc[comment_now]
# if comment_again.any():
# marker_col[comment_before & comment_again] = marker_col + \
# ' | ' + col_name_str
# else:
# marker_col.loc[comment_now] = col_name_str
return col
# Not implemented: For the generation timeseries, larger gaps are guessed
# by up-/down scaling the data from other balancing areas to fit the
# expected magnitude of the missing data.
def impute(nan_block, col, col_name, nan_blocks, df, one_period):
'''
    Impute missing value spans longer than 2 hours based on other TSOs.
Parameters
----------
nan_block : pandas.Series
Contains information on one block of missing data in col
col : pandas.DataFrame
A column from df as a separate DataFrame
col_name : tuple
tuple of header levels of column to inspect
nan_blocks : : pandas.DataFrame
DataFrame with each row representing a block of missing data in col
df : pandas.DataFrame
DataFrame to patch
one_period : pandas.Timedelta
Time resolution of df and col (15/60 minutes)
'''
#logger.info('guessed %s entries after %s', row['count'], row['start_idx'])
day_before = pd.DatetimeIndex(
freq='15min',
start=nan_block['start_idx'] - timedelta(hours=24),
end=nan_block['start_idx'] - one_period)
to_fill = pd.DatetimeIndex(
freq='15min',
start=nan_block['start_idx'],
end=nan_block['till_idx'])
# other_tsos = [c[1] for c in compact.drop(col_name, axis=1)
#.loc[:,(col_name[0],slice(None),col_name[2])].columns.tolist()]
other_tsos = [tso
for tso in ['DE-50Hertz', 'DE-Amprion', 'DE-TenneT', 'DE-TransnetBW']
if tso != col_name[1]]
# select columns with data for same technology (wind/solar) but from other
# TSOs
similar = df.loc[:, (col_name[0], other_tsos, col_name[2])]
# calculate the sum using columns without NaNs the day
# before or during the period to be guessed
similar = similar.dropna(
axis=1,
how='any',
subset=day_before.append(to_fill)
).sum(axis=1)
# calculate scaling factor for other TSO data
factor = similar.loc[day_before].sum(
axis=0) / col.loc[day_before, :].sum(axis=0)
guess = similar.loc[to_fill] / float(factor)
col.iloc[:, 0].loc[to_fill] = guess
a = float(col.iloc[:, 0].loc[nan_block['start_idx'] - one_period])
b = float(col.iloc[:, 0].loc[nan_block['start_idx']])
if a == 0:
deviation = '{} absolut'.format(a - b)
else:
deviation = '{:.2f} %'.format((a - b) / a * 100)
logger.info(
'%s : \n '
'guessed %s entries after %s \n '
'last non-missing: %s \n '
'first guessed: %s \n '
'deviation of first guess from last known value: %s',
col_name[0:3], nan_block['count'], nan_block[
'start_idx'], a, b, deviation
)
return col
def resample_markers(group, drop_region='x'):
'''Resample marker column from 15(30) to 60 min
Parameters
----------
group: pd.Series
Series of 2(4) succeeding half(quarter)-hourly values from the marker column
that have to be combined into one.
drop_region: string
region to drop from marker column
Returns
----------
aggregated_marker : str or np.nan
If there were any markers in group: the unique values from the marker
column group joined together in one string, np.nan otherwise
'''
if group.notnull().values.any():
# unpack string of markers into a list
unpacked = [mark
for line in group if type(line) is str
for mark in line.split(' | ') if not mark.startswith(drop_region)] # [:-1]]
# keep only unique values from the list
aggregated_marker = ' | '.join(set(unpacked)) # + ' | '
else:
aggregated_marker = np.nan
return aggregated_marker
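# Added illustrative sketch (not part of the original script): the function
# above is meant to be handed to a pandas resampler, e.g.
# ``marker.resample('60min').apply(resample_markers)``; called directly on a
# small group it behaves like this (the order of the joined markers may vary):
def _example_resample_markers():
    group = pd.Series(['DE_wind | AT_load', np.nan, 'AT_load', np.nan])
    return resample_markers(group, drop_region='x')  # e.g. 'DE_wind | AT_load'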
def glue_markers(marker_1, marker_2):
'''Concatenate two marker columns from two DataFrames to be combined,
using ' | ' as delimiter.
Parameters
----------
marker_1, marker_2 : pd.Series
        Series of strings / np.nan indicating which columns have been interpolated in one DataFrame,
        e.g.: 'ES_load_entsoe_transparency | ES_solar_generation_actual | ES_wind_onshore_generation_actual | LV_load_entsoe_transparency'.
Returns
----------
glued : pd.Series
The marker for the combind DataFrame
'''
both = marker_1.notnull() & marker_2.notnull()
only_2 = marker_1.isnull() & marker_2.notnull()
glued = marker_1.copy()
glued.loc[both] = marker_1.str.cat(others=marker_2, sep=' | ')
glued.loc[only_2] = marker_2
return glued
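# Added illustrative sketch (not part of the original script), assuming two
# index-aligned marker columns from the DataFrames that are being combined:
def _example_glue_markers():
    m1 = pd.Series(['DE_load', np.nan, 'AT_load'])
    m2 = pd.Series([np.nan, 'CH_load', 'CH_load'])
    # -> ['DE_load', 'CH_load', 'AT_load | CH_load']
    return glue_markers(m1, m2)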
def mark_own_calc(col_name):
    '''Prefix the entry in the 4th level of a multiindex-column-name-tuple
    (which contains the 'source' name) with 'own calculation based on '
Parameters
----------
col_name: tuple
Multiindex-column-name-tuple.
Returns
----------
col_name : tuple
The same tuple with the 4th entry prepended with
'own calculation based on '
'''
col_name = list(col_name)
if not col_name[3].startswith('own calculation'):
col_name[3] = 'own calculation based on ' + col_name[3]
col_name = tuple(col_name)
return col_name
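# Added illustrative sketch (not part of the original script); the column
# name levels below are made up for illustration:
def _example_mark_own_calc():
    col_name = ('DE', 'wind', 'generation_actual', 'ENTSO-E', 'some url', 'MW')
    return mark_own_calc(col_name)
    # -> ('DE', 'wind', 'generation_actual',
    #     'own calculation based on ENTSO-E', 'some url', 'MW')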
| mit | -2,187,778,753,264,839,200 | 32.521127 | 140 | 0.60014 | false |
30loops/django-sphinxdoc | setup.py | 1 | 1150 | #! /usr/bin/env python
from distutils.core import setup
import sys
reload(sys).setdefaultencoding('Utf-8')
setup(
name='django-sphinxdoc',
version='1.0',
author='Stefan Scherfke',
author_email='stefan at sofa-rockers.org',
description='Easily integrate Sphinx documentation into your website.',
long_description=open('README.txt').read(),
url='http://stefan.sofa-rockers.org/django-sphinxdoc/',
download_url='http://bitbucket.org/scherfke/django-sphinxdoc/downloads/',
license='BSD',
packages=[
'sphinxdoc',
'sphinxdoc.management',
'sphinxdoc.management.commands',
],
package_data={
'sphinxdoc': ['templates/sphinxdoc/*'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
| bsd-3-clause | 3,340,625,887,401,585,700 | 30.944444 | 77 | 0.630435 | false |
elffersj/cnfgen | tests/test_randomcnf.py | 1 | 1406 | from cnfformula import RandomKCNF
from cnfformula.families.randomformulas import all_clauses
from . import TestCNFBase
class TestRandomCNF(TestCNFBase) :
def check_random_cnf(self,width,numvariables,numclauses) :
F = RandomKCNF(width,numvariables,numclauses)
self.assertListEqual(list(F.variables()),
['x_{0}'.format(i) for i in range(1,numvariables+1)])
self.assertEqual(len(F),numclauses)
self.assertEqual(len(set(frozenset(x) for x in F.clauses())),numclauses)
def test_empty_cnf(self) :
self.check_random_cnf(0,0,0)
def test_empty_cnf_with_vars(self) :
self.check_random_cnf(0,10,0)
def test_random_cnf_medium(self) :
self.check_random_cnf(3,10,50)
def test_full(self):
self.check_random_cnf(3,5,5*4*3/(1*2*3)*2**3)
def test_too_full(self):
with self.assertRaises(ValueError):
RandomKCNF(3,5,5*4*3/(1*2*3)*2**5+1)
def test_too_wide(self):
with self.assertRaises(ValueError):
RandomKCNF(10,5,1)
def test_negative_width(self):
with self.assertRaises(ValueError):
RandomKCNF(-1,5,1)
def test_negative_variables(self):
with self.assertRaises(ValueError):
RandomKCNF(3,-1,1)
def test_negative_clauses(self):
with self.assertRaises(ValueError):
RandomKCNF(3,5,-1)
| gpl-3.0 | -6,824,272,867,502,637,000 | 30.954545 | 82 | 0.631579 | false |
leppa/home-assistant | homeassistant/components/owntracks/messages.py | 1 | 11088 | """OwnTracks Message handlers."""
import json
import logging
from nacl.encoding import Base64Encoder
from nacl.secret import SecretBox
from homeassistant.components import zone as zone_comp
from homeassistant.components.device_tracker import (
SOURCE_TYPE_BLUETOOTH_LE,
SOURCE_TYPE_GPS,
)
from homeassistant.const import STATE_HOME
from homeassistant.util import decorator, slugify
from .helper import supports_encryption
_LOGGER = logging.getLogger(__name__)
HANDLERS = decorator.Registry()
def get_cipher():
"""Return decryption function and length of key.
Async friendly.
"""
def decrypt(ciphertext, key):
"""Decrypt ciphertext using key."""
return SecretBox(key).decrypt(ciphertext, encoder=Base64Encoder)
return (SecretBox.KEY_SIZE, decrypt)
def _parse_topic(topic, subscribe_topic):
"""Parse an MQTT topic {sub_topic}/user/dev, return (user, dev) tuple.
Async friendly.
"""
subscription = subscribe_topic.split("/")
try:
user_index = subscription.index("#")
except ValueError:
_LOGGER.error("Can't parse subscription topic: '%s'", subscribe_topic)
raise
topic_list = topic.split("/")
try:
user, device = topic_list[user_index], topic_list[user_index + 1]
except IndexError:
_LOGGER.error("Can't parse topic: '%s'", topic)
raise
return user, device
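# Added illustrative sketch (not part of the original module), assuming the
# default subscribe topic "owntracks/#":
def _example_parse_topic():
    # -> ("jane", "phone"); the wildcard position marks where user/device start
    return _parse_topic("owntracks/jane/phone", "owntracks/#")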
def _parse_see_args(message, subscribe_topic):
"""Parse the OwnTracks location parameters, into the format see expects.
Async friendly.
"""
user, device = _parse_topic(message["topic"], subscribe_topic)
dev_id = slugify(f"{user}_{device}")
kwargs = {"dev_id": dev_id, "host_name": user, "attributes": {}}
if message["lat"] is not None and message["lon"] is not None:
kwargs["gps"] = (message["lat"], message["lon"])
else:
kwargs["gps"] = None
if "acc" in message:
kwargs["gps_accuracy"] = message["acc"]
if "batt" in message:
kwargs["battery"] = message["batt"]
if "vel" in message:
kwargs["attributes"]["velocity"] = message["vel"]
if "tid" in message:
kwargs["attributes"]["tid"] = message["tid"]
if "addr" in message:
kwargs["attributes"]["address"] = message["addr"]
if "cog" in message:
kwargs["attributes"]["course"] = message["cog"]
if "bs" in message:
kwargs["attributes"]["battery_status"] = message["bs"]
if "t" in message:
if message["t"] in ("c", "u"):
kwargs["source_type"] = SOURCE_TYPE_GPS
if message["t"] == "b":
kwargs["source_type"] = SOURCE_TYPE_BLUETOOTH_LE
return dev_id, kwargs
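# Added illustrative sketch (not part of the original module); the payload
# below is a made-up minimal OwnTracks location message:
def _example_parse_see_args():
    message = {
        "topic": "owntracks/jane/phone",
        "lat": 47.37,
        "lon": 8.54,
        "acc": 10,
        "batt": 80,
        "tid": "JP",
    }
    dev_id, kwargs = _parse_see_args(message, "owntracks/#")
    # dev_id == "jane_phone"; kwargs carries gps, gps_accuracy, battery and
    # the attributes dict with the tracker id.
    return dev_id, kwargs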
def _set_gps_from_zone(kwargs, location, zone):
"""Set the see parameters from the zone parameters.
Async friendly.
"""
if zone is not None:
kwargs["gps"] = (zone.attributes["latitude"], zone.attributes["longitude"])
kwargs["gps_accuracy"] = zone.attributes["radius"]
kwargs["location_name"] = location
return kwargs
def _decrypt_payload(secret, topic, ciphertext):
"""Decrypt encrypted payload."""
try:
if supports_encryption():
keylen, decrypt = get_cipher()
else:
_LOGGER.warning("Ignoring encrypted payload because nacl not installed")
return None
except OSError:
_LOGGER.warning("Ignoring encrypted payload because nacl not installed")
return None
if isinstance(secret, dict):
key = secret.get(topic)
else:
key = secret
if key is None:
_LOGGER.warning(
"Ignoring encrypted payload because no decryption key known for topic %s",
topic,
)
return None
key = key.encode("utf-8")
key = key[:keylen]
key = key.ljust(keylen, b"\0")
try:
message = decrypt(ciphertext, key)
message = message.decode("utf-8")
_LOGGER.debug("Decrypted payload: %s", message)
return message
except ValueError:
_LOGGER.warning(
"Ignoring encrypted payload because unable to decrypt using key for topic %s",
topic,
)
return None
@HANDLERS.register("location")
async def async_handle_location_message(hass, context, message):
"""Handle a location message."""
if not context.async_valid_accuracy(message):
return
if context.events_only:
_LOGGER.debug("Location update ignored due to events_only setting")
return
dev_id, kwargs = _parse_see_args(message, context.mqtt_topic)
if context.regions_entered[dev_id]:
_LOGGER.debug(
"Location update ignored, inside region %s", context.regions_entered[-1]
)
return
context.async_see(**kwargs)
context.async_see_beacons(hass, dev_id, kwargs)
async def _async_transition_message_enter(hass, context, message, location):
"""Execute enter event."""
zone = hass.states.get("zone.{}".format(slugify(location)))
dev_id, kwargs = _parse_see_args(message, context.mqtt_topic)
if zone is None and message.get("t") == "b":
# Not a HA zone, and a beacon so mobile beacon.
# kwargs will contain the lat/lon of the beacon
# which is not where the beacon actually is
# and is probably set to 0/0
beacons = context.mobile_beacons_active[dev_id]
if location not in beacons:
beacons.add(location)
_LOGGER.info("Added beacon %s", location)
context.async_see_beacons(hass, dev_id, kwargs)
else:
# Normal region
regions = context.regions_entered[dev_id]
if location not in regions:
regions.append(location)
_LOGGER.info("Enter region %s", location)
_set_gps_from_zone(kwargs, location, zone)
context.async_see(**kwargs)
context.async_see_beacons(hass, dev_id, kwargs)
async def _async_transition_message_leave(hass, context, message, location):
"""Execute leave event."""
dev_id, kwargs = _parse_see_args(message, context.mqtt_topic)
regions = context.regions_entered[dev_id]
if location in regions:
regions.remove(location)
beacons = context.mobile_beacons_active[dev_id]
if location in beacons:
beacons.remove(location)
_LOGGER.info("Remove beacon %s", location)
context.async_see_beacons(hass, dev_id, kwargs)
else:
new_region = regions[-1] if regions else None
if new_region:
# Exit to previous region
zone = hass.states.get("zone.{}".format(slugify(new_region)))
_set_gps_from_zone(kwargs, new_region, zone)
_LOGGER.info("Exit to %s", new_region)
context.async_see(**kwargs)
context.async_see_beacons(hass, dev_id, kwargs)
return
_LOGGER.info("Exit to GPS")
# Check for GPS accuracy
if context.async_valid_accuracy(message):
context.async_see(**kwargs)
context.async_see_beacons(hass, dev_id, kwargs)
@HANDLERS.register("transition")
async def async_handle_transition_message(hass, context, message):
"""Handle a transition message."""
if message.get("desc") is None:
_LOGGER.error(
"Location missing from `Entering/Leaving` message - "
"please turn `Share` on in OwnTracks app"
)
return
# OwnTracks uses - at the start of a beacon zone
# to switch on 'hold mode' - ignore this
location = message["desc"].lstrip("-")
# Create a layer of indirection for Owntracks instances that may name
# regions differently than their HA names
if location in context.region_mapping:
location = context.region_mapping[location]
if location.lower() == "home":
location = STATE_HOME
if message["event"] == "enter":
await _async_transition_message_enter(hass, context, message, location)
elif message["event"] == "leave":
await _async_transition_message_leave(hass, context, message, location)
else:
_LOGGER.error(
"Misformatted mqtt msgs, _type=transition, event=%s", message["event"]
)
async def async_handle_waypoint(hass, name_base, waypoint):
"""Handle a waypoint."""
name = waypoint["desc"]
pretty_name = f"{name_base} - {name}"
lat = waypoint["lat"]
lon = waypoint["lon"]
rad = waypoint["rad"]
# check zone exists
entity_id = zone_comp.ENTITY_ID_FORMAT.format(slugify(pretty_name))
# Check if state already exists
if hass.states.get(entity_id) is not None:
return
zone = zone_comp.Zone(
hass, pretty_name, lat, lon, rad, zone_comp.ICON_IMPORT, False
)
zone.entity_id = entity_id
await zone.async_update_ha_state()
@HANDLERS.register("waypoint")
@HANDLERS.register("waypoints")
async def async_handle_waypoints_message(hass, context, message):
"""Handle a waypoints message."""
if not context.import_waypoints:
return
if context.waypoint_whitelist is not None:
user = _parse_topic(message["topic"], context.mqtt_topic)[0]
if user not in context.waypoint_whitelist:
return
if "waypoints" in message:
wayps = message["waypoints"]
else:
wayps = [message]
_LOGGER.info("Got %d waypoints from %s", len(wayps), message["topic"])
name_base = " ".join(_parse_topic(message["topic"], context.mqtt_topic))
for wayp in wayps:
await async_handle_waypoint(hass, name_base, wayp)
@HANDLERS.register("encrypted")
async def async_handle_encrypted_message(hass, context, message):
"""Handle an encrypted message."""
if "topic" not in message and isinstance(context.secret, dict):
_LOGGER.error("You cannot set per topic secrets when using HTTP")
return
plaintext_payload = _decrypt_payload(
context.secret, message.get("topic"), message["data"]
)
if plaintext_payload is None:
return
decrypted = json.loads(plaintext_payload)
if "topic" in message and "topic" not in decrypted:
decrypted["topic"] = message["topic"]
await async_handle_message(hass, context, decrypted)
@HANDLERS.register("lwt")
@HANDLERS.register("configuration")
@HANDLERS.register("beacon")
@HANDLERS.register("cmd")
@HANDLERS.register("steps")
@HANDLERS.register("card")
async def async_handle_not_impl_msg(hass, context, message):
"""Handle valid but not implemented message types."""
_LOGGER.debug("Not handling %s message: %s", message.get("_type"), message)
async def async_handle_unsupported_msg(hass, context, message):
"""Handle an unsupported or invalid message type."""
_LOGGER.warning("Received unsupported message type: %s.", message.get("_type"))
async def async_handle_message(hass, context, message):
"""Handle an OwnTracks message."""
msgtype = message.get("_type")
_LOGGER.debug("Received %s", message)
handler = HANDLERS.get(msgtype, async_handle_unsupported_msg)
await handler(hass, context, message)
| apache-2.0 | 1,786,945,677,978,614,800 | 30.589744 | 90 | 0.637085 | false |
jonathadv/PyPoABus | tests/test_cli.py | 1 | 3362 | # -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
# Disabling the below pylint warnings in order to use long names convention in the tests
# and because some entities are used seamlessly instead of being directly called.
# pylint: disable=invalid-name
# pylint: disable=unused-import
"""
tests.TestIntegration
------------------
The integration test set for functions in pypoabus.__main__
"""
import pytest
import pytest_mock
import requests
from pypoabus import __main__, __title__, __version__
from pypoabus.pypoabus import BusLine
def test_get_version(mock, capsys):
""" Check if -v returns the correct application version """
mock.patch('sys.argv', ['', '-v'])
expected = '{} {}\n'.format(__title__, __version__)
try:
__main__.main()
except SystemExit:
pass
capture_result = capsys.readouterr()
assert capture_result.out == expected
def test_get_line_list_from_valid_zone(mock, capsys):
""" Checks if cli returns the correct bus list in unformatted json
    for a valid zone
"""
expected = '{ "list": ["l1", "l2"] }\n'
mock.patch('sys.argv', ['', '-l', 'south'])
mock.patch('pypoabus.pypoabus.list_bus_lines', return_value='["l1", "l2"]')
try:
__main__.main()
except SystemExit:
pass
capture_result = capsys.readouterr()
assert capture_result.out == expected
def test_get_line_list_from_invalid_zone(mock, capsys):
""" Checks if cli returns the correct error message
    for an invalid zone argument
"""
zone = 'NOT_VALID_ZONE'
mock.patch('sys.argv', ['', '-l', zone])
expected = "usage: {} [-h] [-v] [-l zone | -t line_code] [-f format]" \
" [-d]\npypoabus: error: argument -l/--list: " \
"invalid choice: '{}' (choose from 'north', " \
"'south', 'east', 'public')\n".format(__title__, zone)
try:
__main__.main()
except SystemExit:
pass
capture_result = capsys.readouterr()
assert capture_result.err == expected
def test_get_timetable_from_valid_line(mock, capsys):
""" Checks if cli returns the correct bus timetable in unformatted json
for the correct busline
"""
expected = '{"code": "bar", "name": "foo", "schedules": []}\n'
mock.patch('sys.argv', ['', '-t', 'non_existing_line'])
mock.patch('pypoabus.pypoabus.get_bus_timetable', return_value=BusLine('foo', 'bar'))
try:
__main__.main()
except SystemExit:
pass
capture_result = capsys.readouterr()
assert capture_result.out == expected
def test_get_timetable_from_invalid_line(mock, capsys):
""" Checks if cli returns the correct error message
for the incorrect busline argument
"""
expected = 'pypoabus: Error to connect to the server: ' \
'Unable to get EPTC page content. HTTP code: 500, reason: ' \
'Internal Server Error\n\n'
mocked_response = requests.Response()
mocked_response.status_code = 500
mocked_response.reason = 'Internal Server Error'
mock.patch('sys.argv', ['', '-t', 'non_existing_line'])
mock.patch('requests.get', return_value=mocked_response)
try:
__main__.main()
except SystemExit:
pass
capture_result = capsys.readouterr()
assert capture_result.err == expected
| mit | 7,278,359,651,027,564,000 | 29.017857 | 89 | 0.620167 | false |
fimad/mitmproxy | netlib/tcp.py | 1 | 32312 | from __future__ import (absolute_import, print_function, division)
import os
import select
import socket
import sys
import threading
import time
import traceback
import binascii
from six.moves import range
import certifi
from backports import ssl_match_hostname
import six
import OpenSSL
from OpenSSL import SSL
from . import certutils, version_check, utils
# This is a rather hackish way to make sure that
# the latest version of pyOpenSSL is actually installed.
from netlib.exceptions import InvalidCertificateException, TcpReadIncomplete, TlsException, \
TcpTimeout, TcpDisconnect, TcpException
version_check.check_pyopenssl_version()
if six.PY2:
socket_fileobject = socket._fileobject
else:
socket_fileobject = socket.SocketIO
EINTR = 4
if os.environ.get("NO_ALPN"):
HAS_ALPN = False
else:
HAS_ALPN = OpenSSL._util.lib.Cryptography_HAS_ALPN
# To enable all SSL methods use: SSLv23
# then add options to disable certain methods
# https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3
SSL_BASIC_OPTIONS = (
SSL.OP_CIPHER_SERVER_PREFERENCE
)
if hasattr(SSL, "OP_NO_COMPRESSION"):
SSL_BASIC_OPTIONS |= SSL.OP_NO_COMPRESSION
SSL_DEFAULT_METHOD = SSL.SSLv23_METHOD
SSL_DEFAULT_OPTIONS = (
SSL.OP_NO_SSLv2 |
SSL.OP_NO_SSLv3 |
SSL_BASIC_OPTIONS
)
if hasattr(SSL, "OP_NO_COMPRESSION"):
SSL_DEFAULT_OPTIONS |= SSL.OP_NO_COMPRESSION
"""
Map a reasonable SSL version specification into the format OpenSSL expects.
Don't ask...
https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3
"""
sslversion_choices = {
"all": (SSL.SSLv23_METHOD, SSL_BASIC_OPTIONS),
# SSLv23_METHOD + NO_SSLv2 + NO_SSLv3 == TLS 1.0+
# TLSv1_METHOD would be TLS 1.0 only
"secure": (SSL.SSLv23_METHOD, (SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL_BASIC_OPTIONS)),
"SSLv2": (SSL.SSLv2_METHOD, SSL_BASIC_OPTIONS),
"SSLv3": (SSL.SSLv3_METHOD, SSL_BASIC_OPTIONS),
"TLSv1": (SSL.TLSv1_METHOD, SSL_BASIC_OPTIONS),
"TLSv1_1": (SSL.TLSv1_1_METHOD, SSL_BASIC_OPTIONS),
"TLSv1_2": (SSL.TLSv1_2_METHOD, SSL_BASIC_OPTIONS),
}
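# Added illustrative sketch (not part of the original module): callers unpack
# an entry of the table above into a pyOpenSSL context.
def _example_ssl_context(version="secure"):
    method, options = sslversion_choices[version]
    context = SSL.Context(method)
    context.set_options(options)
    return context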
class SSLKeyLogger(object):
def __init__(self, filename):
self.filename = filename
self.f = None
self.lock = threading.Lock()
# required for functools.wraps, which pyOpenSSL uses.
__name__ = "SSLKeyLogger"
def __call__(self, connection, where, ret):
if where == SSL.SSL_CB_HANDSHAKE_DONE and ret == 1:
with self.lock:
if not self.f:
d = os.path.dirname(self.filename)
if not os.path.isdir(d):
os.makedirs(d)
self.f = open(self.filename, "ab")
self.f.write(b"\r\n")
client_random = binascii.hexlify(connection.client_random())
masterkey = binascii.hexlify(connection.master_key())
self.f.write(b"CLIENT_RANDOM %s %s\r\n" % (client_random, masterkey))
self.f.flush()
def close(self):
with self.lock:
if self.f:
self.f.close()
@staticmethod
def create_logfun(filename):
if filename:
return SSLKeyLogger(filename)
return False
log_ssl_key = SSLKeyLogger.create_logfun(
os.getenv("MITMPROXY_SSLKEYLOGFILE") or os.getenv("SSLKEYLOGFILE"))
class _FileLike(object):
BLOCKSIZE = 1024 * 32
def __init__(self, o):
self.o = o
self._log = None
self.first_byte_timestamp = None
def set_descriptor(self, o):
self.o = o
def __getattr__(self, attr):
return getattr(self.o, attr)
def start_log(self):
"""
Starts or resets the log.
This will store all bytes read or written.
"""
self._log = []
def stop_log(self):
"""
Stops the log.
"""
self._log = None
def is_logging(self):
return self._log is not None
def get_log(self):
"""
Returns the log as a string.
"""
if not self.is_logging():
raise ValueError("Not logging!")
return b"".join(self._log)
def add_log(self, v):
if self.is_logging():
self._log.append(v)
def reset_timestamps(self):
self.first_byte_timestamp = None
class Writer(_FileLike):
def flush(self):
"""
May raise TcpDisconnect
"""
if hasattr(self.o, "flush"):
try:
self.o.flush()
except (socket.error, IOError) as v:
raise TcpDisconnect(str(v))
def write(self, v):
"""
May raise TcpDisconnect
"""
if v:
self.first_byte_timestamp = self.first_byte_timestamp or time.time()
try:
if hasattr(self.o, "sendall"):
self.add_log(v)
return self.o.sendall(v)
else:
r = self.o.write(v)
self.add_log(v[:r])
return r
except (SSL.Error, socket.error) as e:
raise TcpDisconnect(str(e))
class Reader(_FileLike):
def read(self, length):
"""
If length is -1, we read until connection closes.
"""
result = b''
start = time.time()
while length == -1 or length > 0:
if length == -1 or length > self.BLOCKSIZE:
rlen = self.BLOCKSIZE
else:
rlen = length
try:
data = self.o.read(rlen)
except SSL.ZeroReturnError:
# TLS connection was shut down cleanly
break
except (SSL.WantWriteError, SSL.WantReadError):
# From the OpenSSL docs:
# If the underlying BIO is non-blocking, SSL_read() will also return when the
# underlying BIO could not satisfy the needs of SSL_read() to continue the
# operation. In this case a call to SSL_get_error with the return value of
# SSL_read() will yield SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE.
if (time.time() - start) < self.o.gettimeout():
time.sleep(0.1)
continue
else:
raise TcpTimeout()
except socket.timeout:
raise TcpTimeout()
except socket.error as e:
raise TcpDisconnect(str(e))
except SSL.SysCallError as e:
if e.args == (-1, 'Unexpected EOF'):
break
raise TlsException(str(e))
except SSL.Error as e:
raise TlsException(str(e))
self.first_byte_timestamp = self.first_byte_timestamp or time.time()
if not data:
break
result += data
if length != -1:
length -= len(data)
self.add_log(result)
return result
def readline(self, size=None):
result = b''
bytes_read = 0
while True:
if size is not None and bytes_read >= size:
break
ch = self.read(1)
bytes_read += 1
if not ch:
break
else:
result += ch
if ch == b'\n':
break
return result
def safe_read(self, length):
"""
Like .read, but is guaranteed to either return length bytes, or
raise an exception.
"""
result = self.read(length)
if length != -1 and len(result) != length:
if not result:
raise TcpDisconnect()
else:
raise TcpReadIncomplete(
"Expected %s bytes, got %s" % (length, len(result))
)
return result
def peek(self, length):
"""
Tries to peek into the underlying file object.
Returns:
Up to the next N bytes if peeking is successful.
Raises:
TcpException if there was an error with the socket
TlsException if there was an error with pyOpenSSL.
NotImplementedError if the underlying file object is not a [pyOpenSSL] socket
"""
if isinstance(self.o, socket_fileobject):
try:
return self.o._sock.recv(length, socket.MSG_PEEK)
except socket.error as e:
raise TcpException(repr(e))
elif isinstance(self.o, SSL.Connection):
try:
if tuple(int(x) for x in OpenSSL.__version__.split(".")[:2]) > (0, 15):
return self.o.recv(length, socket.MSG_PEEK)
else:
# TODO: remove once a new version is released
# Polyfill for pyOpenSSL <= 0.15.1
# Taken from https://github.com/pyca/pyopenssl/commit/1d95dea7fea03c7c0df345a5ea30c12d8a0378d2
buf = SSL._ffi.new("char[]", length)
result = SSL._lib.SSL_peek(self.o._ssl, buf, length)
self.o._raise_ssl_error(self.o._ssl, result)
return SSL._ffi.buffer(buf, result)[:]
except SSL.Error as e:
six.reraise(TlsException, TlsException(str(e)), sys.exc_info()[2])
else:
raise NotImplementedError("Can only peek into (pyOpenSSL) sockets")
class Address(utils.Serializable):
"""
This class wraps an IPv4/IPv6 tuple to provide named attributes and
ipv6 information.
"""
def __init__(self, address, use_ipv6=False):
self.address = tuple(address)
self.use_ipv6 = use_ipv6
def get_state(self):
return {
"address": self.address,
"use_ipv6": self.use_ipv6
}
def set_state(self, state):
self.address = state["address"]
self.use_ipv6 = state["use_ipv6"]
@classmethod
def from_state(cls, state):
return Address(**state)
@classmethod
def wrap(cls, t):
if isinstance(t, cls):
return t
else:
return cls(t)
def __call__(self):
return self.address
@property
def host(self):
return self.address[0]
@property
def port(self):
return self.address[1]
@property
def use_ipv6(self):
return self.family == socket.AF_INET6
@use_ipv6.setter
def use_ipv6(self, b):
self.family = socket.AF_INET6 if b else socket.AF_INET
def __repr__(self):
return "{}:{}".format(self.host, self.port)
def __eq__(self, other):
if not other:
return False
other = Address.wrap(other)
return (self.address, self.family) == (other.address, other.family)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.address) ^ 42 # different hash than the tuple alone.
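# Illustrative sketch (hypothetical values): Address.wrap() upgrades plain
# (host, port) tuples, so both forms can be passed around interchangeably.
#
#   addr = Address.wrap(("127.0.0.1", 8080))
#   addr.host       # "127.0.0.1"
#   addr.port       # 8080
#   addr.use_ipv6   # False, since the family defaults to AF_INET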
def ssl_read_select(rlist, timeout):
"""
This is a wrapper around select.select() which also works for SSL.Connections
by taking ssl_connection.pending() into account.
Caveats:
If .pending() > 0 for any of the connections in rlist, we avoid the select syscall
and **will not include any other connections which may or may not be ready**.
Args:
rlist: wait until ready for reading
Returns:
subset of rlist which is ready for reading.
"""
return [
conn for conn in rlist
if isinstance(conn, SSL.Connection) and conn.pending() > 0
] or select.select(rlist, (), (), timeout)[0]
def close_socket(sock):
"""
Does a hard close of a socket, without emitting a RST.
"""
try:
# We already indicate that we close our end.
# may raise "Transport endpoint is not connected" on Linux
sock.shutdown(socket.SHUT_WR)
# Section 4.2.2.13 of RFC 1122 tells us that a close() with any pending
# readable data could lead to an immediate RST being sent (which is the
# case on Windows).
# http://ia600609.us.archive.org/22/items/TheUltimateSo_lingerPageOrWhyIsMyTcpNotReliable/the-ultimate-so_linger-page-or-why-is-my-tcp-not-reliable.html
#
# This in turn results in the following issue: If we send an error page
# to the client and then close the socket, the RST may be received by
# the client before the error page and the users sees a connection
# error rather than the error page. Thus, we try to empty the read
# buffer on Windows first. (see
# https://github.com/mitmproxy/mitmproxy/issues/527#issuecomment-93782988)
#
if os.name == "nt": # pragma: no cover
# We cannot rely on the shutdown()-followed-by-read()-eof technique
# proposed by the page above: Some remote machines just don't send
# a TCP FIN, which would leave us in the unfortunate situation that
# recv() would block infinitely. As a workaround, we set a timeout
# here even if we are in blocking mode.
sock.settimeout(sock.gettimeout() or 20)
            # bound the number of reads so that we don't loop forever if the
            # peer keeps sending data
for _ in range(1024 ** 3 // 4096):
# may raise a timeout/disconnect exception.
if not sock.recv(4096):
break
# Now we can close the other half as well.
sock.shutdown(socket.SHUT_RD)
except socket.error:
pass
sock.close()
class _Connection(object):
rbufsize = -1
wbufsize = -1
def _makefile(self):
"""
Set up .rfile and .wfile attributes from .connection
"""
# Ideally, we would use the Buffered IO in Python 3 by default.
# Unfortunately, the implementation of .peek() is broken for n>1 bytes,
# as it may just return what's left in the buffer and not all the bytes we want.
# As a workaround, we just use unbuffered sockets directly.
# https://mail.python.org/pipermail/python-dev/2009-June/089986.html
if six.PY2:
self.rfile = Reader(self.connection.makefile('rb', self.rbufsize))
self.wfile = Writer(self.connection.makefile('wb', self.wbufsize))
else:
self.rfile = Reader(socket.SocketIO(self.connection, "rb"))
self.wfile = Writer(socket.SocketIO(self.connection, "wb"))
def __init__(self, connection):
if connection:
self.connection = connection
self.peer_address = Address(connection.getpeername())
self._makefile()
else:
self.connection = None
self.peer_address = None
self.rfile = None
self.wfile = None
self.ssl_established = False
self.finished = False
def get_current_cipher(self):
if not self.ssl_established:
return None
name = self.connection.get_cipher_name()
bits = self.connection.get_cipher_bits()
version = self.connection.get_cipher_version()
return name, bits, version
def finish(self):
self.finished = True
# If we have an SSL connection, wfile.close == connection.close
# (We call _FileLike.set_descriptor(conn))
# Closing the socket is not our task, therefore we don't call close
# then.
if not isinstance(self.connection, SSL.Connection):
if not getattr(self.wfile, "closed", False):
try:
self.wfile.flush()
self.wfile.close()
except TcpDisconnect:
pass
self.rfile.close()
else:
try:
self.connection.shutdown()
except SSL.Error:
pass
def _create_ssl_context(self,
method=SSL_DEFAULT_METHOD,
options=SSL_DEFAULT_OPTIONS,
verify_options=SSL.VERIFY_NONE,
ca_path=None,
ca_pemfile=None,
cipher_list=None,
alpn_protos=None,
alpn_select=None,
alpn_select_callback=None,
):
"""
Creates an SSL Context.
:param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD, TLSv1_1_METHOD, or TLSv1_2_METHOD
:param options: A bit field consisting of OpenSSL.SSL.OP_* values
:param verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values
:param ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool
:param ca_pemfile: Path to a PEM formatted trusted CA certificate
:param cipher_list: A textual OpenSSL cipher list, see https://www.openssl.org/docs/apps/ciphers.html
:rtype : SSL.Context
"""
context = SSL.Context(method)
# Options (NO_SSLv2/3)
if options is not None:
context.set_options(options)
# Verify Options (NONE/PEER and trusted CAs)
if verify_options is not None:
def verify_cert(conn, x509, errno, err_depth, is_cert_verified):
if not is_cert_verified:
self.ssl_verification_error = dict(errno=errno,
depth=err_depth)
return is_cert_verified
context.set_verify(verify_options, verify_cert)
if ca_path is None and ca_pemfile is None:
ca_pemfile = certifi.where()
context.load_verify_locations(ca_pemfile, ca_path)
# Workaround for
# https://github.com/pyca/pyopenssl/issues/190
# https://github.com/mitmproxy/mitmproxy/issues/472
# Options already set before are not cleared.
context.set_mode(SSL._lib.SSL_MODE_AUTO_RETRY)
# Cipher List
if cipher_list:
try:
context.set_cipher_list(cipher_list)
                # TODO: maybe change this once newer pyOpenSSL APIs are available
context.set_tmp_ecdh(OpenSSL.crypto.get_elliptic_curve('prime256v1'))
except SSL.Error as v:
raise TlsException("SSL cipher specification error: %s" % str(v))
# SSLKEYLOGFILE
if log_ssl_key:
context.set_info_callback(log_ssl_key)
if HAS_ALPN:
if alpn_protos is not None:
# advertise application layer protocols
context.set_alpn_protos(alpn_protos)
elif alpn_select is not None and alpn_select_callback is None:
# select application layer protocol
def alpn_select_callback(conn_, options):
if alpn_select in options:
return bytes(alpn_select)
                    else:  # pragma: no cover
return options[0]
context.set_alpn_select_callback(alpn_select_callback)
elif alpn_select_callback is not None and alpn_select is None:
context.set_alpn_select_callback(alpn_select_callback)
elif alpn_select_callback is not None and alpn_select is not None:
raise TlsException("ALPN error: only define alpn_select (string) OR alpn_select_callback (method).")
return context
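# Illustrative sketch (assumed argument values): subclasses feed their own
# settings through _create_ssl_context(), e.g. a TLSv1.2-only context with a
# restricted cipher list.
#
#   context = self._create_ssl_context(
#       method=SSL.TLSv1_2_METHOD,
#       cipher_list="ECDHE-RSA-AES128-GCM-SHA256",
#   )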
class TCPClient(_Connection):
def __init__(self, address, source_address=None):
super(TCPClient, self).__init__(None)
self.address = address
self.source_address = source_address
self.cert = None
self.server_certs = []
self.ssl_verification_error = None
self.sni = None
@property
def address(self):
return self.__address
@address.setter
def address(self, address):
if address:
self.__address = Address.wrap(address)
else:
self.__address = None
@property
def source_address(self):
return self.__source_address
@source_address.setter
def source_address(self, source_address):
if source_address:
self.__source_address = Address.wrap(source_address)
else:
self.__source_address = None
def close(self):
# Make sure to close the real socket, not the SSL proxy.
# OpenSSL is really good at screwing up, i.e. when trying to recv from a failed connection,
# it tries to renegotiate...
if isinstance(self.connection, SSL.Connection):
close_socket(self.connection._socket)
else:
close_socket(self.connection)
def create_ssl_context(self, cert=None, alpn_protos=None, **sslctx_kwargs):
context = self._create_ssl_context(
alpn_protos=alpn_protos,
**sslctx_kwargs)
# Client Certs
if cert:
try:
context.use_privatekey_file(cert)
context.use_certificate_file(cert)
except SSL.Error as v:
raise TlsException("SSL client certificate error: %s" % str(v))
return context
def convert_to_ssl(self, sni=None, alpn_protos=None, **sslctx_kwargs):
"""
cert: Path to a file containing both client cert and private key.
options: A bit field consisting of OpenSSL.SSL.OP_* values
verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values
ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool
ca_pemfile: Path to a PEM formatted trusted CA certificate
"""
verification_mode = sslctx_kwargs.get('verify_options', None)
if verification_mode == SSL.VERIFY_PEER and not sni:
raise TlsException("Cannot validate certificate hostname without SNI")
context = self.create_ssl_context(
alpn_protos=alpn_protos,
**sslctx_kwargs
)
self.connection = SSL.Connection(context, self.connection)
if sni:
self.sni = sni
self.connection.set_tlsext_host_name(sni)
self.connection.set_connect_state()
try:
self.connection.do_handshake()
except SSL.Error as v:
if self.ssl_verification_error:
raise InvalidCertificateException("SSL handshake error: %s" % repr(v))
else:
raise TlsException("SSL handshake error: %s" % repr(v))
else:
# Fix for pre v1.0 OpenSSL, which doesn't throw an exception on
# certificate validation failure
if verification_mode == SSL.VERIFY_PEER and self.ssl_verification_error is not None:
raise InvalidCertificateException("SSL handshake error: certificate verify failed")
self.cert = certutils.SSLCert(self.connection.get_peer_certificate())
# Keep all server certificates in a list
for i in self.connection.get_peer_cert_chain():
self.server_certs.append(certutils.SSLCert(i))
# Validate TLS Hostname
try:
crt = dict(
subjectAltName=[("DNS", x.decode("ascii", "strict")) for x in self.cert.altnames]
)
if self.cert.cn:
crt["subject"] = [[["commonName", self.cert.cn.decode("ascii", "strict")]]]
if sni:
hostname = sni.decode("ascii", "strict")
else:
hostname = "no-hostname"
ssl_match_hostname.match_hostname(crt, hostname)
except (ValueError, ssl_match_hostname.CertificateError) as e:
self.ssl_verification_error = dict(depth=0, errno="Invalid Hostname")
if verification_mode == SSL.VERIFY_PEER:
raise InvalidCertificateException("Presented certificate for {} is not valid: {}".format(sni, str(e)))
self.ssl_established = True
self.rfile.set_descriptor(self.connection)
self.wfile.set_descriptor(self.connection)
def connect(self):
try:
connection = socket.socket(self.address.family, socket.SOCK_STREAM)
if self.source_address:
connection.bind(self.source_address())
connection.connect(self.address())
self.source_address = Address(connection.getsockname())
except (socket.error, IOError) as err:
raise TcpException(
'Error connecting to "%s": %s' %
(self.address.host, err))
self.connection = connection
self.peer_address = Address(connection.getpeername())
self._makefile()
def settimeout(self, n):
self.connection.settimeout(n)
def gettimeout(self):
return self.connection.gettimeout()
def get_alpn_proto_negotiated(self):
if HAS_ALPN and self.ssl_established:
return self.connection.get_alpn_proto_negotiated()
else:
return b""
class BaseHandler(_Connection):
"""
The instantiator is expected to call the handle() and finish() methods.
"""
def __init__(self, connection, address, server):
super(BaseHandler, self).__init__(connection)
self.address = Address.wrap(address)
self.server = server
self.clientcert = None
def create_ssl_context(self,
cert, key,
handle_sni=None,
request_client_cert=None,
chain_file=None,
dhparams=None,
extra_chain_certs=None,
**sslctx_kwargs):
"""
cert: A certutils.SSLCert object or the path to a certificate
chain file.
handle_sni: SNI handler, should take a connection object. Server
name can be retrieved like this:
connection.get_servername()
And you can specify the connection keys as follows:
new_context = Context(TLSv1_METHOD)
new_context.use_privatekey(key)
new_context.use_certificate(cert)
connection.set_context(new_context)
The request_client_cert argument requires some explanation. We're
supposed to be able to do this with no negative effects - if the
client has no cert to present, we're notified and proceed as usual.
Unfortunately, Android seems to have a bug (tested on 4.2.2) - when
an Android client is asked to present a certificate it does not
have, it hangs up, which is frankly bogus. Some time down the track
we may be able to make the proper behaviour the default again, but
until then we're conservative.
"""
context = self._create_ssl_context(**sslctx_kwargs)
context.use_privatekey(key)
if isinstance(cert, certutils.SSLCert):
context.use_certificate(cert.x509)
else:
context.use_certificate_chain_file(cert)
if extra_chain_certs:
for i in extra_chain_certs:
context.add_extra_chain_cert(i.x509)
if handle_sni:
# SNI callback happens during do_handshake()
context.set_tlsext_servername_callback(handle_sni)
if request_client_cert:
def save_cert(conn_, cert, errno_, depth_, preverify_ok_):
self.clientcert = certutils.SSLCert(cert)
# Return true to prevent cert verification error
return True
context.set_verify(SSL.VERIFY_PEER, save_cert)
# Cert Verify
if chain_file:
context.load_verify_locations(chain_file)
if dhparams:
SSL._lib.SSL_CTX_set_tmp_dh(context._context, dhparams)
return context
def convert_to_ssl(self, cert, key, **sslctx_kwargs):
"""
Convert connection to SSL.
For a list of parameters, see BaseHandler._create_ssl_context(...)
"""
context = self.create_ssl_context(
cert,
key,
**sslctx_kwargs)
self.connection = SSL.Connection(context, self.connection)
self.connection.set_accept_state()
try:
self.connection.do_handshake()
except SSL.Error as v:
raise TlsException("SSL handshake error: %s" % repr(v))
self.ssl_established = True
self.rfile.set_descriptor(self.connection)
self.wfile.set_descriptor(self.connection)
def handle(self): # pragma: no cover
raise NotImplementedError
def settimeout(self, n):
self.connection.settimeout(n)
def get_alpn_proto_negotiated(self):
if HAS_ALPN and self.ssl_established:
return self.connection.get_alpn_proto_negotiated()
else:
return b""
class TCPServer(object):
request_queue_size = 20
def __init__(self, address):
self.address = Address.wrap(address)
self.__is_shut_down = threading.Event()
self.__shutdown_request = False
self.socket = socket.socket(self.address.family, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.address())
self.address = Address.wrap(self.socket.getsockname())
self.socket.listen(self.request_queue_size)
def connection_thread(self, connection, client_address):
client_address = Address(client_address)
try:
self.handle_client_connection(connection, client_address)
except:
self.handle_error(connection, client_address)
finally:
close_socket(connection)
def serve_forever(self, poll_interval=0.1):
self.__is_shut_down.clear()
try:
while not self.__shutdown_request:
try:
r, w_, e_ = select.select(
[self.socket], [], [], poll_interval)
except select.error as ex: # pragma: no cover
if ex[0] == EINTR:
continue
else:
raise
if self.socket in r:
connection, client_address = self.socket.accept()
t = threading.Thread(
target=self.connection_thread,
args=(connection, client_address),
name="ConnectionThread (%s:%s -> %s:%s)" %
(client_address[0], client_address[1],
self.address.host, self.address.port)
)
t.setDaemon(1)
try:
t.start()
except threading.ThreadError:
self.handle_error(connection, Address(client_address))
connection.close()
finally:
self.__shutdown_request = False
self.__is_shut_down.set()
def shutdown(self):
self.__shutdown_request = True
self.__is_shut_down.wait()
self.socket.close()
self.handle_shutdown()
def handle_error(self, connection_, client_address, fp=sys.stderr):
"""
Called when handle_client_connection raises an exception.
"""
# If a thread has persisted after interpreter exit, the module might be
# none.
if traceback:
exc = six.text_type(traceback.format_exc())
print(u'-' * 40, file=fp)
print(
u"Error in processing of request from %s" % repr(client_address), file=fp)
print(exc, file=fp)
print(u'-' * 40, file=fp)
def handle_client_connection(self, conn, client_address): # pragma: no cover
"""
Called after client connection.
"""
raise NotImplementedError
def handle_shutdown(self):
"""
Called after server shutdown.
"""
| mit | 7,919,982,118,599,797,000 | 34.121739 | 160 | 0.567405 | false |
sebastienhupin/qxrad | tool/lib/qxjsonrpc/_access.py | 1 | 7491 | #!/usr/bin/python
# -*- coding: ascii -*-
'''==========================================================================
qxjsonrpc - JSON-RPC backend for the qooxdoo JavaScript library
(C) 2007 - Viktor Ferenczi ([email protected]) - Licence: GNU LGPL
-----------------------------------------------------------------------------
This module provides access control constants and decorators.
According to the Apache httpd manual:
http://httpd.apache.org/docs/1.3/howto/auth.html
--- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
"Apache has three distinct ways of dealing with the question of whether a
particular request for a resource will result in that resource actually be
returned. These criteria are called Authorization, Authentication, and
Access control.
Authentication is any process by which you verify that someone is who they
claim they are. This usually involves a username and a password, but can
include any other method of demonstrating identity, such as a smart card,
retina scan, voice recognition, or fingerprints. Authentication is
equivalent to showing your drivers license at the ticket counter at
the airport.
Authorization is finding out if the person, once identified, is permitted
to have the resource. This is usually determined by finding out if that
person is a part of a particular group, if that person has paid admission,
or has a particular level of security clearance. Authorization is
equivalent to checking the guest list at an exclusive party, or checking
for your ticket when you go to the opera.
Finally, access control is a much more general way of talking about
controlling access to a web resource. Access can be granted or denied
based on a wide variety of criteria, such as the network address of the
client, the time of day, the phase of the moon, or the browser which the
visitor is using. Access control is analogous to locking the gate at closing
time, or only letting people onto the ride who are more than 48 inches
tall - it's controlling entrance by some arbitrary condition which may or
may not have anything to do with the attributes of the particular visitor."
--- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
The JSON-RPC server itself does not provide any fixed authentication or
authorization scheme, however the session support allows easy implementation
of them. Sessions can be started manually or automatically if supported by
the transport currently used.
Authentication should be done by the application (service) before the client
tries to access protected content. Application should start a session and
store data about the authenticated user. Authorization should be based on
session data and application specific authorization rules.
Access control can be used to allow or deny calling of methods. Undecorated
methods cannot be called by the JSON-RPC clients. All externally accessible
methods must be marked with access control decorators. Default decorators
specify basic access rights:
public : any client can call this method
domain : clients from the same domain as the request's server can call
session : clients with a valid session can call this method
fail : the method cannot be called (access denied)
Complex access rights depending on authenticated user, the client's address
or anything else can be added by providing a simple access check function,
such as:
def isAdmin(method, request):
"""Access granted only for administrators"""
session=request.session
if session is None: return False
return session.user.admin
Usage:
@access(isAdmin)
def anAdminMethod(...):
...
Access checkers can be chained by simply using more decorators. Access is
granted if all checkers allow it. For example
@domain
@access(isAdmin)
def anAdminMethodForThisDomain(...):
...
Methods can be temporarily disabled this way (for debugging, etc.):
@fail
@all_other_decorators
def fn(...):
...
This module is NOT intended for direct import. Import symbols from qxjsonrpc.
=========================================================================='''
# Exported symbols
__all__=['MethodAccessibility', 'PassSessionAttributeName',
'getMethodAccessCheckers', 'access', 'public', 'domain', 'session', 'fail']
#============================================================================
# Constants
class MethodAccessibility:
'''Method Accessibility values'''
@staticmethod
def Public(method, request):
return True
@staticmethod
def Domain(method, request):
return request.domain is None or request.domain==request.server.domain
@staticmethod
def Session(method, request):
return request.session is not None
@staticmethod
def Fail(method, request):
return False
# Default accessibility for undecorated methods
default = Fail
#----------------------------------------------------------------------------
# Method attribute names for the access checker list and session passing
MethodAccessCheckersAttributeName='_qxjsonrpc_access_checkers_'
PassSessionAttributeName='_qxjsonrpc_pass_session_'
#----------------------------------------------------------------------------
def getMethodAccessCheckers(method, default=[MethodAccessibility.default]):
'''Get access checker function of the passed method'''
return getattr(method, MethodAccessCheckersAttributeName, default)
#============================================================================
# Function decorators to define method accessibility
def access(access_checker):
'''Generic decorator to define method accessibility.
access_checker=function with args (method, request) that returns True if
access is granted to the passed method for the request specified'''
def f(fn):
# Add access checker list if not defined
if not hasattr(fn, MethodAccessCheckersAttributeName):
setattr(fn, MethodAccessCheckersAttributeName, [])
# Append this checker to the list
getattr(fn, MethodAccessCheckersAttributeName).append(access_checker)
return fn
return f
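# Illustrative sketch (hypothetical `method` and `request` objects) of how a
# dispatcher can evaluate the checkers collected by these decorators; access
# is granted only if every checker agrees:
#
#   checkers = getMethodAccessCheckers(method)
#   granted = all(checker(method, request) for checker in checkers)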
#----------------------------------------------------------------------------
'''The decorated method may be called from any session, and without any
checking of who the referer is. Access is granted.'''
public=access(MethodAccessibility.Public)
#----------------------------------------------------------------------------
'''The method may only be called by a referer with the same domain value
as the running server. Some transports may not support domains. If
domains are not supported, then access is granted.'''
domain=access(MethodAccessibility.Domain)
#----------------------------------------------------------------------------
'''The referer must match the referer of the very first RPC request
issued during the current session. If sessions are not supported or
cannot be initiated, then access is denied.'''
def session(fn):
setattr(fn, PassSessionAttributeName, True)
return access(MethodAccessibility.Session)(fn)
#----------------------------------------------------------------------------
'''Access is explicitly denied.'''
fail=access(MethodAccessibility.Fail)
#============================================================================
| lgpl-3.0 | -3,126,790,091,060,564,500 | 39.616667 | 79 | 0.641303 | false |
zhaochao/fuel-web | fuel_upgrade_system/fuel_upgrade/fuel_upgrade/engines/raise_error.py | 1 | 1959 | # -*- coding: utf-8 -*-
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from fuel_upgrade.engines.base import UpgradeEngine
from fuel_upgrade import errors
class RaiseErrorUpgrader(UpgradeEngine):
"""The test upgrader intended to use in system tests.
In order to test the rollback feature we used to inject raising error
code in one of our upgraders in-place::
self.fuel_web.modify_python_file(self.env.get_admin_remote(),
"61i \ \ \ \ \ \ \ \ raise errors."
"ExecutedErrorNonZeroExitCode('{0}')"
.format('Some bad error'),
'/var/upgrade/site-packages/'
'fuel_upgrade/engines/'
'openstack.py')
    That was a fragile approach which led to occasional test failures whenever
    the upgrader's code changed. This class solves the issue by providing a
    special upgrader that always fails.
"""
error_message = 'Something Goes Wrong'
def upgrade(self):
raise errors.FuelUpgradeException(self.error_message)
def rollback(self):
return NotImplemented
def on_success(self):
return NotImplemented
@property
def required_free_space(self):
return {}
| apache-2.0 | 4,153,677,363,062,223,000 | 35.962264 | 78 | 0.617662 | false |
ulikoehler/UliEngineering | UliEngineering/SignalProcessing/Normalize.py | 1 | 3030 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Functions for normalizing signals
"""
import numpy as np
from collections import namedtuple
import scipy.signal
from .Utils import peak_to_peak
__all__ = ["normalize_max", "center_to_zero", "normalize_minmax", "normalize_plusminus_peak"]
NormalizationResult = namedtuple("NormalizationResult", ["data", "factor", "offset"])
def normalize_max(signal):
"""
Normalize signal by dividing by its max value.
Does not perform any offset adjustment.
This approach works well for data that is guaranteed to be
positive and if no offset adjustment is desired.
In case signal has a max of <= 0.0, signal is returned.
For a similar function that also limits the minimum value,
see normalize_plusminus_peak.
Returns
-------
A NormalizationResult() object.
Use .data to access the data
Use .factor to access the factor that signal was divided by
Use .offset to access the offset that was subtracted from signal
"""
if len(signal) == 0:
return NormalizationResult([], 1., 0.)
mx = np.max(signal)
# Avoid divide by zero
if mx <= 0.:
return NormalizationResult(signal, 1., 0.)
return NormalizationResult(signal / mx, mx, 0.)
def normalize_minmax(signal):
"""
Normalize signal by setting its lowest value
to 0.0 and its highest value to 1.0,
keeping all other values.
    If the signal is constant (max == min), no factor
    normalization is applied.
Returns
-------
A NormalizationResult() object.
Use .data to access the data
Use .factor to access the factor that signal was divided by
Use .offset to access the offset that was subtracted from signal
"""
if len(signal) == 0:
return NormalizationResult([], 1., 0.)
mi = np.min(signal)
mx = np.max(signal)
factor = mx - mi
if factor == 0.0:
factor = 1.0
return NormalizationResult((signal - mi) / factor, factor, mi)
def center_to_zero(signal):
"""
Normalize signal by subtracting its mean
Does not perform any factor normalization
Returns
-------
A NormalizationResult() object.
Use .data to access the data
Use .factor to access the factor that signal was divided by
Use .offset to access the offset that was subtracted from signal
"""
mn = np.mean(signal)
return NormalizationResult(signal - mn, 1., mn)
def normalize_plusminus_peak(signal):
"""
Center a signal to zero and normalize so that
- np.max(result) is <= 1.0
    - np.min(result) is >= -1.0
Returns
-------
A NormalizationResult() object.
Use .data to access the data
Use .factor to access the factor that signal was divided by
Use .offset to access the offset that was subtracted from signal
"""
norm_res = center_to_zero(signal)
mi = np.min(norm_res.data)
mx = np.max(norm_res.data)
    # Scale by the largest absolute excursion so both bounds hold, and guard
    # against a constant (all-zero after centering) signal.
    factor = max(abs(mi), abs(mx))
    if factor == 0.0:
        factor = 1.0
    return NormalizationResult(norm_res.data / factor, factor, norm_res.offset)
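if __name__ == "__main__":  # pragma: no cover
    # Illustrative usage sketch with hypothetical data (not part of the API).
    _sig = np.array([1.0, 3.0, 5.0])
    _res = normalize_minmax(_sig)
    print(_res.data)    # [0.  0.5 1. ]
    print(_res.factor)  # 4.0
    print(_res.offset)  # 1.0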
| apache-2.0 | -2,281,879,049,683,676,700 | 28.705882 | 93 | 0.662706 | false |
OpenDroneMap/WebODM | app/plugins/functions.py | 1 | 12861 | import os
import sys
import logging
import importlib
import subprocess
import traceback
import platform
import json
import shutil
from functools import reduce
from string import Template
from django.http import HttpResponse
from app.models import Plugin
from app.models import Setting
from django.conf import settings
from app.security import path_traversal_check
logger = logging.getLogger('app.logger')
# Add additional python path to discover plugins
if not settings.MEDIA_ROOT in sys.path:
sys.path.append(settings.MEDIA_ROOT)
def init_plugins():
# Make sure app/media/plugins exists
if not os.path.exists(get_plugins_persistent_path()):
os.mkdir(get_plugins_persistent_path())
# Make sure app/media/plugins is importable as a module
if not os.path.isfile(os.path.join(get_plugins_persistent_path(), "__init__.py")):
try:
with open(os.path.join(get_plugins_persistent_path(), "__init__.py"), 'w') as f:
f.write("\n")
except Exception as e:
logger.warning("Cannot create __init__.py: %s" % str(e))
build_plugins()
sync_plugin_db()
register_plugins()
def sync_plugin_db():
"""
Creates db entries for undiscovered plugins to keep track
of enabled/disabled plugins
"""
if settings.MIGRATING: return
# Erase cache
clear_plugins_cache()
db_plugins = Plugin.objects.all()
fs_plugins = get_plugins()
# Remove plugins that are in the database but not on the file system
for db_plugin in db_plugins:
fs_found = next((fs_plugin for fs_plugin in fs_plugins if db_plugin.name == fs_plugin.get_name()), None)
if not fs_found:
Plugin.objects.filter(name=db_plugin.name).delete()
logger.info("Cleaned [{}] plugin from database (not found in file system)".format(db_plugin.name))
# Add plugins found in the file system, but not yet in the database
for plugin in get_plugins():
# Plugins that have a "disabled" file are disabled
disabled_path = plugin.get_path("disabled")
disabled = os.path.isfile(disabled_path)
_, created = Plugin.objects.get_or_create(
name=plugin.get_name(),
defaults={'enabled': not disabled},
)
if created:
logger.info("Added [{}] plugin to database".format(plugin))
def clear_plugins_cache():
global plugins
plugins = None
def build_plugins():
for plugin in get_plugins():
# Check for package.json in public directory
# and run npm install if needed
if plugin.path_exists("public/package.json") and not plugin.path_exists("public/node_modules"):
logger.info("Running npm install for {}".format(plugin))
try:
npm = "npm"
if platform.system() == "Windows":
npm = "npm.cmd"
subprocess.call([npm, 'install'], cwd=plugin.get_path("public"))
except FileNotFoundError:
logger.warn("npm is not installed, will skip this plugin")
continue
# Check if we need to generate a webpack.config.js
if len(plugin.build_jsx_components()) > 0 and plugin.path_exists('public'):
            build_paths = [os.path.join(plugin.get_path('public'), p)
                           for p in plugin.build_jsx_components()]
            # Every referenced JSX entry point must exist on disk
            paths_ok = all(os.path.exists(p) for p in build_paths)
if paths_ok:
wpc_path = os.path.join(settings.BASE_DIR, 'app', 'plugins', 'templates', 'webpack.config.js.tmpl')
with open(wpc_path) as f:
tmpl = Template(f.read())
# Create entry configuration
entry = {}
for e in plugin.build_jsx_components():
entry[os.path.splitext(os.path.basename(e))[0]] = [os.path.join('.', e)]
wpc_content = tmpl.substitute({
'entry_json': json.dumps(entry)
})
with open(plugin.get_path('public/webpack.config.js'), 'w') as f:
f.write(wpc_content)
else:
logger.warning(
"Cannot generate webpack.config.js for {}, a path is missing: {}".format(plugin, ' '.join(build_paths)))
# Check for webpack.config.js (if we need to build it)
if plugin.path_exists("public/webpack.config.js"):
if settings.DEV and webpack_watch_process_count() <= 2:
logger.info("Running webpack with watcher for {}".format(plugin.get_name()))
subprocess.Popen(['webpack-cli', '--watch'], cwd=plugin.get_path("public"))
elif not plugin.path_exists("public/build"):
logger.info("Running webpack for {}".format(plugin.get_name()))
try:
webpack = "webpack-cli"
if platform.system() == "Windows":
webpack = "webpack-cli.cmd"
subprocess.call([webpack], cwd=plugin.get_path("public"))
except FileNotFoundError:
logger.warn("webpack-cli is not installed, plugin will not work")
def webpack_watch_process_count():
count = 0
try:
pids = [pid for pid in os.listdir('/proc') if pid.isdigit()]
for pid in pids:
try:
if "/usr/bin/webpack-cli" in open(os.path.join('/proc', pid, 'cmdline'), 'r').read().split('\0'):
count += 1
except IOError: # proc has already terminated
continue
except:
logger.warning("webpack_watch_process_count is not supported on this platform.")
return count
def register_plugins():
for plugin in get_active_plugins():
try:
plugin.register()
logger.info("Registered {}".format(plugin))
except Exception as e:
disable_plugin(plugin.get_name())
logger.warning("Cannot register {}: {}".format(plugin, str(e)))
def valid_plugin(plugin_path):
initpy_path = os.path.join(plugin_path, "__init__.py")
pluginpy_path = os.path.join(plugin_path, "plugin.py")
manifest_path = os.path.join(plugin_path, "manifest.json")
return os.path.isfile(initpy_path) and os.path.isfile(manifest_path) and os.path.isfile(pluginpy_path)
plugins = None
def get_plugins():
"""
    :return: all plugin instances (enabled or not)
"""
# Cache plugins search
global plugins
if plugins != None: return plugins
plugins_paths = get_plugins_paths()
plugins = []
for plugins_path in plugins_paths:
if not os.path.isdir(plugins_path):
continue
for dir in os.listdir(plugins_path):
# Each plugin must have a manifest.json and a plugin.py
plugin_path = os.path.join(plugins_path, dir)
# Do not load test plugin unless we're in test mode
if os.path.basename(plugin_path).endswith('test') and not settings.TESTING:
continue
# Ignore .gitignore
if os.path.basename(plugin_path) == '.gitignore':
continue
# Check plugin required files
if not valid_plugin(plugin_path):
continue
# Instantiate the plugin
try:
try:
if settings.TESTING:
module = importlib.import_module("app.media_test.plugins.{}".format(dir))
else:
module = importlib.import_module("plugins.{}".format(dir))
plugin = (getattr(module, "Plugin"))()
except (ImportError, AttributeError):
module = importlib.import_module("coreplugins.{}".format(dir))
plugin = (getattr(module, "Plugin"))()
# Check version
manifest = plugin.get_manifest()
if 'webodmMinVersion' in manifest:
min_version = manifest['webodmMinVersion']
manifest_path = os.path.join(plugin_path, "manifest.json")
if versionToInt(min_version) > versionToInt(settings.VERSION):
logger.warning(
"In {} webodmMinVersion is set to {} but WebODM version is {}. Plugin will not be loaded. Update WebODM.".format(
manifest_path, min_version, settings.VERSION))
continue
# Skip plugins in blacklist
if plugin.get_name() in settings.PLUGINS_BLACKLIST:
continue
# Skip plugins already added
if plugin.get_name() in [p.get_name() for p in plugins]:
logger.warning("Duplicate plugin name found in {}, skipping".format(plugin_path))
continue
plugins.append(plugin)
except Exception as e:
logger.warning("Failed to instantiate plugin {}: {}".format(dir, e))
return plugins
def get_active_plugins():
if settings.MIGRATING: return []
plugins = []
try:
enabled_plugins = [p.name for p in Plugin.objects.filter(enabled=True).all()]
for plugin in get_plugins():
if plugin.get_name() in enabled_plugins:
plugins.append(plugin)
except Exception as e:
logger.warning("Cannot get active plugins. If running a migration this is expected: %s" % str(e))
return plugins
def get_plugin_by_name(name, only_active=True, refresh_cache_if_none=False):
if only_active:
plugins = get_active_plugins()
else:
plugins = get_plugins()
res = list(filter(lambda p: p.get_name() == name, plugins))
res = res[0] if res else None
if refresh_cache_if_none and res is None:
# Retry after clearing the cache
clear_plugins_cache()
return get_plugin_by_name(name, only_active=only_active, refresh_cache_if_none=False)
else:
return res
def get_current_plugin():
"""
When called from a python module inside a plugin's directory,
it returns the plugin that this python module belongs to
:return: Plugin instance
"""
caller_filename = traceback.extract_stack()[-2][0]
for p in get_plugins_paths():
relp = os.path.relpath(caller_filename, p)
if ".." in relp:
continue
parts = relp.split(os.sep)
if len(parts) > 0:
plugin_name = parts[0]
return get_plugin_by_name(plugin_name, only_active=False)
return None
def get_plugins_paths():
current_path = os.path.dirname(os.path.realpath(__file__))
return [
os.path.abspath(get_plugins_persistent_path()),
os.path.abspath(os.path.join(current_path, "..", "..", "coreplugins")),
]
def get_plugins_persistent_path(*paths):
return path_traversal_check(os.path.join(settings.MEDIA_ROOT, "plugins", *paths), os.path.join(settings.MEDIA_ROOT, "plugins"))
def get_dynamic_script_handler(script_path, callback=None, **kwargs):
def handleRequest(request):
if callback is not None:
template_params = callback(request, **kwargs)
if not template_params:
return HttpResponse("")
else:
template_params = kwargs
with open(script_path) as f:
tmpl = Template(f.read())
try:
return HttpResponse(tmpl.substitute(template_params))
except TypeError as e:
return HttpResponse("Template substitution failed with params: {}. {}".format(str(template_params), e))
return handleRequest
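# Illustrative sketch (hypothetical script path and template variable): the
# returned handler substitutes ${plugin_name} inside app.js on every request.
#
#   handler = get_dynamic_script_handler("/webodm/plugins/demo/app.js",
#                                        plugin_name="demo")
#   response = handler(request)   # HttpResponse with the substituted script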
def enable_plugin(plugin_name):
p = get_plugin_by_name(plugin_name, only_active=False)
p.register()
Plugin.objects.get(pk=plugin_name).enable()
return p
def disable_plugin(plugin_name):
p = get_plugin_by_name(plugin_name, only_active=False)
Plugin.objects.get(pk=plugin_name).disable()
return p
def delete_plugin(plugin_name):
Plugin.objects.get(pk=plugin_name).delete()
if os.path.exists(get_plugins_persistent_path(plugin_name)):
shutil.rmtree(get_plugins_persistent_path(plugin_name))
clear_plugins_cache()
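# Illustrative sketch (hypothetical plugin name) of the enable/disable/delete
# life cycle built on the helpers above:
#
#   enable_plugin("my-plugin")    # registers the plugin and marks it enabled
#   disable_plugin("my-plugin")   # keeps the files, marks it disabled
#   delete_plugin("my-plugin")    # removes the DB row and persistent files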
def get_site_settings():
return Setting.objects.first()
def versionToInt(version):
"""
Converts a WebODM version string (major.minor.build) to a integer value
for comparison
>>> versionToInt("1.2.3")
100203
>>> versionToInt("1")
100000
>>> versionToInt("1.2.3.4")
100203
>>> versionToInt("wrong")
-1
"""
try:
return sum([reduce(lambda mult, ver: mult * ver, i) for i in zip([100000, 100, 1], map(int, version.split(".")))])
except:
return -1
| mpl-2.0 | 817,419,384,110,945,500 | 34.626039 | 141 | 0.590234 | false |
mganeva/mantid | Framework/PythonInterface/test/python/mantid/api/IPeaksWorkspaceTest.py | 1 | 4825 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from testhelpers import run_algorithm, WorkspaceCreationHelper
from mantid.kernel import V3D
from mantid.geometry import OrientedLattice
from mantid.api import IPeaksWorkspace, IPeak
class IPeaksWorkspaceTest(unittest.TestCase):
"""
Test the python interface to PeaksWorkspace's
"""
def test_interface(self):
""" Rudimentary test to get peak and get/set some values """
pws = WorkspaceCreationHelper.createPeaksWorkspace(1)
self.assertTrue(isinstance(pws, IPeaksWorkspace))
self.assertEqual(pws.getNumberPeaks(), 1)
p = pws.getPeak(0)
# Try a few IPeak get/setters. Not everything.
p.setH(234)
self.assertEqual(p.getH(), 234)
p.setHKL(5,6,7)
self.assertEqual(p.getH(), 5)
self.assertEqual(p.getK(), 6)
self.assertEqual(p.getL(), 7)
hkl = p.getHKL()
self.assertEquals(hkl, V3D(5,6,7))
p.setIntensity(456)
p.setSigmaIntensity(789)
self.assertEqual(p.getIntensity(), 456)
self.assertEqual(p.getSigmaIntensity(), 789)
# Finally try to remove a peak
pws.removePeak(0)
self.assertEqual(pws.getNumberPeaks(), 0)
# Create a new peak at some Q in the lab frame
qlab = V3D(1,2,3)
p = pws.createPeak(qlab, 1.54)
p.getQLabFrame()
self.assertAlmostEquals( p.getQLabFrame().X(), 1.0, 3)
# Now try to add the peak back
pws.addPeak(p)
self.assertEqual(pws.getNumberPeaks(), 1)
# Check that it is what we added to it
p = pws.getPeak(0)
self.assertAlmostEquals( p.getQLabFrame().X(), 1.0, 3)
# Peaks workspace will not be integrated by default.
self.assertTrue(not pws.hasIntegratedPeaks())
def test_createPeakHKL(self):
pws = WorkspaceCreationHelper.createPeaksWorkspace(0, True)
lattice = pws.mutableSample().getOrientedLattice()
# Simple test that the creational method is exposed
p = pws.createPeakHKL([1,1,1])
        self.assertIsNotNone(p)
def test_peak_setQLabFrame(self):
pws = WorkspaceCreationHelper.createPeaksWorkspace(1, True)
p = pws.getPeak(0)
try:
p.setQLabFrame(V3D(1,1,1))
except Exception:
self.fail("Tried setQLabFrame with one V3D argument")
self.assertAlmostEquals( p.getQLabFrame().X(), 1.0, places=10)
self.assertAlmostEquals( p.getQLabFrame().Y(), 1.0, places=10)
self.assertAlmostEquals( p.getQLabFrame().Z(), 1.0, places=10)
try:
p.setQLabFrame(V3D(1,1,1), 1)
except Exception:
self.fail("Tried setQLabFrame with one V3D argument and a double distance")
self.assertAlmostEquals( p.getQLabFrame().X(), 1.0, places=10)
self.assertAlmostEquals( p.getQLabFrame().Y(), 1.0, places=10)
self.assertAlmostEquals( p.getQLabFrame().Z(), 1.0, places=10)
def test_peak_setQSampleFrame(self):
pws = WorkspaceCreationHelper.createPeaksWorkspace(1, True)
p = pws.getPeak(0)
try:
p.setQSampleFrame(V3D(1,1,1))
except Exception:
self.fail("Tried setQSampleFrame with one V3D argument")
self.assertAlmostEquals( p.getQSampleFrame().X(), 1.0, places=10)
self.assertAlmostEquals( p.getQSampleFrame().Y(), 1.0, places=10)
self.assertAlmostEquals( p.getQSampleFrame().Z(), 1.0, places=10)
try:
p.setQSampleFrame(V3D(1,1,1), 1)
except Exception:
self.fail("Tried setQSampleFrame with one V3D argument and a double distance")
self.assertAlmostEquals( p.getQSampleFrame().X(), 1.0, places=10)
self.assertAlmostEquals( p.getQSampleFrame().Y(), 1.0, places=10)
self.assertAlmostEquals( p.getQSampleFrame().Z(), 1.0, places=10)
def test_setCell_with_column_name(self):
pws = WorkspaceCreationHelper.createPeaksWorkspace(1, True)
pws.setCell("h", 0, 1)
pws.setCell("k", 0, 2)
pws.setCell("l", 0, 3)
pws.setCell("QLab", 0, V3D(1,1,1))
pws.setCell("QSample", 0, V3D(1,1,1))
self.assertEquals(pws.cell("h", 0), 1)
self.assertEquals(pws.cell("k", 0), 2)
self.assertEquals(pws.cell("l", 0), 3)
self.assertEquals(pws.cell("QLab", 0), V3D(1,1,1))
self.assertEquals(pws.cell("QSample", 0), V3D(1,1,1))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 1,478,124,847,016,855,600 | 35.832061 | 90 | 0.632953 | false |
certik/sympy-oldcore | sympy/plotting/tests/test_plotting.py | 1 | 2212 | import sys
sys.path.append(".")
disabled = False
try:
# pyglet requires ctypes > 1.0.0
import ctypes
ctypes_major = int(ctypes.__version__.split('.')[0])
if ctypes_major < 1:
disabled = True
except:
disabled = True
from sympy import *
x,y = symbols('xy')
class TestPlotting:
def __init__(self):
global disabled
self.disabled = disabled
def test_import(self):
from sympy import Plot
def test_plot_2d(self):
from sympy import Plot
p=Plot(x, [x, -5, 5, 4], visible=False)
p.wait_for_calculations()
def test_plot_2d_discontinuous(self):
from sympy import Plot
p=Plot(1/x, [x, -1, 1, 2], visible=False)
p.wait_for_calculations()
def test_plot_3d(self):
from sympy import Plot
p=Plot(x*y, [x, -5, 5, 5], [y, -5, 5, 5], visible=False)
p.wait_for_calculations()
def test_plot_3d_discontinuous(self):
from sympy import Plot
p=Plot(1/x, [x, -3, 3, 6], [y, -1, 1, 1], visible=False)
p.wait_for_calculations()
def test_plot_2d_polar(self):
from sympy import Plot
p=Plot(1/x, [x,-1,1,4], 'mode=polar', visible=False)
p.wait_for_calculations()
def test_plot_3d_cylinder(self):
from sympy import Plot
p=Plot(1/y, [x,0,6.282,4], [y,-1,1,4], 'mode=polar;style=solid', visible=False)
p.wait_for_calculations()
def test_plot_3d_spherical(self):
from sympy import Plot
p=Plot(1, [x,0,6.282,4], [y,0,3.141,4], 'mode=spherical;style=wireframe', visible=False)
p.wait_for_calculations()
def test_plot_2d_parametric(self):
from sympy import Plot
p=Plot(sin(x), cos(x), [x, 0, 6.282, 4], visible=False)
p.wait_for_calculations()
def test_plot_3d_parametric(self):
from sympy import Plot
p=Plot(sin(x), cos(x), x/5.0, [x, 0, 6.282, 4], visible=False)
p.wait_for_calculations()
def _test_plot_log(self):
from sympy import Plot
p=Plot(log(x), [x,0,6.282,4], 'mode=polar', visible=False)
p.wait_for_calculations()
| bsd-3-clause | 2,950,774,211,579,863,600 | 28.30137 | 96 | 0.563291 | false |
bacaldwell/ironic | ironic/tests/unit/drivers/modules/irmc/test_management.py | 1 | 15185 | # Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for iRMC Management Driver
"""
import os
import xml.etree.ElementTree as ET
import mock
from ironic.common import boot_devices
from ironic.common import driver_factory
from ironic.common import exception
from ironic.conductor import task_manager
from ironic.drivers.modules import ipmitool
from ironic.drivers.modules.irmc import common as irmc_common
from ironic.drivers.modules.irmc import management as irmc_management
from ironic.drivers import utils as driver_utils
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.db import utils as db_utils
from ironic.tests.unit.drivers import third_party_driver_mock_specs \
as mock_specs
from ironic.tests.unit.objects import utils as obj_utils
INFO_DICT = db_utils.get_test_irmc_info()
class IRMCManagementTestCase(db_base.DbTestCase):
def setUp(self):
super(IRMCManagementTestCase, self).setUp()
driver_info = INFO_DICT
mgr_utils.mock_the_extension_manager(driver="fake_irmc")
self.driver = driver_factory.get_driver("fake_irmc")
self.node = obj_utils.create_test_node(self.context,
driver='fake_irmc',
driver_info=driver_info)
self.info = irmc_common.parse_driver_info(self.node)
def test_get_properties(self):
expected = irmc_common.COMMON_PROPERTIES
expected.update(ipmitool.COMMON_PROPERTIES)
expected.update(ipmitool.CONSOLE_PROPERTIES)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(expected, task.driver.get_properties())
@mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate(self, mock_drvinfo):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.management.validate(task)
mock_drvinfo.assert_called_once_with(task.node)
@mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate_fail(self, mock_drvinfo):
side_effect = exception.InvalidParameterValue("Invalid Input")
mock_drvinfo.side_effect = side_effect
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.management.validate,
task)
def test_management_interface_get_supported_boot_devices(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
expected = [boot_devices.PXE, boot_devices.DISK,
boot_devices.CDROM, boot_devices.BIOS,
boot_devices.SAFE]
self.assertEqual(sorted(expected), sorted(task.driver.management.
get_supported_boot_devices(task)))
@mock.patch.object(irmc_management.ipmitool, "send_raw", spec_set=True,
autospec=True)
def _test_management_interface_set_boot_device_ok(
self, boot_mode, params, expected_raw_code, send_raw_mock):
send_raw_mock.return_value = [None, None]
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.properties['capabilities'] = ''
if boot_mode:
driver_utils.add_node_capability(task, 'boot_mode', boot_mode)
self.driver.management.set_boot_device(task, **params)
send_raw_mock.assert_has_calls([
mock.call(task, "0x00 0x08 0x03 0x08"),
mock.call(task, expected_raw_code)])
def test_management_interface_set_boot_device_ok_pxe(self):
params = {'device': boot_devices.PXE, 'persistent': False}
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0x80 0x04 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0x80 0x04 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xa0 0x04 0x00 0x00 0x00")
params['persistent'] = True
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0xc0 0x04 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0xc0 0x04 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xe0 0x04 0x00 0x00 0x00")
def test_management_interface_set_boot_device_ok_disk(self):
params = {'device': boot_devices.DISK, 'persistent': False}
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0x80 0x08 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0x80 0x08 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xa0 0x08 0x00 0x00 0x00")
params['persistent'] = True
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0xc0 0x08 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0xc0 0x08 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xe0 0x08 0x00 0x00 0x00")
def test_management_interface_set_boot_device_ok_cdrom(self):
params = {'device': boot_devices.CDROM, 'persistent': False}
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0x80 0x20 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0x80 0x20 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xa0 0x20 0x00 0x00 0x00")
params['persistent'] = True
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0xc0 0x20 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0xc0 0x20 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xe0 0x20 0x00 0x00 0x00")
def test_management_interface_set_boot_device_ok_bios(self):
params = {'device': boot_devices.BIOS, 'persistent': False}
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0x80 0x18 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0x80 0x18 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xa0 0x18 0x00 0x00 0x00")
params['persistent'] = True
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0xc0 0x18 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0xc0 0x18 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xe0 0x18 0x00 0x00 0x00")
def test_management_interface_set_boot_device_ok_safe(self):
params = {'device': boot_devices.SAFE, 'persistent': False}
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0x80 0x0c 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0x80 0x0c 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xa0 0x0c 0x00 0x00 0x00")
params['persistent'] = True
self._test_management_interface_set_boot_device_ok(
None,
params,
"0x00 0x08 0x05 0xc0 0x0c 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'bios',
params,
"0x00 0x08 0x05 0xc0 0x0c 0x00 0x00 0x00")
self._test_management_interface_set_boot_device_ok(
'uefi',
params,
"0x00 0x08 0x05 0xe0 0x0c 0x00 0x00 0x00")
@mock.patch.object(irmc_management.ipmitool, "send_raw", spec_set=True,
autospec=True)
def test_management_interface_set_boot_device_ng(self, send_raw_mock):
"""uefi mode, next boot only, unknown device."""
send_raw_mock.return_value = [None, None]
with task_manager.acquire(self.context, self.node.uuid) as task:
driver_utils.add_node_capability(task, 'boot_mode', 'uefi')
self.assertRaises(exception.InvalidParameterValue,
self.driver.management.set_boot_device,
task,
"unknown")
@mock.patch.object(irmc_management, 'scci',
spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
@mock.patch.object(irmc_common, 'get_irmc_report', spec_set=True,
autospec=True)
def test_management_interface_get_sensors_data_scci_ok(
self, mock_get_irmc_report, mock_scci):
"""'irmc_sensor_method' = 'scci' specified and OK data."""
with open(os.path.join(os.path.dirname(__file__),
'fake_sensors_data_ok.xml'), "r") as report:
fake_txt = report.read()
fake_xml = ET.fromstring(fake_txt)
mock_get_irmc_report.return_value = fake_xml
mock_scci.get_sensor_data.return_value = fake_xml.find(
"./System/SensorDataRecords")
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.driver_info['irmc_sensor_method'] = 'scci'
sensor_dict = self.driver.management.get_sensors_data(task)
expected = {
'Fan (4)': {
'FAN1 SYS (29)': {
'Units': 'RPM',
'Sensor ID': 'FAN1 SYS (29)',
'Sensor Reading': '600 RPM'
},
'FAN2 SYS (29)': {
'Units': 'None',
'Sensor ID': 'FAN2 SYS (29)',
'Sensor Reading': 'None None'
}
},
'Temperature (1)': {
'Systemboard 1 (7)': {
'Units': 'degree C',
'Sensor ID': 'Systemboard 1 (7)',
'Sensor Reading': '80 degree C'
},
'Ambient (55)': {
'Units': 'degree C',
'Sensor ID': 'Ambient (55)',
'Sensor Reading': '42 degree C'
}
}
}
self.assertEqual(expected, sensor_dict)
@mock.patch.object(irmc_management, 'scci',
spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
@mock.patch.object(irmc_common, 'get_irmc_report', spec_set=True,
autospec=True)
def test_management_interface_get_sensors_data_scci_ng(
self, mock_get_irmc_report, mock_scci):
"""'irmc_sensor_method' = 'scci' specified and NG data."""
with open(os.path.join(os.path.dirname(__file__),
'fake_sensors_data_ng.xml'), "r") as report:
fake_txt = report.read()
fake_xml = ET.fromstring(fake_txt)
mock_get_irmc_report.return_value = fake_xml
mock_scci.get_sensor_data.return_value = fake_xml.find(
"./System/SensorDataRecords")
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.driver_info['irmc_sensor_method'] = 'scci'
sensor_dict = self.driver.management.get_sensors_data(task)
self.assertEqual(len(sensor_dict), 0)
@mock.patch.object(ipmitool.IPMIManagement, 'get_sensors_data',
spec_set=True, autospec=True)
def test_management_interface_get_sensors_data_ipmitool_ok(
self,
get_sensors_data_mock):
"""'irmc_sensor_method' = 'ipmitool' specified."""
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.driver_info['irmc_sensor_method'] = 'ipmitool'
task.driver.management.get_sensors_data(task)
get_sensors_data_mock.assert_called_once_with(
task.driver.management, task)
@mock.patch.object(irmc_common, 'get_irmc_report', spec_set=True,
autospec=True)
def test_management_interface_get_sensors_data_exception(
self,
get_irmc_report_mock):
"""'FailedToGetSensorData Exception."""
get_irmc_report_mock.side_effect = exception.InvalidParameterValue(
"Fake Error")
irmc_management.scci.SCCIInvalidInputError = Exception
irmc_management.scci.SCCIClientError = Exception
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.driver_info['irmc_sensor_method'] = 'scci'
e = self.assertRaises(exception.FailedToGetSensorData,
self.driver.management.get_sensors_data,
task)
self.assertEqual("Failed to get sensor data for node 1be26c0b-" +
"03f2-4d2e-ae87-c02d7f33c123. Error: Fake Error",
str(e))
| apache-2.0 | 6,273,096,235,012,135,000 | 40.831956 | 78 | 0.581429 | false |
tiborsimko/invenio-workflows | tests/test_workflows.py | 1 | 31366 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for workflows."""
from __future__ import absolute_import
import logging
import random
import time
from flask_registry import ImportPathRegistry
from invenio_testing import InvenioTestCase
TEST_PACKAGES = [
'invenio_workflows',
'demo_package',
]
class WorkflowTasksTestCase(InvenioTestCase):
""" Workflow class for testing."""
def create_registries(self):
"""Create registries for testing."""
from invenio_workflows.registry import WorkflowsRegistry
self.app.extensions['registry']['workflows.tests'] = \
ImportPathRegistry(initial=TEST_PACKAGES)
self.app.extensions['registry']['workflows'] = \
WorkflowsRegistry(
'workflows', app=self.app, registry_namespace='workflows.tests'
)
self.app.extensions['registry']['workflows.actions'] = \
WorkflowsRegistry(
'actions', app=self.app, registry_namespace='workflows.tests'
)
def cleanup_registries(self):
"""Clean registries for testing."""
del self.app.extensions['registry']['workflows.tests']
del self.app.extensions['registry']['workflows']
del self.app.extensions['registry']['workflows.actions']
class WorkflowTasksTestAPI(WorkflowTasksTestCase):
""" Test basic workflow API."""
def setUp(self):
"""Setup tests."""
self.create_registries()
self.test_data = {}
self.id_workflows = []
self.recxml = """<?xml version="1.0" encoding="UTF-8"?>
<OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">
<responseDate>2013-04-03T13:56:49Z</responseDate>
<request verb="ListRecords" from="2013-03-25" metadataPrefix="arXiv" set="physics:astro-ph">http://export.arxiv.org/oai2</request>
<ListRecords>
<record>
<header>
<identifier>oai:arXiv.org:0801.3931</identifier>
<datestamp>2013-03-26</datestamp>
<setSpec>physics:astro-ph</setSpec>
</header>
<metadata>
<arXiv xmlns="http://arxiv.org/OAI/arXiv/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://arxiv.org/OAI/arXiv/ http://arxiv.org/OAI/arXiv.xsd">
<id>0801.3931</id><created>2008-01-25</created><authors><author><keyname>Manos</keyname><forenames>T.</forenames></author><author><keyname>Athanassoula</keyname><forenames>E.</forenames></author></authors><title>Dynamical study of 2D and 3D barred galaxy models</title><categories>astro-ph</categories><comments>8 pages, 3 figures, to appear in the proceedings of the international
conference "Chaos in Astronomy", Athens, Greece (talk contribution)</comments><journal-ref>Chaos in Astronomy Astrophysics and Space Science Proceedings
2009, pp 115-122</journal-ref><doi>10.1007/978-3-540-75826-6_11</doi><abstract> We study the dynamics of 2D and 3D barred galaxy analytical models, focusing
on the distinction between regular and chaotic orbits with the help of the
Smaller ALigment Index (SALI), a very powerful tool for this kind of problems.
We present briefly the method and we calculate the fraction of chaotic and
regular orbits in several cases. In the 2D model, taking initial conditions on
a Poincar\'{e} $(y,p_y)$ surface of section, we determine the fraction of
regular and chaotic orbits. In the 3D model, choosing initial conditions on a
cartesian grid in a region of the $(x, z, p_y)$ space, which in coordinate
space covers the inner disc, we find how the fraction of regular orbits changes
as a function of the Jacobi constant. Finally, we outline that regions near the
$(x,y)$ plane are populated mainly by regular orbits. The same is true for
regions that lie either near to the galactic center, or at larger relatively
distances from it.
</abstract></arXiv>
</metadata>
</record>
</ListRecords>
</OAI-PMH>
"""
def tearDown(self):
""" Clean up created objects."""
from invenio_workflows.models import Workflow
self.delete_objects(
Workflow.get(Workflow.module_name == "unit_tests").all())
self.cleanup_registries()
def test_halt(self):
"""Test halt task."""
from invenio_workflows.registry import workflows
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.models import (BibWorkflowObjectLog,
ObjectVersion)
def halt_engine(obj, eng):
return eng.halt("Test")
class HaltTest(object):
workflow = [halt_engine]
        workflows['halttest'] = HaltTest
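        # Registering the ad-hoc workflow class under a name lets start() look it
        # up in the workflows registry just like a packaged workflow definition.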
data = [set(('somekey', 'somevalue'))]
eng = start('halttest', data, module_name="unit_tests")
idx, obj = list(eng.getObjects())[0]
self.assertEqual(ObjectVersion.WAITING, obj.version)
self.assertEqual(WorkflowStatus.HALTED, eng.status)
self.assertEqual(0, BibWorkflowObjectLog.get(
id_object=obj.id, log_type=logging.ERROR).count())
def test_halt_in_branch(self):
"""Test halt task when in conditionnal branch."""
from workflow.patterns import IF_ELSE
from invenio_workflows.registry import workflows
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.models import (BibWorkflowObjectLog,
ObjectVersion)
def always_true(obj, eng):
return True
def halt_engine(obj, eng):
return eng.halt("Test")
class BranchTest(object):
workflow = [
IF_ELSE(always_true, [halt_engine], [halt_engine])
]
workflows['branchtest'] = BranchTest
data = [set(('somekey', 'somevalue'))]
eng = start('branchtest', data, module_name="unit_tests")
idx, obj = list(eng.getObjects())[0]
self.assertEqual(ObjectVersion.WAITING, obj.version)
self.assertEqual(WorkflowStatus.HALTED, eng.status)
self.assertEqual(0, BibWorkflowObjectLog.get(
id_object=obj.id, log_type=logging.ERROR).count())
def test_object_creation_complete(self):
"""
Test status of object before/after workflow.
When created before calling API, with "high" test-data that will
make the workflow complete.
"""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.api import start
test_object = BibWorkflowObject()
test_object.set_data(20)
test_object.save()
self.assertEqual(ObjectVersion.INITIAL, test_object.version)
self.assertEqual(None, test_object.id_parent)
self.assertEqual(20, test_object.get_data())
engine = start('demo_workflow', [test_object],
module_name="unit_tests")
self.assertEqual(38, test_object.get_data())
self.assertEqual(None, test_object.id_parent)
self.assertEqual(WorkflowStatus.COMPLETED, engine.status)
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
def test_object_creation_halt(self):
"""Test status of object before/after workflow.
When created before calling API, with "low" test-data that will
make the workflow halt.
"""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
test_object = BibWorkflowObject()
test_object.set_data(2)
test_object.save()
self.assertEqual(ObjectVersion.INITIAL, test_object.version)
self.assertEqual(None, test_object.id_parent)
self.assertEqual(2, test_object.get_data())
engine = start('demo_workflow', [test_object],
module_name="unit_tests")
self.assertEqual(2, test_object.get_data())
self.assertEqual(ObjectVersion.WAITING, test_object.version)
self.assertEqual(WorkflowStatus.HALTED, engine.status)
def test_workflow_engine_instantiation(self):
"""Check the proper init of the Workflow and BibWorkflowEngine."""
from invenio_workflows.models import Workflow
from invenio_workflows.engine import BibWorkflowEngine
from uuid import uuid1 as new_uuid
test_workflow = Workflow(name='demo_workflow', uuid=new_uuid(),
id_user=0, module_name="Unknown", )
test_workflow_engine = BibWorkflowEngine(name=test_workflow.name,
uuid=test_workflow.uuid)
self.assertEqual(test_workflow.name, test_workflow_engine.name)
def test_workflow_restarts(self):
"""Check if all is well when restarting a workflow several times."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, continue_oid
from invenio_workflows.engine import WorkflowStatus
test_object = BibWorkflowObject()
random.seed(time.time())
tries = 15
test_object.set_data(tries)
test_object.save()
engine = start('demo_workflow_hardcore', [test_object],
module_name="unit_tests")
for i in range(0, tries):
self.assertEqual(engine.status, WorkflowStatus.HALTED)
for my_object_b in engine.getObjects():
engine = continue_oid(my_object_b[1].id, "restart_task")
self.assertEqual(0, test_object.get_data())
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
self.assertEqual(WorkflowStatus.COMPLETED, engine.status)
def test_workflow_object_creation(self):
"""Test to see if the right snapshots or object versions are created."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
initial_data = 22
final_data = 40
test_object = BibWorkflowObject()
test_object.set_data(initial_data)
test_object.save()
workflow = start(workflow_name="demo_workflow",
data=[test_object],
module_name="unit_tests")
# Get parent object of the workflow we just ran
initial_object = BibWorkflowObject.query.filter(
BibWorkflowObject.id_parent == test_object.id).one()
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
# There should only be 2 objects (initial, final)
self.assertEqual(2, len(all_objects))
self.assertEqual(test_object.id, initial_object.id_parent)
self.assertEqual(ObjectVersion.INITIAL, initial_object.version)
self.assertEqual(initial_data, initial_object.get_data())
self.assertEqual(final_data, test_object.get_data())
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
def test_workflow_object_creation_simple(self):
"""Test to see if the right snapshots or object versions are created."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
initial_data = 22
final_data = 40
workflow = start(workflow_name="demo_workflow",
data=[initial_data],
module_name="unit_tests")
# Get parent object of the workflow we just ran
initial_object = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == None).first() # noqa E711
test_object = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == initial_object.id).first()
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
# There should only be 2 objects (initial, final)
self.assertEqual(2, len(all_objects))
self.assertEqual(test_object.id_parent, initial_object.id)
self.assertEqual(ObjectVersion.COMPLETED, initial_object.version)
self.assertEqual(final_data, initial_object.get_data())
self.assertEqual(initial_data, test_object.get_data())
self.assertEqual(ObjectVersion.INITIAL, test_object.version)
def test_workflow_complex_run(self):
"""Test running workflow with several data objects."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
self.test_data = [1, 20]
final_data = [1, 38]
workflow = start(workflow_name="demo_workflow",
data=self.test_data,
module_name="unit_tests")
# Get parent objects of the workflow we just ran
objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == None # noqa E711
).order_by(BibWorkflowObject.id).all()
# Let's check that we found anything.
        # There should only be two parent objects
self.assertEqual(2, len(objects))
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
self.assertEqual(4, len(all_objects))
for obj in objects:
# The child object should have the final or halted version
self.assertTrue(obj.child_objects[0].version in (ObjectVersion.INITIAL,
ObjectVersion.HALTED))
# Making sure the final data is correct
self.assertTrue(obj.get_data() in final_data)
self.assertTrue(obj.child_objects[0].get_data() in self.test_data)
def test_workflow_approve_step(self):
"""Test runnning a record ingestion workflow with a action step."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.api import start
initial_data = 1
workflow = start(workflow_name="demo_workflow_approve",
data=[initial_data],
module_name="unit_tests")
# Get objects of the workflow we just ran
objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == None # noqa E711
).order_by(BibWorkflowObject.id).all()
self._check_workflow_execution(objects, initial_data)
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
self.assertEqual(2, len(all_objects))
self.assertEqual(WorkflowStatus.HALTED, workflow.status)
current = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.version == ObjectVersion.HALTED
).one()
self.assertEqual(current.get_action(), "approval")
def test_workflow_for_halted_object(self):
"""Test workflow with continuing a halted object."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, continue_oid
from invenio_workflows.engine import WorkflowStatus
current = BibWorkflowObject()
current.set_data(1)
current.save()
workflow = start(workflow_name="demo_workflow_approve",
data=[current],
module_name="unit_tests")
self.assertEqual(WorkflowStatus.HALTED, workflow.status)
self.assertEqual(ObjectVersion.HALTED, current.version)
workflow = continue_oid(current.id,
module_name="unit_tests")
self.assertEqual(WorkflowStatus.COMPLETED, workflow.status)
self.assertEqual(ObjectVersion.COMPLETED, current.version)
def test_workflow_for_finished_object(self):
"""Test starting workflow with finished object given."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
current = BibWorkflowObject()
current.set_data(20)
current.save()
workflow = start(workflow_name="demo_workflow",
data=[current],
module_name="unit_tests")
self.assertEqual(WorkflowStatus.COMPLETED, workflow.status)
self.assertEqual(ObjectVersion.COMPLETED, current.version)
self.assertEqual(38, current.get_data())
previous = BibWorkflowObject.query.get(current.id)
workflow_2 = start(workflow_name="demo_workflow",
data=[previous],
module_name="unit_tests")
self.assertEqual(WorkflowStatus.COMPLETED, workflow_2.status)
self.assertEqual(ObjectVersion.COMPLETED, previous.version)
self.assertEqual(56, previous.get_data())
def test_logging_for_workflow_objects_without_workflow(self):
"""Test run a virtual object out of a workflow for test purpose."""
from invenio_workflows.models import (BibWorkflowObject,
BibWorkflowObjectLog,
ObjectVersion)
initial_data = 20
obj_init = BibWorkflowObject(
id_workflow=None,
version=ObjectVersion.INITIAL)
obj_init.set_data(initial_data)
obj_init.save()
err_msg = "This is an error message"
info_msg = "This is an info message"
obj_init.log.info(info_msg)
obj_init.log.error("This is an error message")
# FIXME: loglevels are simply overwritten somewhere in Celery
# even if Celery is not being "used".
#
# This means loglevel.DEBUG is NOT working at the moment!
# debug_msg = "This is a debug message"
# obj_init.log.debug(debug_msg)
obj_init.save()
obj_test = BibWorkflowObjectLog.query.filter(
BibWorkflowObjectLog.id_object == obj_init.id).all()
messages_found = 0
for current_obj in obj_test:
if current_obj.message == info_msg and messages_found == 0:
messages_found += 1
elif current_obj.message == err_msg and messages_found == 1:
messages_found += 1
self.assertEqual(2, messages_found)
def test_workflow_for_running_object(self):
"""Test workflow with running object given and watch it fail."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start_by_oids
from invenio_workflows.errors import WorkflowObjectVersionError
obj_running = BibWorkflowObject()
obj_running.set_data(1234)
obj_running.save(version=ObjectVersion.RUNNING)
try:
start_by_oids(
'demo_workflow', [
obj_running.id], module_name="unit_tests")
except Exception as e:
self.assertTrue(isinstance(e, WorkflowObjectVersionError))
obj_running.delete(e.id_object)
obj_running.delete(obj_running)
obj_running = BibWorkflowObject()
obj_running.set_data(1234)
obj_running.save(version=ObjectVersion.RUNNING)
try:
start_by_oids(
'demo_workflow', [
obj_running.id], module_name="unit_tests")
except Exception as e:
self.assertTrue(isinstance(e, WorkflowObjectVersionError))
obj_running.delete(e.id_object)
obj_running.delete(obj_running)
obj_running = BibWorkflowObject()
obj_running.set_data(1234)
obj_running.save(version=5)
try:
start_by_oids('demo_workflow', [obj_running.id],
module_name="unit_tests")
except Exception as e:
self.assertTrue(isinstance(e, WorkflowObjectVersionError))
obj_running.delete(e.id_object)
obj_running.delete(obj_running)
def test_continue_execution_for_object(self):
"""Test continuing execution of workflow for object given."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, continue_oid
initial_data = 1
# testing restarting from previous task
init_workflow = start("demo_workflow",
data=[initial_data],
module_name="unit_tests")
obj_halted = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == init_workflow.uuid,
BibWorkflowObject.version == ObjectVersion.WAITING
).first()
self.assertTrue(obj_halted)
self.assertEqual(1, obj_halted.get_data())
# Try to restart, we should halt again actually.
continue_oid(oid=obj_halted.id, start_point="restart_task",
module_name="unit_tests")
self.assertEqual(1, obj_halted.get_data())
self.assertEqual(ObjectVersion.WAITING, obj_halted.version)
# We skip to next part, this should work
continue_oid(oid=obj_halted.id, module_name="unit_tests")
self.assertEqual(19, obj_halted.get_data())
self.assertEqual(ObjectVersion.COMPLETED, obj_halted.version)
# Let's do that last task again, shall we?
continue_oid(oid=obj_halted.id, start_point="restart_prev",
module_name="unit_tests")
self.assertEqual(37, obj_halted.get_data())
self.assertEqual(ObjectVersion.COMPLETED, obj_halted.version)
def test_restart_workflow(self):
"""Test restarting workflow for given workflow id."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, start_by_wid
initial_data = 1
init_workflow = start(workflow_name="demo_workflow",
data=[initial_data],
module_name="unit_tests")
init_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == init_workflow.uuid
).order_by(BibWorkflowObject.id).all()
self.assertEqual(2, len(init_objects))
restarted_workflow = start_by_wid(wid=init_workflow.uuid,
module_name="unit_tests")
# We expect the same workflow to be re-started
self.assertTrue(init_workflow.uuid == restarted_workflow.uuid)
restarted_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == restarted_workflow.uuid
).order_by(BibWorkflowObject.id).all()
# This time we should only have one more initial object
self.assertEqual(2, len(restarted_objects))
# Last object will be INITIAL
self.assertEqual(ObjectVersion.INITIAL, restarted_objects[1].version)
self.assertEqual(restarted_objects[1].id_parent,
restarted_objects[0].id)
def test_restart_failed_workflow(self):
"""Test restarting workflow for given workflow id."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.api import start, start_by_oids
from invenio_workflows.errors import WorkflowError
initial_data = BibWorkflowObject.create_object()
initial_data.set_data(1)
initial_data.save()
self.assertRaises(
WorkflowError,
start,
workflow_name="demo_workflow_error",
data=[initial_data],
module_name="unit_tests"
)
self.assertEqual(initial_data.version, ObjectVersion.ERROR)
restarted_workflow = start_by_oids("demo_workflow",
oids=[initial_data.id],
module_name="unit_tests")
self.assertEqual(initial_data.version, ObjectVersion.WAITING)
self.assertEqual(restarted_workflow.status, WorkflowStatus.HALTED)
def _check_workflow_execution(self, objects, initial_data):
"""Test correct workflow execution."""
from invenio_workflows.models import ObjectVersion
# Let's check that we found anything. There should only be one object
self.assertEqual(len(objects), 1)
parent_object = objects[0]
        # The parent object should be the halted version
self.assertEqual(ObjectVersion.HALTED, parent_object.version)
        # The child object should have the initial data
self.assertEqual(initial_data, objects[0].child_objects[0].get_data())
# Fetch final object which should exist
final_object = objects[0].child_objects[0]
self.assertTrue(final_object)
class TestWorkflowTasks(WorkflowTasksTestCase):
"""Test meant for testing the the generic tasks available."""
def setUp(self):
"""Setup tests."""
self.create_registries()
def tearDown(self):
"""Clean up tests."""
from invenio_workflows.models import Workflow
self.delete_objects(
Workflow.get(Workflow.module_name == "unit_tests").all())
self.cleanup_registries()
def test_logic_tasks_restart(self):
"""Test that the logic tasks work correctly when restarted."""
from invenio_workflows.models import BibWorkflowObject
from invenio_workflows.api import (start,
start_by_wid)
test_object = BibWorkflowObject()
test_object.set_data(0)
test_object.save()
# Initial run
workflow = start('demo_workflow_logic', [test_object],
module_name="unit_tests")
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
# Reset before re-starting (reset Iterator data)
workflow.reset_extra_data()
workflow = start_by_wid(workflow.uuid)
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
def test_logic_tasks_continue(self):
"""Test that the logic tasks work correctly when continuing."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import (start,
continue_oid)
from invenio_workflows.engine import WorkflowStatus
test_object = BibWorkflowObject()
test_object.set_data(0)
test_object.save()
workflow = start('demo_workflow_logic', [test_object],
module_name="unit_tests")
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(6, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(9, test_object.get_data())
self.assertEqual("gte9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(15, test_object.get_data())
self.assertEqual("gte9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
self.assertEqual(WorkflowStatus.COMPLETED, workflow.status)
def test_workflow_without_workflow_object_saved(self):
"""Test that the logic tasks work correctly."""
from invenio_workflows.models import BibWorkflowObject
from invenio_workflows.api import start, start_by_wid
test_object = BibWorkflowObject()
test_object.set_data(0)
test_object.save()
workflow = start(
'demo_workflow_logic',
[test_object],
module_name="unit_tests")
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
start_by_wid(workflow.uuid)
test_object.delete(test_object.id)
def test_workflow_task_results(self):
"""Test the setting and getting of task results."""
from invenio_workflows.models import BibWorkflowObject
test_object = BibWorkflowObject()
test_object.save() # Saving is needed to instantiate default values
test_object.add_task_result("test", {"data": "testing"})
results = test_object.get_tasks_results()
self.assertEqual(len(results.get("test")), 1)
result_item = results.get("test")[0]
self.assertEqual({"data": "testing"},
result_item.get("result"))
self.assertEqual("workflows/results/default.html",
result_item.get("template"))
self.assertEqual("test",
result_item.get("name"))
| gpl-2.0 | 8,674,019,974,697,286,000 | 40.325428 | 381 | 0.62826 | false |
google/fruit | tests/meta/test_algos.py | 2 | 1446 | #!/usr/bin/env python3
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import parameterized
from fruit_test_common import *
COMMON_DEFINITIONS = '''
#define IN_FRUIT_CPP_FILE 1
#include "meta/common.h"
#include <fruit/impl/meta/algos.h>
'''
class TestAlgos(parameterized.TestCase):
def test_HasDuplicates(self):
source = '''
int main() {
AssertNot(HasDuplicates(Vector<>));
AssertNot(HasDuplicates(Vector<Int<0>>));
AssertNot(HasDuplicates(Vector<Int<0>, Int<1>>));
Assert(HasDuplicates(Vector<Int<0>, Int<0>>));
Assert(HasDuplicates(Vector<Int<2>, Int<0>, Int<1>, Int<0>, Int<3>>));
}
'''
expect_success(
COMMON_DEFINITIONS,
source,
locals())
if __name__ == '__main__':
absltest.main()
| apache-2.0 | -6,990,743,023,058,683,000 | 32.627907 | 84 | 0.637621 | false |
hb9kns/PyBitmessage | src/singleinstance.py | 1 | 3449 | #! /usr/bin/env python
import atexit
import errno
from multiprocessing import Process
import os
import sys
import state
try:
import fcntl # @UnresolvedImport
except:
pass
class singleinstance:
"""
Implements a single instance application by creating a lock file at appdata.
This is based upon the singleton class from tendo https://github.com/pycontribs/tendo
which is under the Python Software Foundation License version 2
"""
def __init__(self, flavor_id="", daemon=False):
self.initialized = False
self.counter = 0
self.daemon = daemon
self.lockPid = None
self.lockfile = os.path.normpath(os.path.join(state.appdata, 'singleton%s.lock' % flavor_id))
if not self.daemon and not state.curses:
# Tells the already running (if any) application to get focus.
import bitmessageqt
bitmessageqt.init()
self.lock()
self.initialized = True
atexit.register(self.cleanup)
def lock(self):
if self.lockPid is None:
self.lockPid = os.getpid()
if sys.platform == 'win32':
try:
# file already exists, we try to remove (in case previous execution was interrupted)
if os.path.exists(self.lockfile):
os.unlink(self.lockfile)
self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR | os.O_TRUNC)
except OSError:
type, e, tb = sys.exc_info()
if e.errno == 13:
print 'Another instance of this application is already running'
sys.exit(-1)
print(e.errno)
raise
else:
pidLine = "%i\n" % self.lockPid
os.write(self.fd, pidLine)
else: # non Windows
self.fp = open(self.lockfile, 'a+')
try:
if self.daemon and self.lockPid != os.getpid():
fcntl.lockf(self.fp, fcntl.LOCK_EX) # wait for parent to finish
else:
fcntl.lockf(self.fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
self.lockPid = os.getpid()
except IOError:
print 'Another instance of this application is already running'
sys.exit(-1)
else:
pidLine = "%i\n" % self.lockPid
self.fp.truncate(0)
self.fp.write(pidLine)
self.fp.flush()
def cleanup(self):
if not self.initialized:
return
if self.daemon and self.lockPid == os.getpid():
# these are the two initial forks while daemonizing
try:
if sys.platform == 'win32':
if hasattr(self, 'fd'):
os.close(self.fd)
else:
fcntl.lockf(self.fp, fcntl.LOCK_UN)
except Exception, e:
pass
return
print "Cleaning up lockfile"
try:
if sys.platform == 'win32':
if hasattr(self, 'fd'):
os.close(self.fd)
os.unlink(self.lockfile)
else:
fcntl.lockf(self.fp, fcntl.LOCK_UN)
if os.path.isfile(self.lockfile):
os.unlink(self.lockfile)
except Exception, e:
pass
| mit | -6,075,821,453,669,778,000 | 33.148515 | 101 | 0.52421 | false |
maxolasersquad/orthosie | inventory/api_views.py | 1 | 6690 | # Copyright 2013 Jack David Baucum
#
# This file is part of Orthosie.
#
# Orthosie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Orthosie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Orthosie. If not, see <http://www.gnu.org/licenses/>.
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import viewsets
from rest_framework.decorators import api_view, detail_route
from rest_framework.response import Response
from rest_framework.reverse import reverse
from inventory.serializers import ItemSerializer, GrocerySerializer
from inventory.serializers import ProduceSerializer, VendorSerializer
from inventory.models import Item, Grocery, Produce, Vendor
@api_view(['GET'])
def api_root(request, format=None):
"""
The entry endpoint of our API.
"""
return Response({
'item': reverse('item-list', request=request),
'grocery': reverse('grocery-list', request=request),
'produce': reverse('produce-list', request=request),
'vendor': reverse('vendor-list', request=request)
})
class ItemViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows items to be viewed or edited.
"""
queryset = Item.objects.all()
serializer_class = ItemSerializer
class GroceryViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groceries to be viewed or edited.
"""
queryset = Grocery.objects.all()
serializer_class = GrocerySerializer
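    # Each update_* route below reads a single form field of the same name from
    # request.POST (e.g. POST {'vendor': 'Acme'} to .../update_vendor/), saves
    # the change and returns the refreshed serialized grocery.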
@detail_route(
methods=['post']
)
def update_vendor(self, request, *args, **kwargs):
grocery = self.get_object()
try:
vendor = Vendor.objects.get(name=request.POST['vendor'])
except ObjectDoesNotExist:
vendor = Vendor(name=request.POST['vendor'])
vendor.save()
grocery.vendor = vendor
grocery.save()
grocery = self.get_object()
serializer = self.get_serializer(grocery)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_name(self, request, *args, **kwargs):
grocery = self.get_object()
grocery.name = request.POST['name']
grocery.save()
grocery = self.get_object()
serializer = self.get_serializer(grocery)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_price(self, request, *args, **kwargs):
grocery = self.get_object()
grocery.price = request.POST['price']
grocery.save()
grocery = self.get_object()
serializer = self.get_serializer(grocery)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_taxable(self, request, *args, **kwargs):
grocery = self.get_object()
grocery.taxable = (request.POST['taxable'].lower() == 'true')
grocery.save()
grocery = self.get_object()
serializer = self.get_serializer(grocery)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_scalable(self, request, *args, **kwargs):
grocery = self.get_object()
grocery.scalable = (request.POST['scalable'].lower() == 'true')
grocery.save()
grocery = self.get_object()
serializer = self.get_serializer(grocery)
return Response(serializer.data)
class ProduceViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows produce to be viewed or edited.
"""
queryset = Produce.objects.all()
serializer_class = ProduceSerializer
@detail_route(
methods=['post']
)
def update_name(self, request, *args, **kwargs):
produce = self.get_object()
produce.name = request.POST['name']
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_variety(self, request, *args, **kwargs):
produce = self.get_object()
produce.variety = request.POST['variety']
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_size(self, request, *args, **kwargs):
produce = self.get_object()
produce.size = request.POST['size']
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_botanical(self, request, *args, **kwargs):
produce = self.get_object()
produce.botanical = request.POST['botanical']
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_price(self, request, *args, **kwargs):
produce = self.get_object()
produce.price = request.POST['price']
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_taxable(self, request, *args, **kwargs):
produce = self.get_object()
produce.taxable = (request.POST['taxable'].lower() == 'true')
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
@detail_route(
methods=['post']
)
def update_scalable(self, request, *args, **kwargs):
produce = self.get_object()
produce.scalable = (request.POST['scalable'].lower() == 'true')
produce.save()
produce = self.get_object()
serializer = self.get_serializer(produce)
return Response(serializer.data)
class VendorViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows vendors to be viewed or edited.
"""
queryset = Vendor.objects.all()
serializer_class = VendorSerializer
| gpl-3.0 | -4,458,075,924,592,598,500 | 30.706161 | 73 | 0.63423 | false |
KevinVDVelden/7DRL_2015 | Game/GameUtil.py | 1 | 17700 | import random
import math
from Util import *
import ECS
import Cheats
import GameComponents
import GameData
import Actions
import Window
def LoadEntities( self ):
self.playerAction = None
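    # TurnTaker callback for the player: hands back the action queued in
    # self.playerAction (clearing it) and occasionally regenerates one point of
    # health while acting.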
def playerAction( __, _, wasBlocked, curTurn ):
if self.playerAction is not None:
char = GameData.Player.getComponent( GameComponents.Character )
if random.random() < 0.0625:
char.attributes[ 'Health' ] = min( char.attributes[ 'Health' ] + 1, char.attributes[ 'baseHealth' ] )
ret = self.playerAction
self.playerAction = None
return ret
GameData.Player = ECS.Entity()
GameData.Player.addComponent( ECS.Components.Position( int( GameData.CenterPos[ 0 ] ), int( GameData.CenterPos[1] ) ) )
GameData.Player.addComponent( GameComponents.Character( GameData.TypeDefinitions['enemy']['player'] ) )
GameData.Player.addComponent( GameComponents.CharacterRenderer( GameData.TypeDefinitions['enemy']['player'] ) )
GameData.Player.addComponent( GameComponents.TurnTaker( playerAction ) )
GameData.Player.isPlayer = True
GameData.PlayerInventory = GameComponents.Inventory( 8 )
GameData.Player.addComponent( GameData.PlayerInventory )
GameData.PlayerInventory.addItem( GameData.TypeDefinitions['item']['item_explosive'], 1 )
def updateInventoryCallback():
GameData.PlayerInventory.inventory[0][1] = 99
GameData.PlayerInventory.updateCallback = updateInventoryCallback
GameData.PlayerInventory.addItem( GameData.TypeDefinitions['item']['item_explosive'], 1 )
GameData.PlayerPosition = GameData.Player.getComponent( ECS.Components.Position )
self.world.addEntity( GameData.Player )
#Drop key in room
key = CreateEntity( 'item_explosive_special' )
key.addComponent( ECS.Components.Position( int( GameData.TileCount[0] / 2 ), int( GameData.TileCount[1] / 2 ) + 3 ) )
self.world.addEntity( key )
#Spawn some enemies
spawnables = []
for n in GameData.TypeDefinitions['enemy_base']:
if n == 'enemy_base':
continue
for count in range( GameData.TypeDefinitions['enemy_base'][n].spawnChance ):
spawnables.append( n )
curSurface = ( GameData.MapGen_CenterRoom_Size[0] * 2 ) * ( GameData.MapGen_CenterRoom_Size[1] * 2 )
curRadius = -1
def setFixedWall( x, y ):
_buffer[ I( int( x ), int( y ) ) ] = TILE_FIXED_WALL
circleNum = 0
while curRadius < GameData.MapGen_MaxCircleRadius:
sectionCount = max( circleNum * GameData.MapGen_CircleSectionsPerLayer, 1 )
nextSurface = curSurface + ( GameData.MapGen_BaseSurface * sectionCount )
nextRadius = int( math.sqrt( nextSurface / math.pi ) )
sectionAngle = math.pi * 2 / sectionCount
def getPointInsection( curSection ):
r = random.randrange( curRadius, nextRadius )
angle = ( curSection + random.random() ) * sectionAngle
return ( int( math.sin( angle ) * r + GameData.CenterPos[0] ), int( math.cos( angle ) * r + GameData.CenterPos[1] ) )
def isPointValid( point ):
return ( GameData.Map.get( *point ) == TILE_AIR
and abs( point[0] - GameData.CenterPos[0] ) > GameData.MapGen_CenterRoom_Size[0]
and abs( point[1] - GameData.CenterPos[1] ) > GameData.MapGen_CenterRoom_Size[1] )
for curSection in range( sectionCount ):
spawnsRemaining = circleNum * GameData.MapGen_MobsPerLevelIncrease
while True:
point = getPointInsection( curSection )
if isPointValid( point ):
key = CreateEntity( 'item_explosive_special' )
key.addComponent( ECS.Components.Position( *point ) )
self.world.addEntity( key )
break
for attempt in range( int( spawnsRemaining * 2.5 ) ):
point = getPointInsection( curSection )
if isPointValid( point ):
spawnsRemaining -= 1
ent = CreateEntity( random.choice( spawnables ) )
ent.getComponent( GameComponents.Character ).onRemove = lambda ent: GameData.IncrementCounter( 'MonsterDeath' )
ent.addComponent( ECS.Components.Position( *point ) )
ent.active = False
self.world.addEntity( ent )
if spawnsRemaining <= 0:
break
curRadius = nextRadius
curSurface = int( curRadius ** 2 * math.pi )
circleNum += 1
#i = -4
#for n in [ 'enemy_slime', 'enemy_ranged_mook_2', 'enemy_ranged_mook_1' ]:
# ent = CreateEntity( n )
# ent.addComponent( ECS.Components.Position( int( GameData.TileCount[0] / 2 ) + i, int( GameData.TileCount[1] / 2 ) - 3 ) )
# self.world.addEntity( ent )
# i += 1
def HandleExplosions( self, explosionList ):
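    # Overview: every firing Explosive entity casts rays outward from its tile; a
    # ray loses strength per tile travelled and per unit of tile hardness it
    # breaks through. Tiles whose toughness is exhausted become air (or their
    # onDestruction result) with an explosion effect, and each Character takes
    # damage proportional to the square root of the number of rays reaching it.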
hitTiles = {}
positionMapping = None
for explosionEnt in explosionList:
explosive = explosionEnt.getComponent( GameComponents.Explosive )
if not explosive.isFiring:
continue
if positionMapping is None:
positionMapping = {}
for ent in self.world.getEntityByComponent( ECS.Components.Position, GameComponents.Character ):
pos = ent.getComponent( ECS.Components.Position )
pos = ( pos.x, pos.y )
if pos not in positionMapping:
positionMapping[ pos ] = [ 0 ]
positionMapping[ pos ].append( ent )
explosionPosition = explosionEnt.getComponent( ECS.Components.Position )
explosionPosition = ( explosionPosition.x, explosionPosition.y )
def handleRay( targetX, targetY ):
curExplosionStrength = explosive.strength
lastPos = ( explosionPosition[0], explosionPosition[1] )
def handleBlock( x, y ): #Return value is whether or not to continue
nonlocal curExplosionStrength
if curExplosionStrength <= 0:
return False
curToughness = TileTypes[ GameData.Map.get( x, y ) ].hardness
if curToughness is None: #Unbreakable block
return False
nonlocal lastPos
if abs( lastPos[0] - x ) + abs( lastPos[1] - y ) == 2:
curExplosionStrength -= 7.07106781187 # sqrt(2)*5
else:
curExplosionStrength -= 5
lastPos = ( x, y )
if ( x, y ) in positionMapping:
entList = positionMapping[ ( x, y ) ]
entList[0] += 1
if ( x, y ) in hitTiles:
curToughness = hitTiles[ ( x, y ) ]
else:
hitTiles[ ( x, y ) ] = curToughness
if curExplosionStrength > curToughness:
hitTiles[ ( x, y ) ] = 0
curExplosionStrength -= curToughness
return True
else: # curToughness >= curExplosionStrength
hitTiles[ ( x, y ) ] = curToughness - curExplosionStrength
curExplosionStrength = 0
return False
for n in LineIter( explosionPosition[0], explosionPosition[1], int( explosionPosition[0] + targetX ), int( explosionPosition[1] + targetY ) ):
yield handleBlock( *n )
yield False
rays = []
for i in range( explosive.rayPerSquare ):
s = math.sin( i * math.pi / 2 / explosive.rayPerSquare )
c = math.cos( i * math.pi / 2 / explosive.rayPerSquare )
rays.append( handleRay( s * 200 + 20 * random.random() - 10, c * 200 + 20 * random.random() - 10 ) )
rays.append( handleRay( -s * 200 + 20 * random.random() - 10, c * 200 + 20 * random.random() - 10 ) )
rays.append( handleRay( s * 200 + 20 * random.random() - 10, -c * 200 + 20 * random.random() - 10 ) )
rays.append( handleRay( -s * 200 + 20 * random.random() - 10, -c * 200 + 20 * random.random() - 10 ) )
newRays = []
while len( rays ) > 0:
for n in rays:
if next( n ):
newRays.append( n )
rays = newRays
newRays = []
explosive.onFire()
if positionMapping is not None:
hitEntities = [ n for n in positionMapping.values() if n[0] > 0 ]
for n in hitEntities:
damage = math.sqrt( n[0] )
for hitEnt in n[1:]:
hitEnt.getComponent( GameComponents.Character ).takeDamage( damage )
if len( hitTiles ) > 0:
for tilePos in hitTiles:
if hitTiles[ tilePos ] == 0:
tileType = TileTypes[ GameData.Map.get( tilePos[0], tilePos[1] ) ]
targetType = TILE_AIR
if hasattr( tileType, 'onDestruction' ):
targetType = tileType.onDestruction( *tilePos )
GameData.Map.set( tilePos[0], tilePos[1], targetType )
effect = ECS.Entity()
effect.addComponent( ECS.Components.Position( *tilePos ) )
effect.addComponent( GameComponents.ExplosionRenderer() )
self.world.addEntity( effect )
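# Maps a key from a type definition's 'components' field to a factory that
# builds the corresponding ECS component(s); consumed by CreateEntity below.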
CreateEntityComponentMapping = {
'item': ( lambda definition, args: GameComponents.Item( *args ) ),
'specialbomb': ( lambda definition, args: GameComponents.SpecialExplosive( *args ) ),
'character': ( lambda definition, _: ( GameComponents.Character( definition ), GameComponents.CharacterRenderer( definition ) ) ),
'baseAI': ( lambda definition, _: GameComponents.TurnTaker( ai = GameComponents.TurnTakerAi() ) ),
'proximity': ( lambda definition, radius: GameComponents.ProximityBomb( radius ) ),
'sticky': ( lambda definition, distance: GameComponents.StickyBomb( distance ) ),
'explosiveRenderer': ( lambda definition, _: GameComponents.BombRenderer( definition ) ),
}
def CreateEntity( definition ):
if isinstance( definition, str ):
definition = GameData.TypeDefinitions[''][ definition ]
if Cheats.Verbose:
print( 'CreateEntity %s' % str( definition ) )
ent = ECS.Entity()
if definition.has( 'explosion_rayStrength' ) and definition.has( 'explosion_rayCount' ):
exp = GameComponents.Explosive( int( definition.explosion_rayCount / 4 ), definition.explosion_rayStrength )
ent.addComponent( exp )
if definition.has( 'explosion_delay' ):
ent.addComponent( GameComponents.TurnTaker( ai = GameComponents.BombAi( definition.explosion_delay ) ) )
if 'item' in definition.baseType:
ent.addComponent( GameComponents.Item( definition ) )
if definition.has( 'components' ):
try:
for comp in definition.components:
factory = CreateEntityComponentMapping[ comp ]
createComps = factory( definition, definition.components[ comp ] )
try:
for createComp in createComps:
ent.addComponent( createComp )
except TypeError:
ent.addComponent( createComps )
except KeyError as e:
print( 'Exception: ' + repr( e ) )
#TODO Make a nice factory set up out of this
if definition.has( 'image' ) and not ent.hasComponent( ECS.Components.Renderer ):
img = GameData.TypeDefinitions['image'][ definition.image ]
ent.addComponent( ECS.Components.Renderer( GameData.AtlasMap[ img.file ], img.key ) )
return ent
def ShowCombineCount( game, recipe, maxCraftable ):
game.actionWindow.guiParts = []
count = 1
def addButton( text, cb ):
nonlocal count
button = Window.Button( LoadFont( 'ButtonFont', '', '' ), text, ( 20, 30 * count ), ( 260, 25 ) )
button.pressCallback = cb
game.actionWindow.guiParts.append( button )
count += 1
def cancel( *_ ):
GameData.PlayerInventory.isDirty = True
addButton( 'Cancel', cancel )
def craft( finalCount ):
inv = GameData.PlayerInventory.inventory
for i in range( finalCount ):
GameData.IncrementCounter( 'SpecialCount' )
if GameData.PlayerInventory.addItem( GameData.TypeDefinitions['item'][recipe.result], 1 ) == 0:
notDropped = dict( [ (n,1) for n in recipe.items ] )
while len( notDropped ) > 0:
try:
for n in inv:
if inv[n][0].name in notDropped:
del notDropped[inv[n][0].name]
GameData.PlayerInventory.dropItem( n, 1 )
except:
pass
addButton( 'Craft 1', lambda *_: craft( 1 ) )
if maxCraftable > 4: addButton( 'Craft %d' % int( maxCraftable / 2 ), lambda *_: craft( int( maxCraftable / 2 )) )
if maxCraftable > 2: addButton( 'Craft %d' % maxCraftable, lambda *_: craft( maxCraftable ) )
def ShowCombineButton( game ):
game.actionWindow.guiParts = []
count = 1
def addButton( text, cb ):
nonlocal count
button = Window.Button( LoadFont( 'ButtonFont', '', '' ), text, ( 20, 30 * count ), ( 260, 25 ) )
button.pressCallback = cb
game.actionWindow.guiParts.append( button )
count += 1
def cancel( *_ ):
GameData.PlayerInventory.isDirty = True
addButton( 'Cancel', cancel )
selected = game.inventorySlot
definition = GameData.PlayerInventory.inventory[selected][0]
defName = definition.name
for recipeName in GameData.TypeDefinitions['recipe']:
recipe = GameData.TypeDefinitions['recipe'][recipeName]
recipeResult = GameData.TypeDefinitions['item'][recipe.result]
maxCount = recipeResult.maxStackSize
typedCount = {}
for name in recipe.items:
typedCount[name] = 0
for i in GameData.PlayerInventory.inventory:
item = GameData.PlayerInventory.inventory[i][0]
if item.name == name:
typedCount[name] += GameData.PlayerInventory.inventory[i][1]
maxCraftable = min( [ typedCount[n] for n in typedCount ] )
if maxCraftable > maxCount:
maxCraftable = maxCount
if maxCraftable > 0:
def getButtonCb( recipe, maxCraftable ):
def buttonCb( *_ ):
return ShowCombineCount( game, recipe, maxCraftable )
return buttonCb
addButton( '%s (max %d)' % ( recipeResult.displayname, maxCraftable ), getButtonCb( recipe, maxCraftable ) )
fontInventoryCount = LoadFont( 'InventoryCount', 'data/framd.ttf', 8 )
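# Refreshes the hotbar slot renderers and rebuilds the action window (combine,
# throw, drop and destroy buttons) for the currently selected inventory slot.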
def UpdateInventory( game ):
inventory = GameData.PlayerInventory
selected = game.inventorySlot
for i in range( min( inventory.inventorySize, game.hotbar.slotCount ) ):
if i in inventory.inventory:
def renderSlot( screen, pos ):
item = inventory.inventory[i]
if item[0].has( 'image' ):
img = GameData.TypeDefinitions['image'][item[0].image]
GameData.AtlasMap[ img.file ].render( img.key, screen, pos[0] + 2, pos[1] + 6 )
render = RenderFont( fontInventoryCount, str( item[1] ), ( 255, 255, 255 ) )
screen.blit( render, ( pos[0] + 6, pos[1] + 44 - render.get_height() ) )
game.hotbar.updateSlot( i, renderSlot, 2 if i == selected else 1 )
else:
game.hotbar.updateSlot( i, None, 0 )
#Update action window
if selected == -1 or selected not in inventory.inventory:
game.actionWindow.guiParts = []
else:
definition = inventory.inventory[selected][0]
defName = definition.name
game.actionWindow.guiParts = [ Window.Text( LoadFont( 'NameFont', '', '' ), definition.displayname, ( 20, 5 ), ( 260, 25 ) ) ]
count = 1
def addButton( text, cb ):
nonlocal count
button = Window.Button( LoadFont( 'ButtonFont', '', '' ), text, ( 20, 30 * count ), ( 260, 25 ) )
button.pressCallback = cb
game.actionWindow.guiParts.append( button )
count += 1
# Combine button
for recipeName in GameData.TypeDefinitions['recipe']:
recipe = GameData.TypeDefinitions['recipe'][recipeName]
if defName in recipe.items:
addButton( 'Combine', lambda *_: ShowCombineButton( game ) )
break
# Action buttons
if definition.has( 'use' ):
uses = definition.use
if isinstance( uses, str ):
uses = ( uses, )
if 'throw' in uses:
addButton( 'Throw (Click on target)', None )
if 'drop' in uses:
addButton( 'Drop explosive (E)', lambda *_: game.dropItem() )
curCount = inventory.inventory[selected][1]
if curCount < 99 and not ( inventory.inventory[selected][0].has( 'indestructible' ) and inventory.inventory[selected][0].indestructible ):
addButton( 'Destroy item (1)', lambda *_: inventory.dropItem( selected, 1 ) )
if curCount > 10:
addButton( 'Destroy item (%d)' % int( curCount / 2 ), lambda *_: inventory.dropItem( selected, int( curCount / 2 ) ) )
if curCount > 5:
addButton( 'Destroy item (%d)' % curCount, lambda *_: inventory.dropItem( selected, curCount ) )
| gpl-2.0 | -9,095,789,401,380,697,000 | 39.045249 | 154 | 0.584294 | false |
matt-tingen/debug | debug.py | 1 | 3419 | from functools import wraps
from pprint import pprint
class Debug:
enabled = True
allow_now = True
def __init__(self):
class on:
def __init__(self, parent):
self.parent = parent
def __call__(self):
self.parent.enabled = True
def __enter__(self):
self.parent.enabled = True
def __exit__(self, exc_type, exc_value, traceback):
self.parent.enabled = False
class off:
def __init__(self, parent):
self.parent = parent
def __call__(self):
self.parent.enabled = False
def __enter__(self):
self.parent.enabled = False
self.parent.allow_now = False
def __exit__(self, exc_type, exc_value, traceback):
self.parent.enabled = True
self.parent.allow_now = True
self.on = on(self)
self.off = off(self)
def now(self, val, *args, **kwargs):
if self.allow_now:
if callable(val) and not args and not kwargs:
return self._decorate(val, True)
else:
prev_status = self.enabled
self.enabled = True
self.out(val, *args, **kwargs)
self.enabled = prev_status
def out(self, val, *args, **kwargs):
if self.enabled:
if 'pretty' in kwargs and kwargs['pretty']:
p = pprint
else:
p = print
try:
del kwargs['pretty']
except KeyError:
pass
p(val, *args, **kwargs)
def _decorate(self, subject, skip_enable_check):
@wraps(subject)
def print_args_and_return(*args, **kwargs):
if (skip_enable_check or self.enabled) and self.allow_now:
print(subject.__name__, 'called ', end='')
if args or kwargs:
print('with ', end='')
else:
print('without arguments')
if args:
print('args:')
pprint(args)
if kwargs:
print('kwargs:')
pprint(kwargs)
# We have to call the function after the arguments have been
# printed in case any of them get mutated in the function.
return_val = subject(*args, **kwargs)
if return_val:
print('returned:')
pprint(return_val)
if args or kwargs or return_val:
print('')
return return_val
else:
return subject(*args, **kwargs)
return print_args_and_return
def __enter__(self):
self.enabled = True
def __exit__(self, exc_type, exc_value, traceback):
self.enabled = False
def __call__(self, val, *args, **kwargs):
# Apply decorator regardless of `enabled`.
# `enabled` will be checked each time the function is called so `debug`
# can be toggled freely independent of when the decorator is applied.
if callable(val) and not args and not kwargs:
return self._decorate(val, False)
elif self.enabled:
self.out(val, *args, **kwargs)
debug = Debug() | mit | -1,933,733,955,191,477,200 | 28.230769 | 79 | 0.485815 | false |
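# Illustrative usage sketch (placeholder names such as x, f and noisy_function
# are hypothetical):
#   from debug import debug
#   debug('value:', x)              # printed only while debugging is enabled
#   debug(some_dict, pretty=True)   # pretty-printed via pprint
#   @debug
#   def f(a, b): ...                # logs calls, arguments and return values
#   with debug.off:                 # silence all output inside the block
#       noisy_function()
#   debug.now('always shown')       # bypasses the enabled flag, unless inside debug.off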
bolkedebruin/airflow | airflow/utils/log/gcs_task_handler.py | 1 | 7505 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from urllib.parse import urlparse
from cached_property import cached_property
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin
class GCSTaskHandler(FileTaskHandler, LoggingMixin):
"""
GCSTaskHandler is a python log handler that handles and reads
task instance logs. It extends airflow FileTaskHandler and
uploads to and reads from GCS remote storage. Upon log reading
failure, it reads from host machine's local disk.
"""
def __init__(self, base_log_folder, gcs_log_folder, filename_template):
super().__init__(base_log_folder, filename_template)
self.remote_base = gcs_log_folder
self.log_relative_path = ''
self._hook = None
self.closed = False
self.upload_on_close = True
@cached_property
def hook(self):
"""
Returns GCS hook.
"""
remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID')
try:
from airflow.providers.google.cloud.hooks.gcs import GCSHook
return GCSHook(
google_cloud_storage_conn_id=remote_conn_id
)
except Exception as e: # pylint: disable=broad-except
self.log.error(
'Could not create a GoogleCloudStorageHook with connection id '
'"%s". %s\n\nPlease make sure that airflow[gcp] is installed '
'and the GCS connection exists.', remote_conn_id, str(e)
)
def set_context(self, ti):
super().set_context(ti)
# Log relative path is used to construct local and remote
# log path to upload log files into GCS and read from the
# remote location.
self.log_relative_path = self._render_filename(ti, ti.try_number)
self.upload_on_close = not ti.raw
def close(self):
"""
Close and upload local log file to remote storage GCS.
"""
# When application exit, system shuts down all handlers by
# calling close method. Here we check if logger is already
# closed to prevent uploading the log to remote storage multiple
# times when `logging.shutdown` is called.
if self.closed:
return
super().close()
if not self.upload_on_close:
return
local_loc = os.path.join(self.local_base, self.log_relative_path)
remote_loc = os.path.join(self.remote_base, self.log_relative_path)
if os.path.exists(local_loc):
# read log and remove old logs to get just the latest additions
with open(local_loc, 'r') as logfile:
log = logfile.read()
self.gcs_write(log, remote_loc)
# Mark closed so we don't double write if close is called twice
self.closed = True
def _read(self, ti, try_number, metadata=None):
"""
Read logs of given task instance and try_number from GCS.
        If that fails, read the log from the task instance's host machine.
:param ti: task instance object
:param try_number: task instance try_number to read logs from
:param metadata: log metadata,
            can be used for streaming log reading and auto-tailing.
"""
# Explicitly getting log relative path is necessary as the given
# task instance might be different than task instance passed in
# in set_context method.
log_relative_path = self._render_filename(ti, try_number)
remote_loc = os.path.join(self.remote_base, log_relative_path)
try:
remote_log = self.gcs_read(remote_loc)
log = '*** Reading remote log from {}.\n{}\n'.format(
remote_loc, remote_log)
return log, {'end_of_log': True}
except Exception as e: # pylint: disable=broad-except
log = '*** Unable to read remote log from {}\n*** {}\n\n'.format(
remote_loc, str(e))
self.log.error(log)
local_log, metadata = super()._read(ti, try_number)
log += local_log
return log, metadata
def gcs_read(self, remote_log_location):
"""
Returns the log found at the remote_log_location.
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
"""
bkt, blob = self.parse_gcs_url(remote_log_location)
return self.hook.download(bkt, blob).decode('utf-8')
def gcs_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
"""
if append:
try:
old_log = self.gcs_read(remote_log_location)
log = '\n'.join([old_log, log]) if old_log else log
except Exception as e: # pylint: disable=broad-except
if not hasattr(e, 'resp') or e.resp.get('status') != '404': # pylint: disable=no-member
log = '*** Previous log discarded: {}\n\n'.format(str(e)) + log
try:
bkt, blob = self.parse_gcs_url(remote_log_location)
from tempfile import NamedTemporaryFile
with NamedTemporaryFile(mode='w+') as tmpfile:
tmpfile.write(log)
# Force the file to be flushed, since we're doing the
# upload from within the file context (it hasn't been
# closed).
tmpfile.flush()
self.hook.upload(bkt, blob, tmpfile.name)
except Exception as e: # pylint: disable=broad-except
self.log.error('Could not write logs to %s: %s', remote_log_location, e)
@staticmethod
def parse_gcs_url(gsurl):
"""
Given a Google Cloud Storage URL (gs://<bucket>/<blob>), returns a
tuple containing the corresponding bucket and blob.
"""
parsed_url = urlparse(gsurl)
if not parsed_url.netloc:
raise AirflowException('Please provide a bucket name')
else:
bucket = parsed_url.netloc
blob = parsed_url.path.strip('/')
return bucket, blob
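
# Illustrative example of the bucket/blob split performed by parse_gcs_url above
# (not part of the original handler; the class name GCSTaskHandler is assumed
# for this excerpt):
#
#   >>> GCSTaskHandler.parse_gcs_url('gs://my-log-bucket/dag_id/task_id/1.log')
#   ('my-log-bucket', 'dag_id/task_id/1.log')
#
# A URL without a bucket, e.g. 'gs:///dag_id/1.log', raises AirflowException.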
| apache-2.0 | 4,151,379,373,028,525,000 | 40.010929 | 104 | 0.619054 | false |
cprov/snapcraft | tests/unit/commands/__init__.py | 1 | 3601 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import os
from textwrap import dedent
from click.testing import CliRunner
from snapcraft import storeapi
from snapcraft.cli._runner import run
from tests import fixture_setup, unit
_sample_keys = [
{
"name": "default",
"sha3-384": (
"vdEeQvRxmZ26npJCFaGnl-VfGz0lU2jZZkWp_s7E-RxVCNtH2_mtjcxq2NkDKkIp"
),
},
{
"name": "another",
"sha3-384": (
"JsfToV5hO2eN9l89pYYCKXUioTERrZIIHUgQQd47jW8YNNBskupiIjWYd3KXLY_D"
),
},
]
def get_sample_key(name):
for key in _sample_keys:
if key["name"] == name:
return key
raise KeyError(name)
def mock_snap_output(command, *args, **kwargs):
if command == ["snap", "keys", "--json"]:
return json.dumps(_sample_keys)
elif command[:2] == ["snap", "export-key"]:
if not command[2].startswith("--account="):
raise AssertionError("Unhandled command: {}".format(command))
account_id = command[2][len("--account=") :]
name = command[3]
# This isn't a full account-key-request assertion, but it's enough
# for testing.
return dedent(
"""\
type: account-key-request
account-id: {account_id}
name: {name}
public-key-sha3-384: {sha3_384}
"""
).format(
account_id=account_id, name=name, sha3_384=get_sample_key(name)["sha3-384"]
)
else:
raise AssertionError("Unhandled command: {}".format(command))
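
# Behaviour of the mock above at a glance (illustrative comment, not part of the
# original helpers; the values follow directly from _sample_keys):
#
#   mock_snap_output(["snap", "keys", "--json"])
#       -> json.dumps(_sample_keys)
#   mock_snap_output(["snap", "export-key", "--account=abcd", "default"])
#       -> an account-key-request document with account-id "abcd", name "default"
#          and public-key-sha3-384 equal to get_sample_key("default")["sha3-384"]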
class CommandBaseTestCase(unit.TestCase):
def setUp(self):
super().setUp()
self.runner = CliRunner()
def run_command(self, args, **kwargs):
return self.runner.invoke(run, args, catch_exceptions=False, **kwargs)
class LifecycleCommandsBaseTestCase(CommandBaseTestCase):
yaml_template = """name: {step}-test
version: 1.0
summary: test {step}
description: if the {step} is successful the state file will be updated
confinement: strict
grade: stable
parts:
{parts}"""
yaml_part = """ {step}{iter:d}:
plugin: nil"""
def make_snapcraft_yaml(self, step, n=1, yaml_part=None, create=False):
if not yaml_part:
yaml_part = self.yaml_part
parts = "\n".join([yaml_part.format(step=step, iter=i) for i in range(n)])
super().make_snapcraft_yaml(self.yaml_template.format(step=step, parts=parts))
parts = []
for i in range(n):
part_dir = os.path.join(self.parts_dir, "{}{}".format(step, i))
state_dir = os.path.join(part_dir, "state")
parts.append({"part_dir": part_dir, "state_dir": state_dir})
return parts
class StoreCommandsBaseTestCase(CommandBaseTestCase):
def setUp(self):
super().setUp()
self.fake_store = fixture_setup.FakeStore()
self.useFixture(self.fake_store)
self.client = storeapi.StoreClient()
| gpl-3.0 | -9,074,133,153,076,883,000 | 29.260504 | 87 | 0.630103 | false |
anarang/robottelo | tests/foreman/endtoend/utils.py | 1 | 1566 | """Module that aggregates common bits of the end to end tests."""
from robottelo.vm import VirtualMachine
AK_CONTENT_LABEL = u'rhel-6-server-rhev-agent-rpms'
class ClientProvisioningMixin(object):
def client_provisioning(
self, activation_key_name, organization_label,
package_name='python-kitchen'):
"""Provision a Satellite's client.
Do the following:
1. Install Katello CA cert on the client
2. Register the client using Activation Key
3. Install a package on the client served by the Satellite server.
:param activation_key_name: Name of the Activation Key to register.
:param organization_label: Organization label where the Activation Key
is available.
:param package_name: Name of the package to be installed on the client.
"""
with VirtualMachine(distro='rhel66') as vm:
# Pull rpm from Foreman server and install on client
vm.install_katello_ca()
# Register client with foreman server using act keys
result = vm.register_contenthost(
activation_key_name, organization_label)
self.assertEqual(result.return_code, 0)
# Install rpm on client
result = vm.run('yum install -y {0}'.format(package_name))
self.assertEqual(result.return_code, 0)
# Verify that the package is installed by querying it
result = vm.run('rpm -q {0}'.format(package_name))
self.assertEqual(result.return_code, 0)
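
# Illustrative use of the mixin above (a sketch, not from the original module;
# the activation key and organization label are placeholders):
#
#   class EndToEndTestCase(TestCase, ClientProvisioningMixin):
#       def test_client_provisioning(self):
#           self.client_provisioning('my-activation-key', 'MyOrgLabel')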
| gpl-3.0 | 1,101,187,075,575,255,000 | 42.5 | 79 | 0.644317 | false |
NoYouShutup/CryptMeme | CryptMeme/apps/sam/Demos/streamTests/samOut.py | 1 | 2837 | #!/usr/bin/python
# open an I2P stream destination
# then open another stream that connects to the destination created by samForward.py or samIn.py
# then send bytes through the stream
# usage :
# ./samOut.py [ silent [ sessionName ] ]
#
# silent : should the first incoming message after the connection request contain the connection status message (true or false)
# sessionName : session id (default : "testOutStream")
import socket
import sys
import time
if len(sys.argv)>=2 :
silent = " SILENT="+sys.argv[1]
else : silent = " SILENT=false"
if len(sys.argv)>=3 :
name = " ID="+sys.argv[2]
else : name = " ID=testOutStream"
sess = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
sess.connect(("127.0.0.1",7656));
sess.send("HELLO VERSION MIN=3.0 MAX=3.0\n")
sys.stdout.write(sess.recv(1000))
sess.send("SESSION CREATE STYLE=STREAM"+name+" DESTINATION=EYUpJFeW9tiubXR0aOjvCJ~ndj3xN0Wn-ljuGdbpOEttPg7nj0VCTOQDJ~FAolzn9FIDdmR3VjM0OFFDT46Q5HN4vShXFE2VNC8e3~GjzxJfaJhijRC2R9oIOzsNlzKtInD2o9lh0PxPioNMCigwmgWuqlQHs4tjWeaYRAtooHxbrtuoCIhIdGfyVV-nAcPiyYbouKq3leETXE~4kBXm-LfWfyPtrv6OuDk3GBVVcthv19GYBmnl2YI8HpJjc-G-TvNkgYishjzIJyEW-Xrpy43R4ZBXlyQqnheGLlbOEY8NLDbyNHLRMMOGbcr~67SVE3Iw3RqQ3Dhrkq2FCaQwcDucfIUCCbOfCZgu0hlnCkS42xsUvegQeiwMxbdI~h9v7vcR3yFFOrHX6WQvIZSbFLKNGArGJcfmOJVLqw1wTC4AgYXjk3csVDPd-QWbMXOuodyBgrg27Ds2BBYTsVXWskoo6ASsMIQZ6jMfL7PkY9dPLCRParIyzb9aPmf~MntNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABHNqwgkhJnBW4ymaRsdVmITAha-ff0UiALfKSlznqp5HcSewgMHbzQ0I01TQytFnW\n")
sys.stdout.write(sess.recv(1000))
sock = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("127.0.0.1",7656));
sock.send("HELLO VERSION MIN=3.0 MAX=3.0\n")
sys.stdout.write(sock.recv(1000))
sock.send("STREAM CONNECT"+name+" DESTINATION=tYhjbFlFL38WFuO5eCzTvE0UBr4RfaqWMKlekGeMoB-Ouz7nYaWfiS-9j3jMiZT7FH~pwdmoSREOs2ZbXK84sR59P~pPfeCMxnJrk57f3U9uKzXkesjkKWYco3YAGs-G8sw8Fu2FBx0Do57yBdA9~j8Zq6pMjmgPBXCLuXG3vo0Z8zUWCjApJyFY6OXYopHck9Fz9vKy7YhC6zXFHfEuNHVkAooduiLd~aCoGij0TW3lH2rTVU-lx-DUdi6edxQ5-RvDNkXfikvytoCpRkivbNVytjCJLk~7RNU4FpBD20wTZWNJmEG3OY3cjNjawJVFdNjtgczh9K7gZ7ad-NjVjZVhXEj1lU8mk~vAH-2QE5om8dstWUwWoNDwmVDlvIJNKzQmahG~VrpFexFHXO0n3fKIXcSgWGOHDExM8w9neCt7AxUjxPDtXXuYNW~bRwcfiL-C9~z4K9rmwiTPZX0lmsToSXTF28l7WAoj~TMT9kZAjQeFRRWU5oW5oxVuonVvAAAA"+silent+"\n")
# wait for acknowledgement before sending data, if we asked for it
if (silent==" SILENT=false") :
sys.stdout.write(sock.recv(1000))
for i in range(1,11) :
sock.send(str(i)+'\n')
buf=sock.recv(1000)
sys.stdout.write(str(i)+' '+buf)
if not buf : break
print
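
# For reference (illustrative note, not part of the original script): on a
# successful run the SAMv3 bridge answers the requests above with lines of the form
#   HELLO REPLY RESULT=OK VERSION=3.0
#   SESSION STATUS RESULT=OK DESTINATION=...
#   STREAM STATUS RESULT=OK
# after which the loop prints whatever the peer sends back for each of the ten writes.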
| mit | -655,332,114,216,717,400 | 53.557692 | 947 | 0.827282 | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/ca/capolicy_binding.py | 1 | 3874 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class capolicy_binding(base_resource):
""" Binding class showing the resources that can be bound to capolicy_binding.
"""
def __init__(self) :
self._name = ""
self.capolicy_csvserver_binding = []
self.capolicy_lbvserver_binding = []
self.capolicy_caglobal_binding = []
@property
def name(self) :
"""Name of the content adaptation policy to be displayed.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the content adaptation policy to be displayed.
"""
try :
self._name = name
except Exception as e:
raise e
@property
def capolicy_csvserver_bindings(self) :
"""csvserver that can be bound to capolicy.
"""
try :
return self._capolicy_csvserver_binding
except Exception as e:
raise e
@property
def capolicy_caglobal_bindings(self) :
"""caglobal that can be bound to capolicy.
"""
try :
return self._capolicy_caglobal_binding
except Exception as e:
raise e
@property
def capolicy_lbvserver_bindings(self) :
"""lbvserver that can be bound to capolicy.
"""
try :
return self._capolicy_lbvserver_binding
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(capolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.capolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(self, service, name) :
""" Use this API to fetch capolicy_binding resource.
"""
try :
if type(name) is not list :
obj = capolicy_binding()
obj.name = name
response = obj.get_resource(service)
			else :
				if name and len(name) > 0 :
					response = [None] * len(name)
					obj = [capolicy_binding() for _ in range(len(name))]
					for i in range(len(name)) :
						obj[i].name = name[i]
						response[i] = obj[i].get_resource(service)
return response
except Exception as e:
raise e
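
# Illustrative fetch using the classmethod above (a sketch; the nitro_service
# import path, address and credentials are assumptions, not taken from this file):
#
#   from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#   client = nitro_service("10.0.0.1", "http")
#   client.login("nsroot", "nsroot")
#   binding = capolicy_binding.get(client, "my_ca_policy")
#   # binding.capolicy_lbvserver_bindings then lists lbvservers bound to the policy.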
class capolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.capolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.capolicy_binding = [capolicy_binding() for _ in range(length)]
| apache-2.0 | 4,032,117,685,726,313,000 | 27.910448 | 118 | 0.697212 | false |
commonsense/divisi2 | setup.py | 1 | 3743 | #!/usr/bin/env python
"""Divisi2: Commonsense Reasoning over Semantic Networks
Divisi2 is a library for reasoning by analogy and association over
semantic networks, including common sense knowledge. Divisi uses a
sparse higher-order SVD and can help find related concepts, features,
and relation types in any knowledge base that can be represented as a
semantic network. By including common sense knowledge from ConceptNet,
the results can include relationships not expressed in the original
data but related by common sense. See http://divisi.media.mit.edu/ for
more info."""
VERSION = "2.2.5"
try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension
import os.path, sys
from stat import ST_MTIME
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: C',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Topic :: Scientific/Engineering',
'Topic :: Software Development',
'Topic :: Text Processing :: Linguistic',]
### Check for the existence of NumPy.
try:
import numpy
except ImportError:
print >>sys.stderr, """This package requires NumPy.
On a Debian / Ubuntu system, you can run:
sudo apt-get install python-numpy python-dev
Otherwise it will probably suffice to:
sudo easy_install numpy
"""
sys.exit(1)
CYTHON_OUT = 'svdlib/_svdlib.c'
CYTHON_SRC = 'svdlib/_svdlib.pyx'
### Update the Cython file, if necessary.
def get_modification_time(filename):
return os.stat(filename)[ST_MTIME]
try:
if not os.path.exists(CYTHON_OUT) or get_modification_time(CYTHON_SRC) > get_modification_time(CYTHON_OUT):
try:
# Try building the Cython file
print 'Building Cython source'
from Cython.Compiler.Main import compile
res = compile(CYTHON_SRC)
if res.num_errors > 0:
print >>sys.stderr, "Error building the Cython file."
sys.exit(1)
except ImportError:
print >>sys.stderr, 'Warning: Skipped building the Cython file.'
print >>sys.stderr, ' The svdlib source file is more recent than the Cython output file, but'
print >>sys.stderr, ' you seem to lack Cython, so skipping rebuilding it.'
raw_input('Press Enter to acknowledge. ')
except OSError:
print >>sys.stderr, 'Warning: Skipped building the Cython file.'
svdlibc = Extension(
name='divisi2._svdlib',
sources=[
CYTHON_OUT,
'svdlib/svdwrapper.c',
'svdlib/las2.c',
'svdlib/svdlib.c',
'svdlib/svdutil.c',
],
include_dirs=[numpy.get_include(), 'svdlib'],
extra_compile_args=['-g'],
extra_link_args=['-g'],
)
doclines = __doc__.split("\n")
setup(
name="Divisi2",
version = VERSION,
maintainer='MIT Media Lab, Software Agents group',
maintainer_email='[email protected]',
url='http://divisi.media.mit.edu/',
license = "http://www.gnu.org/copyleft/gpl.html",
platforms = ["any"],
description = doclines[0],
classifiers = classifiers,
long_description = "\n".join(doclines[2:]),
ext_modules = [svdlibc],
packages=['divisi2', 'divisi2.algorithms', 'divisi2.test', 'divisi2.test.eval'],
package_data = {'divisi2': ['data/graphs/*', 'data/eval/*', 'data/matrices/*']},
install_requires=['csc-utils >= 0.6.1', 'networkx', 'csc-pysparse'],
)
| gpl-3.0 | 481,974,323,982,936 | 33.33945 | 111 | 0.662303 | false |
coderjames/pascal | quex-0.63.1/quex/engine/generator/mega_state/core.py | 1 | 11790 | from quex.blackboard import setup as Setup, E_StateIndices
from quex.engine.analyzer.mega_state.template.state import TemplateState
from quex.engine.analyzer.mega_state.path_walker.state import PathWalkerState
from quex.engine.analyzer.mega_state.core import MegaState_Target_DROP_OUT
from quex.engine.generator.state.transition.code import TextTransitionCode
import quex.engine.generator.state.drop_out as drop_out_coder
import quex.engine.generator.state.entry as entry_coder
import quex.engine.generator.state.transition.core as transition_block
import quex.engine.generator.mega_state.template as template
import quex.engine.generator.mega_state.path_walker as path_walker
from quex.engine.generator.languages.address import get_label, get_address
from quex.engine.generator.languages.variable_db import variable_db
from quex.engine.interval_handling import Interval
import sys
class Handler:
def __init__(self, TheState):
if isinstance(TheState, PathWalkerState):
self.require_data = path_walker.require_data
self.framework = path_walker.framework
self.state_key_str = "path_iterator - path_walker_%s_path_base" % TheState.index
self.debug_drop_out_str = "__quex_debug_path_walker_drop_out(%i, path_walker_%s_path_base, path_iterator);\n" \
% (TheState.index, TheState.index)
elif isinstance(TheState, TemplateState):
self.require_data = template.require_data
self.framework = template.framework
self.state_key_str = "state_key"
self.debug_drop_out_str = "__quex_debug_template_drop_out(%i, state_key);\n" % TheState.index
else:
assert False
self.state = TheState
def debug_info_map_state_key_to_state_index(self, txt):
txt.append("# define __QUEX_DEBUG_MAP_STATE_KEY_TO_STATE(X) ( \\\n")
for state_index in self.state.implemented_state_index_list()[:-1]:
state_key = self.state.map_state_index_to_state_key(state_index)
txt.append(" (X) == %i ? %i : \\\n" % (state_key, state_index))
state_index = self.state.implemented_state_index_list()[-1]
state_key = self.state.map_state_index_to_state_key(state_index)
txt.append(" (X) == %i ? %i : 0)" % (state_key, state_index))
if isinstance(self.state, PathWalkerState):
txt.append("\n# define __QUEX_DEBUG_MAP_PATH_BASE_TO_PATH_ID(PB) ( \\\n")
for path_id in xrange(len(self.state.path_list) - 1):
txt.append(" (PB) == path_walker_%i_path_%i ? %i : \\\n" \
% (self.state.index, path_id, path_id))
path_id = len(self.state.path_list) - 1
txt.append(" (PB) == path_walker_%i_path_%i ? %i : 0)" \
% (self.state.index, path_id, path_id))
def debug_info_undo_map_state_key_to_state_index(self, txt):
txt.append("\n# undef __QUEX_DEBUG_MAP_STATE_KEY_TO_STATE\n")
if isinstance(self.state, PathWalkerState):
txt.append("# undef __QUEX_DEBUG_MAP_PATH_BASE_TO_PATH_ID\n")
def do(txt, TheState, TheAnalyzer):
specific = Handler(TheState)
specific.debug_info_map_state_key_to_state_index(txt)
# (*) Entry _______________________________________________________________
entry_coder.do(txt, TheState, TheAnalyzer)
# (*) Access input character etc. _________________________________________
specific.framework(txt, TheState, TheAnalyzer)
# (*) Transition Map ______________________________________________________
prepare_transition_map(TheState, TheAnalyzer, specific.state_key_str)
transition_block.do(txt,
TheState.transition_map,
TheState.index,
TheAnalyzer.engine_type,
TheState.init_state_f,
TheAnalyzer = TheAnalyzer)
# (*) Drop Out ____________________________________________________________
drop_out_scheme_implementation(txt, TheState, TheAnalyzer,
specific.state_key_str,
specific.debug_drop_out_str)
# (*) Request necessary variable definition _______________________________
specific.require_data(TheState, TheAnalyzer)
specific.debug_info_undo_map_state_key_to_state_index(txt)
return
def drop_out_scheme_implementation(txt, TheState, TheAnalyzer, StateKeyString, DebugString):
"""DropOut Section:
The drop out section is the place where we come if the transition map
does not trigger to another state. We also land here if the reload fails.
The routing to the different drop-outs of the related states happens by
means of a switch statement, e.g.
_4711: /* Address of the drop out */
switch( state_key ) {
case 0:
... drop out of state 815 ...
case 1:
... drop out of state 541 ...
}
The switch statement is not necessary if all drop outs are the same,
of course.
"""
LanguageDB = Setup.language_db
# (*) Central Label for the Templates Drop Out
# (The rules for having or not having a label here are complicated,
# so rely on the label's usage database.)
txt.append("%s:\n" % get_label("$drop-out", TheState.index))
txt.append(" %s\n" % DebugString)
def implement_prototype(StateIndices, TheAnalyzer):
# There **must** be at least one element, at this point in time
assert len(StateIndices) != 0
prototype_i = StateIndices.__iter__().next()
prototype = TheAnalyzer.state_db[prototype_i]
result = []
drop_out_coder.do(result, prototype, TheAnalyzer, \
DefineLabelF=False, MentionStateIndexF=False)
return result
# (*) Drop Out Section(s)
if TheState.drop_out.uniform_f:
# uniform drop outs => no 'switch-case' required
txt.extend(implement_prototype(TheState.implemented_state_index_list(), TheAnalyzer))
return
# non-uniform drop outs => route by 'state_key'
case_list = []
for drop_out, state_index_set in TheState.drop_out.iteritems():
# state keys related to drop out
state_key_list = map(lambda i: TheState.map_state_index_to_state_key(i), state_index_set)
# drop out action
# Implement drop-out for each state key. 'state_key_list' combines
# states that implement the same drop-out behavior. Same drop-outs
# are implemented only once.
case_list.append( (state_key_list, implement_prototype(state_index_set, TheAnalyzer)) )
case_txt = LanguageDB.SELECTION(StateKeyString, case_list)
LanguageDB.INDENT(case_txt)
txt.extend(case_txt)
def prepare_transition_map(TheState, TheAnalyzer, StateKeyStr):
"""Generate targets in the transition map which the code generation can
handle. The transition map will consist of pairs of
(Interval, TextTransitionCode)
objects.
NOTE: A word about the reload procedure.
    Reload can end either with success (new data has been loaded) or failure
    (no more data available). In case of success **only** the transition
    step has to be repeated. Nothing else is affected. Stored positions are
adapted automatically.
By convention we redo the transition map, in case of reload success and
jump to the state's drop-out in case of failure. There is no difference
here in the template state example.
"""
# Transition map of the 'skeleton'
if TheState.transition_map_empty_f:
# Transition Map Empty:
# This happens, for example, if there are only keywords and no
# 'overlaying' identifier pattern. But, in this case also, there
# must be something that catches the 'buffer limit code'.
# => Define an 'all drop out' trigger_map, and then later
# => Adapt the trigger map, so that the 'buffer limit' is an
# isolated single interval.
TheState.transition_map = [ (Interval(-sys.maxint, sys.maxint), MegaState_Target_DROP_OUT) ]
for i, info in enumerate(TheState.transition_map):
interval, target = info
new_target = prepare_target(target, TheState, TheAnalyzer.state_db, StateKeyStr)
TheState.transition_map[i] = (interval, new_target)
return
def prepare_target(Target, TheState, StateDB, StateKeyStr):
LanguageDB = Setup.language_db
if Target.drop_out_f:
code = LanguageDB.GOTO_DROP_OUT(TheState.index)
return E_StateIndices.DROP_OUT
elif Target.target_state_index is not None:
        # NOTE: Not all transitions from 'x' to 'Target.target_state_index' may
# be relevant. For example, if the transition lies on a uniform path
# which is implemented by the MegaState. The MegaState indicates
# the irrelevance by deleting the transition_id.
# HOWEVER: If no transition_id is found, then transition_map is erroneous!
for from_state_index in TheState.implemented_state_index_list():
target_entry = StateDB[Target.target_state_index].entry
door_id = target_entry.get_door_id(Target.target_state_index, from_state_index)
if door_id is not None:
return TextTransitionCode([LanguageDB.GOTO_BY_DOOR_ID(door_id)])
else:
assert False, "TransitionID was not resolved in target state's entry."
elif Target.target_door_id is not None:
return TextTransitionCode([LanguageDB.GOTO_BY_DOOR_ID(Target.target_door_id)])
elif Target.scheme is not None:
label = "template_%i_target_%i[%s]" % (TheState.index, Target.scheme_id, StateKeyStr)
code = LanguageDB.GOTO_BY_VARIABLE(label)
require_scheme_variable(Target.scheme_id, Target.scheme, TheState, StateDB)
return TextTransitionCode([code])
else:
assert False
def require_scheme_variable(SchemeID, Scheme, TState, StateDB):
"""Defines the transition targets for each involved state. Note, that recursion
is handled as part of the general case, where all involved states target
a common door of the template state.
"""
LanguageDB = Setup.language_db
def get_code(AdrList):
return "".join(["{ "] + map(lambda adr: "QUEX_LABEL(%i), " % adr, AdrList) + [" }"])
assert len(Scheme) == len(TState.implemented_state_index_list())
def address(Target, StateKey, TheState):
if Target == E_StateIndices.DROP_OUT:
# All drop outs end up at the end of the transition map, where
# it is routed via the state_key to the state's particular drop out.
return get_address("$drop-out", TState.index, U=True, R=True)
else:
from_state_index = TheState.map_state_key_to_state_index(StateKey)
door_id = StateDB[Target].entry.get_door_id(Target, FromStateIndex=from_state_index)
return LanguageDB.ADDRESS_BY_DOOR_ID(door_id)
address_list = [ address(target_index, i, TState) for i, target_index in enumerate(Scheme) ]
variable_db.require_array("template_%i_target_%i",
ElementN = len(TState.implemented_state_index_list()),
Initial = get_code(address_list),
Index = (TState.index, SchemeID))
| bsd-2-clause | -6,559,487,895,619,935,000 | 47.122449 | 123 | 0.613401 | false |
roghu/py3_projects | tests/Numbers/test_credit_card_validator.py | 1 | 1051 | import pytest
from src.Numbers.credit_card_validator import CardValidator, CardError
class TestCreditCard:
def test_valiator_success(self):
card = CardValidator(4556821195558787)
assert card.__str__() == "************8787"
assert card.__repr__() == "<Credit Card: ************8787>"
def test_37_prefix_success(self):
card = CardValidator(372898003509418)
assert card.__str__() == "************418"
assert card.__repr__() == "<Credit Card: ************418>"
def test_length_failure(self):
with pytest.raises(CardError) as ex:
CardValidator(4000)
assert "'Card is invalid.'" in str(ex.value)
def test_luhn_number_failure(self):
with pytest.raises(CardError) as ex:
CardValidator(4123456789012345)
assert "'Card is invalid.'" in str(ex.value)
def test_number_prefix_failure(self):
with pytest.raises(CardError) as ex:
CardValidator(1000000000000000)
assert "'Card is invalid.'" in str(ex.value)
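
# For reference, a minimal sketch of the Luhn check these tests exercise (an
# assumption about CardValidator's internals, not code taken from src/):
def _luhn_is_valid(number):
    digits = [int(d) for d in str(number)]
    # Double every second digit from the right, subtracting 9 when the double exceeds 9.
    for i in range(len(digits) - 2, -1, -2):
        doubled = digits[i] * 2
        digits[i] = doubled - 9 if doubled > 9 else doubled
    return sum(digits) % 10 == 0
# e.g. _luhn_is_valid(4556821195558787) is True, _luhn_is_valid(4123456789012345) is False.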
| mit | -7,047,360,668,135,310,000 | 34.033333 | 70 | 0.603235 | false |
scitokens/scitokens | tests/test_scitokens.py | 1 | 12413 |
"""
Test cases for the Validator and Enforcer classes from the scitokens module.
"""
import os
import sys
import time
import unittest
import cryptography.hazmat.backends
import cryptography.hazmat.primitives.asymmetric.rsa
# Allow unittests to be run from within the project base.
if os.path.exists("src"):
sys.path.append("src")
if os.path.exists("../src"):
sys.path.append("../src")
import scitokens
class TestValidation(unittest.TestCase):
"""
Tests related to the Validator object.
"""
def test_valid(self):
"""
Basic unit test coverage of the Validator object.
"""
def always_accept(value):
"""
A validator that accepts any value.
"""
if value or not value:
return True
validator = scitokens.Validator()
validator.add_validator("foo", always_accept)
token = scitokens.SciToken()
token["foo"] = "bar"
self.assertTrue(validator.validate(token))
self.assertTrue(validator(token))
class TestEnforcer(unittest.TestCase):
"""
Unit tests for the SciToken's Enforcer object.
"""
_test_issuer = "https://scitokens.org/unittest"
@staticmethod
def always_accept(value):
if value or not value:
return True
def setUp(self):
"""
Setup a sample token for testing the enforcer.
"""
now = time.time()
private_key = cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=cryptography.hazmat.backends.default_backend()
)
self._token = scitokens.SciToken(key=private_key)
self._token["foo"] = "bar"
self._token["iat"] = int(now)
self._token["exp"] = int(now + 600)
self._token["iss"] = self._test_issuer
self._token["nbf"] = int(now)
# Scitoken v2
self._token2 = scitokens.SciToken(key=private_key)
self._token2["ver"] = "scitoken:2.0"
self._token2["foo"] = "bar"
self._token2["iat"] = int(now)
self._token2["exp"] = int(now + 600)
self._token2["iss"] = self._test_issuer
self._token2["nbf"] = int(now)
self._token2['wlcg.groups'] = ['groupA', 'groupB']
self._token2["aud"] = "ANY"
def test_enforce_v2(self):
"""
Test the Enforcer object for profile scitokens:2.0
Also, there is a non-validated attribute, foo. In 1.0, non-validated attributes
cause a validation error. In 2.0, they are ignored.
"""
with self.assertRaises(scitokens.scitokens.EnforcementError):
print(scitokens.Enforcer(None))
# No audience specified
enf = scitokens.Enforcer(self._test_issuer)
self.assertFalse(enf.test(self._token2, "read", "/"), msg=enf.last_failure)
self._token2["scp"] = "read:/"
self.assertFalse(enf.test(self._token2, "read", "/"), msg=enf.last_failure)
# Token is set to to ANY, so any audience will work
enf = scitokens.Enforcer(self._test_issuer, audience = "https://example.unl.edu")
self._token2["scp"] = "read:/"
self.assertTrue(enf.test(self._token2, "read", "/"), msg=enf.last_failure)
# Change the audience from ANY to https://example.com
self._token2["aud"] = "https://example.com"
self.assertFalse(enf.test(self._token2, "read", "/"), msg=enf.last_failure)
# Change back to ANY
self._token2["aud"] = "ANY"
self.assertTrue(enf.test(self._token2, "read", "/"), msg=enf.last_failure)
self._token2["scp"] = "read:/foo/bar"
self.assertFalse(enf.test(self._token2, "read", "/foo"), msg=enf.last_failure)
self.assertFalse(enf.test(self._token2, "write", "/foo/bar"), msg=enf.last_failure)
with self.assertRaises(scitokens.scitokens.InvalidPathError):
print(enf.test(self._token2, "write", "~/foo"))
def test_v2(self):
"""
Test the requirements for a v2
"""
# First, delete the aud
del self._token2["aud"]
enf = scitokens.Enforcer(self._test_issuer, audience="https://example.unl.edu")
self._token2["scope"] = "read:/foo/bar"
# Should fail, audience is required for 2.0 token
self.assertFalse(enf.test(self._token2, "read", "/foo/bar"), msg=enf.last_failure)
# Now set the audience to ANY
self._token2["aud"] = "ANY"
self.assertTrue(enf.test(self._token2, "read", "/foo/bar"), msg=enf.last_failure)
# Now to the correct audience
self._token2["aud"] = "https://example.unl.edu"
self.assertTrue(enf.test(self._token2, "read", "/foo/bar"), msg=enf.last_failure)
# Now to the wrong audience
self._token2["aud"] = "https://example.com"
self.assertFalse(enf.test(self._token2, "read", "/foo/bar"), msg=enf.last_failure)
# Arbitrary claims are allowed now in v2
self._token2["madeupclaim"] = "claimsdontmatter"
self._token2["aud"] = "ANY"
self.assertTrue(enf.test(self._token2, "read", "/foo/bar"), msg=enf.last_failure)
# Arbitrary claims should fail in 1.0
self._token["madeupclaim"] = "claimsdontmatter"
self._token["aud"] = "ANY"
self.assertFalse(enf.test(self._token, "read", "/foo/bar"), msg=enf.last_failure)
def test_enforce(self):
"""
Test the Enforcer object.
"""
with self.assertRaises(scitokens.scitokens.EnforcementError):
print(scitokens.Enforcer(None))
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", self.always_accept)
self.assertFalse(enf.test(self._token, "read", "/"), msg=enf.last_failure)
self._token["scp"] = "read:/"
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
enf = scitokens.Enforcer(self._test_issuer, audience = "https://example.unl.edu")
enf.add_validator("foo", self.always_accept)
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
self._token["scp"] = "read:/foo/bar"
self.assertFalse(enf.test(self._token, "read", "/foo"), msg=enf.last_failure)
self.assertTrue(enf.test(self._token, "read", "/foo/bar"), msg=enf.last_failure)
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", self.always_accept)
self.assertTrue(enf.test(self._token, "read", "/foo/bar"), msg=enf.last_failure)
self.assertFalse(enf.test(self._token, "write", "/foo/bar"), msg=enf.last_failure)
with self.assertRaises(scitokens.scitokens.InvalidPathError):
print(enf.test(self._token, "write", "~/foo"))
def test_enforce_scope(self):
"""
Test the Enforcer object.
"""
with self.assertRaises(scitokens.scitokens.EnforcementError):
print(scitokens.Enforcer(None))
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", self.always_accept)
self.assertFalse(enf.test(self._token, "read", "/"), msg=enf.last_failure)
self._token["scope"] = "read:/"
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
enf = scitokens.Enforcer(self._test_issuer, audience = "https://example.unl.edu")
enf.add_validator("foo", self.always_accept)
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
self._token["scope"] = "read:/foo/bar"
self.assertFalse(enf.test(self._token, "read", "/foo"), msg=enf.last_failure)
self.assertTrue(enf.test(self._token, "read", "/foo/bar"), msg=enf.last_failure)
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", self.always_accept)
self.assertTrue(enf.test(self._token, "read", "/foo/bar"), msg=enf.last_failure)
self.assertFalse(enf.test(self._token, "write", "/foo/bar"), msg=enf.last_failure)
with self.assertRaises(scitokens.scitokens.InvalidPathError):
print(enf.test(self._token, "write", "~/foo"))
def test_aud(self):
"""
Test the audience claim
"""
self._token['scp'] = 'read:/'
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", lambda path : True)
self._token['aud'] = "https://example.unl.edu"
self.assertFalse(enf.test(self._token, "read", "/"), msg=enf.last_failure)
enf = scitokens.Enforcer(self._test_issuer, audience = "https://example.unl.edu")
enf.add_validator("foo", lambda path : True)
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
def test_multiple_aud(self):
"""
Test multiple aud
"""
self._token['scp'] = 'read:/'
# Test multiple audiences
enf = scitokens.Enforcer(self._test_issuer, audience = ["https://example.unl.edu", "https://another.com"])
enf.add_validator("foo", self.always_accept)
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
self._token['aud'] = "https://another.com"
self.assertTrue(enf.test(self._token, "read", "/"), msg=enf.last_failure)
self._token['aud'] = "https://doesnotwork.com"
self.assertFalse(enf.test(self._token, "read", "/"), msg=enf.last_failure)
def test_getitem(self):
"""
Test the getters for the SciTokens object.
"""
self.assertEqual(self._token['foo'], 'bar')
with self.assertRaises(KeyError):
print(self._token['bar'])
self.assertEqual(self._token.get('baz'), None)
self.assertEqual(self._token.get('foo', 'baz'), 'bar')
self.assertEqual(self._token.get('foo', 'baz', verified_only=True), 'baz')
self._token.serialize()
self.assertEqual(self._token['foo'], 'bar')
self.assertEqual(self._token.get('foo', 'baz'), 'bar')
self.assertEqual(self._token.get('bar', 'baz'), 'baz')
self.assertEqual(self._token.get('bar', 'baz', verified_only=True), 'baz')
self._token['bar'] = '1'
self.assertEqual(self._token.get('bar', 'baz', verified_only=False), '1')
self.assertEqual(self._token.get('bar', 'baz', verified_only=True), 'baz')
def test_gen_acls(self):
"""
Test the generation of ACLs
"""
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", self.always_accept)
self._token['scope'] = 'read:/'
acls = enf.generate_acls(self._token)
self.assertTrue(len(acls), 1)
self.assertEqual(acls[0], ('read', '/'))
self._token['scope'] = 'read:/ write:/foo'
acls = enf.generate_acls(self._token)
self.assertTrue(len(acls), 2)
self.assertTrue(('read', '/') in acls)
self.assertTrue(('write', '/foo') in acls)
self._token['scope'] = 'read:/foo read://bar write:/foo write://bar'
acls = enf.generate_acls(self._token)
self.assertTrue(len(acls), 4)
self.assertTrue(('read', '/foo') in acls)
self.assertTrue(('write', '/foo') in acls)
self.assertTrue(('read', '/bar') in acls)
self.assertTrue(('write', '/bar') in acls)
self._token['exp'] = time.time() - 600
with self.assertRaises(scitokens.scitokens.ClaimInvalid):
print(enf.generate_acls(self._token))
self.assertTrue(enf.last_failure)
self._token['exp'] = time.time() + 600
self._token['scope'] = 'read:foo'
with self.assertRaises(scitokens.scitokens.InvalidAuthorizationResource):
print(enf.generate_acls(self._token))
self._token['scope'] = 'read'
with self.assertRaises(scitokens.scitokens.InvalidAuthorizationResource):
print(enf.generate_acls(self._token))
def test_sub(self):
"""
Verify that tokens with the `sub` set are accepted.
"""
self._token['sub'] = 'Some Great User'
enf = scitokens.Enforcer(self._test_issuer)
enf.add_validator("foo", self.always_accept)
self._token['scope'] = 'read:/'
acls = enf.generate_acls(self._token)
self.assertTrue(len(acls), 1)
if __name__ == '__main__':
unittest.main()
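
# Compact illustration of the scope-to-ACL mapping exercised in test_gen_acls above
# (illustrative only; the issuer and scope values are examples):
#
#   enf = scitokens.Enforcer("https://scitokens.org/unittest")
#   token["scope"] = "read:/foo write:/bar"
#   enf.generate_acls(token)   # -> [("read", "/foo"), ("write", "/bar")]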
| apache-2.0 | -6,480,488,713,578,052,000 | 36.844512 | 114 | 0.596633 | false |
iShoto/testpy | samples/keras_cifar10_intro/test.py | 1 | 9903 | # coding: utf-8
import os
import numpy as np
import matplotlib.pyplot as plt
from scipy.misc import toimage
import pandas as pd
import time
#from sklearn.model_selection import KFold
#from sklearn.model_selection import train_test_split
from keras.datasets import cifar10
from keras.models import Sequential
from keras.models import model_from_json
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras.preprocessing.image import ImageDataGenerator
N_CLASS = 10
N_EPOCH = 50 # 100
BATCH_SIZE = 128
INPUT_DIM = (32, 32, 3)
DATA_AUGMENTATION = True
IDG_PARAM = {'featurewise_center': False,
'samplewise_center': False,
'featurewise_std_normalization': False,
'samplewise_std_normalization': False,
'zca_whitening': True, # False
'rotation_range': 0.,
'width_shift_range': 0.1, # 0.,
'height_shift_range': 0.1, # 0.,
'shear_range': 0.,
'zoom_range': 0.,
'channel_shift_range': 0.,
'fill_mode': 'nearest',
'cval': 0.,
'horizontal_flip': True,
'vertical_flip': False,
'rescale': None,
'preprocessing_function': None
}
DIR = './result/'
MODEL_FILE = 'model.json'
WEIGHT_FILE = 'weights.h5'
HISTORY_DATA_FILE = 'history.csv'
HISTORY_IMAGE_FILE = 'history.jpg'
PARAM_EVAL_FILE = 'param_eval.csv'
class Test:
def __init__(self):
"""
data augmentation
normalize
zca whitening
make validation data from training data
change learning rate on a way
"""
pass
def main(self):
# Training
start = time.clock()
data = self.get_data()
model = self.design_model(data[0])
result = self.train_model(data, model)
self.save(result)
print('Training Time: %s min' % round((time.clock()-start)/60., 1))
print('')
# Test
self.test_model(data)
def get_data(self):
# Load CIFAR-10
(X_train, y_train), (X_test, y_test) = cifar10.load_data()
self.__draw_sample_images(X_train, y_train)
# Normalize data
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255.0
X_test /= 255.0
# Onehot label
Y_train = np_utils.to_categorical(y_train, N_CLASS)
Y_test = np_utils.to_categorical(y_test, N_CLASS)
print('X_train.shape:', X_train.shape, 'Y_train.shape:', Y_train.shape)
print('X_test.shape:', X_test.shape, 'Y_test.shape:', Y_test.shape)
return X_train, Y_train, X_test, Y_test
def __draw_sample_images(self, X_train, y_train, stdout=False):
# Set background color to white
fig = plt.figure()
fig.patch.set_facecolor('white')
# Draw sample images
n_class = 10
pos = 1
for target_class in range(n_class):
# Get index list of a class
target_idx = []
for i in range(len(y_train)):
if y_train[i][0] == target_class:
target_idx.append(i)
# Draw random ten images for each class
np.random.shuffle(target_idx)
for idx in target_idx[:10]:
img = toimage(X_train[idx])
plt.subplot(10, 10, pos)
plt.imshow(img)
plt.axis('off')
pos += 1
plt.savefig(DIR+'cifar10.jpg', dpi=100)
if stdout == True:
plt.show()
def design_model(self, X_train):
# Initialize
model = Sequential()
# (Conv -> Relu) * 2 -> Pool -> Dropout
model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=X_train.shape[1:]))
model.add(Activation('relu'))
model.add(Convolution2D(32, 3, 3, border_mode='same'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
# (Conv -> Relu) * 2 -> Pool -> Dropout
model.add(Convolution2D(64, 3, 3, border_mode='same'))
model.add(Activation('relu'))
model.add(Convolution2D(64, 3, 3, border_mode='same'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
# Flatten
        model.add(Flatten())  # 8*8*64 (border_mode='same' convs; two 2x2 poolings: 32 -> 16 -> 8)
# FC -> Relu -> Dropout
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
# FC -> Softmax
model.add(Dense(N_CLASS))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
        # Output the model summary. The plot() call below stays commented out because
        # `from keras.utils.visualize_util import plot` fails with
        # "ImportError: Failed to import pydot. You must install pydot and graphviz
        # for `pydotprint` to work." unless pydot and graphviz are installed.
        #plot(model, show_shapes=True, to_file=os.path.join(DIR, 'model.png'))
model.summary()
return model
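
    # Shape walk-through for the model above (illustrative note, assuming the
    # 32x32x3 CIFAR-10 input and border_mode='same' throughout):
    #   conv32 -> conv32 -> pool : 16x16x32
    #   conv64 -> conv64 -> pool : 8x8x64
    #   flatten                  : 4096
    #   dense 512 -> dense 10 (softmax)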
def train_model(self, data, model):
X_train, Y_train, X_test, Y_test = data
if not DATA_AUGMENTATION:
print('Not using data augmentation')
# Train the model
history = model.fit(X_train, Y_train,
batch_size=BATCH_SIZE,
nb_epoch=N_EPOCH,
verbose=1,
validation_data=(X_test, Y_test),
shuffle=True)
else:
print('Using real-time data augmentation')
# Make a generator for training data
train_datagen = ImageDataGenerator(featurewise_center=IDG_PARAM['featurewise_center'],
samplewise_center=IDG_PARAM['samplewise_center'],
featurewise_std_normalization=IDG_PARAM['featurewise_std_normalization'],
samplewise_std_normalization=IDG_PARAM['samplewise_std_normalization'],
zca_whitening=IDG_PARAM['zca_whitening'],
rotation_range=IDG_PARAM['rotation_range'],
width_shift_range=IDG_PARAM['width_shift_range'],
height_shift_range=IDG_PARAM['height_shift_range'],
shear_range=IDG_PARAM['shear_range'],
zoom_range=IDG_PARAM['zoom_range'],
channel_shift_range=IDG_PARAM['channel_shift_range'],
fill_mode=IDG_PARAM['fill_mode'],
cval=IDG_PARAM['cval'],
horizontal_flip=IDG_PARAM['horizontal_flip'],
vertical_flip=IDG_PARAM['vertical_flip'],
rescale=IDG_PARAM['rescale'],
preprocessing_function=IDG_PARAM['preprocessing_function'])
train_datagen.fit(X_train)
train_generator = train_datagen.flow(X_train, Y_train, batch_size=BATCH_SIZE)
# Make a generator for test data
test_datagen = ImageDataGenerator(zca_whitening=IDG_PARAM['zca_whitening'])
test_datagen.fit(X_test)
test_generator = test_datagen.flow(X_test, Y_test)
# Train the model
history = model.fit_generator(train_generator,
samples_per_epoch=X_train.shape[0],
nb_epoch=N_EPOCH,
validation_data=test_generator,
nb_val_samples=X_test.shape[0])
# Evaluate the model
if not DATA_AUGMENTATION:
loss, acc = model.evaluate(X_test, Y_test, verbose=0)
else:
loss, acc = model.evaluate_generator(test_generator, val_samples=X_test.shape[0])
print('Test loss: %s, Test acc: %s' % (loss, acc))
result = {'model': model, 'history': history, 'loss': loss, 'acc': acc}
return result
def save(self, result):
"""
Save model, weight, history, parameter and evaluation
"""
model = result['model']
history = result['history']
loss = result['loss']
acc = result['acc']
# Model
model_json = model.to_json()
# Weight
with open(os.path.join(DIR, MODEL_FILE), 'w') as json_file:
json_file.write(model_json)
model.save_weights(os.path.join(DIR, WEIGHT_FILE))
# History
self.__save_history(history)
self.__plot_history(history)
# Param and evaluation
dic = IDG_PARAM
dic.update({'n_epoch': N_EPOCH, 'batch_size': BATCH_SIZE, 'loss': loss, 'acc': acc})
if os.path.exists(DIR+PARAM_EVAL_FILE):
df = pd.read_csv(DIR+PARAM_EVAL_FILE)
df = pd.concat([df, pd.DataFrame([dic])])
else:
df = pd.DataFrame([dic])
df.to_csv(DIR+PARAM_EVAL_FILE, index=False)
def __save_history(self, history, stdout=False):
df = pd.DataFrame()
df['train_loss'] = history.history['loss']
df['train_acc'] = history.history['acc']
df['valid_loss'] = history.history['val_loss']
df['valid_acc'] = history.history['val_acc']
df.to_csv(DIR+HISTORY_DATA_FILE, index=False)
if stdout == True:
print(df)
def __plot_history(self, history, stdout=False):
# Set background color to white
fig = plt.figure()
fig.patch.set_facecolor('white')
fig.set_size_inches(16.0, 9.0, forward=True)
# Plot accuracy history
plt.subplot(1, 2, 1)
plt.plot(history.history['acc'], "o-", label="train_acc")
plt.plot(history.history['val_acc'], "o-", label="valid_acc")
plt.title('model accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.xlim(0)
plt.ylim(0, 1)
plt.legend(loc="lower right")
# Plot loss history
plt.subplot(1, 2, 2)
plt.plot(history.history['loss'], "o-", label="train_loss",)
plt.plot(history.history['val_loss'], "o-", label="valid_loss")
plt.title('model loss')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.xlim(0)
plt.ylim(0, max([history.history['loss'][0], history.history['val_loss'][0]]))
plt.legend(loc='upper right')
plt.savefig(DIR+HISTORY_IMAGE_FILE, dpi=100)
if stdout == True:
plt.show()
def test_model(self, data):
X_train, Y_train, X_test, Y_test = data
model_file = os.path.join(DIR, MODEL_FILE)
weight_file = os.path.join(DIR, WEIGHT_FILE)
with open(model_file, 'r') as fp:
model = model_from_json(fp.read())
model.load_weights(weight_file)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
if not DATA_AUGMENTATION:
loss, acc = model.evaluate(X_test, Y_test, verbose=0)
else:
# Make a generator for test data
test_datagen = ImageDataGenerator(zca_whitening=True)
test_datagen.fit(X_test)
test_generator = test_datagen.flow(X_test, Y_test)
loss, acc = model.evaluate_generator(test_generator, val_samples=X_test.shape[0])
print('Test loss: %s, Test acc: %s' % (loss, acc))
print('')
if __name__ == "__main__":
Test().main() | mit | -2,263,601,383,694,599,700 | 28.215339 | 102 | 0.65051 | false |
minlexx/pyevemon | esi_client/apis/__init__.py | 1 | 1221 | from __future__ import absolute_import
# import apis into api package
from .alliance_api import AllianceApi
from .assets_api import AssetsApi
from .bookmarks_api import BookmarksApi
from .calendar_api import CalendarApi
from .character_api import CharacterApi
from .clones_api import ClonesApi
from .contacts_api import ContactsApi
from .corporation_api import CorporationApi
from .dogma_api import DogmaApi
from .fittings_api import FittingsApi
from .fleets_api import FleetsApi
from .incursions_api import IncursionsApi
from .industry_api import IndustryApi
from .insurance_api import InsuranceApi
from .killmails_api import KillmailsApi
from .location_api import LocationApi
from .loyalty_api import LoyaltyApi
from .mail_api import MailApi
from .market_api import MarketApi
from .opportunities_api import OpportunitiesApi
from .planetary_interaction_api import PlanetaryInteractionApi
from .routes_api import RoutesApi
from .search_api import SearchApi
from .skills_api import SkillsApi
from .sovereignty_api import SovereigntyApi
from .status_api import StatusApi
from .universe_api import UniverseApi
from .user_interface_api import UserInterfaceApi
from .wallet_api import WalletApi
from .wars_api import WarsApi
| gpl-3.0 | -7,706,126,048,192,302,000 | 36 | 62 | 0.837019 | false |
pbailis/bolton-sigmod2013-code | shim-code/tweet/stats.py | 1 | 3682 |
from pylab import *
convolen = -1
convolens = []
eventuals = []
explicits = []
replylens = []
convolendict = {}
bytes_per_explicit = 1
#mpl.rcParams['font.size'] = 12
mpl.rcParams['figure.figsize'] = 4, 1
#mpl.rcParams['lines.markersize'] = 14
#mpl.rcParams['lines.linewidth'] = 1.5
for line in open("conversations.out"):
if line == "\n":
if convolen != -1:
convolens.append(convolen)
if convolen not in convolendict:
convolendict[convolen] = 0
convolendict[convolen] += 1
convolen = 0
else:
convolen += 1
replylens.append(convolen)
explicits.append(convolen*bytes_per_explicit+len(" ".join(line.split()[1:])))
eventuals.append(len(line))
convolens.sort()
replylens.sort()
eventuals.sort()
explicits.sort()
explicitslabels = []
explicitsvalues = []
last = 0
for i in range(0, len(explicits)):
if explicits[i] != last:
explicitsvalues.append(float(last))
explicitslabels.append(float(i)/len(explicits))
last = explicits[i]
eventualslabels = []
eventualsvalues = []
last = 0
for i in range(0, len(eventuals)):
if eventuals[i] != last:
eventualsvalues.append(float(last))
eventualslabels.append(float(i)/len(eventuals))
last = eventuals[i]
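
# (Illustrative note, not in the original script: the two loops above turn the
# sorted byte counts into empirical CDF points -- each time the sorted value
# changes they emit (previous value, fraction of samples before it). For sorted
# data [2, 2, 5] this yields the points (0, 0.0) and (2, 2/3); the largest value
# itself is never emitted.)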
plot(eventualsvalues, eventualslabels, label="Eventual")
plot(explicitsvalues, explicitslabels, "--", label="Explicit (Tweet + KDS)")
xlabel("ECDS Bytes per Write")
ylabel("CDF")
leg = legend(loc="lower right")
fr = leg.get_frame()
fr.set_lw(0)
ax = gca()
ax.xaxis.grid(True, which='major')
ax.xaxis.grid(False, which='minor')
ax.yaxis.grid(True, which='major')
ax.yaxis.grid(False, which='minor')
ax.set_axisbelow(True)
ax = gca()
ax.set_xscale("log")
xlim(xmin=50, xmax=10000)
savefig("twitter-storage.pdf", transparent=True, bbox_inches='tight', pad_inches=.1)
convolenslabels = []
convolensvalues = []
last = 0
for i in range(0, len(convolens)):
if convolens[i] != last:
convolensvalues.append(float(last))
convolenslabels.append(float(i)/len(convolens))
last = convolens[i]
replylenslabels = []
replylensvalues = []
last = 0
for i in range(0, len(replylens)):
if replylens[i] != last:
replylensvalues.append(float(last))
replylenslabels.append(float(i)/len(replylens))
last = replylens[i]
clf()
convolensvalues[0] = convolensvalues[1]
plot(convolensvalues, convolenslabels, label="Conversation Length")
plot(replylensvalues, replylenslabels, "--", label="Message Depth")
xlabel("Tweets")
ylabel("CDF")
leg = legend(loc="lower right")
fr = leg.get_frame()
fr.set_lw(0)
ax = gca()
ax.xaxis.grid(True, which='major')
ax.xaxis.grid(False, which='minor')
ax.yaxis.grid(True, which='major')
ax.yaxis.grid(False, which='minor')
ax.set_axisbelow(True)
ax = gca()
ax.set_xscale("log")
savefig("twitter-convos.pdf", transparent=True, bbox_inches='tight', pad_inches=.1)
print "Explicits:", average(explicits), std(explicits), explicits[int(.99*len(explicits))], max(explicits)
print "Eventuals:", average(eventuals), std(eventuals), eventuals[int(.99*len(eventuals))], max(eventuals)
print "Convos:", average(convolens), std(convolens), convolens[int(.5*len(convolens))], convolens[int(.99*len(convolens))], max(convolens)
print "Depths:", average(replylens), std(replylens), replylens[int(.5*len(replylens))], replylens[int(.99*len(replylens))], max(replylens)
f = open("hist.txt", 'w')
f.write("BlockSize\t1\n")
mkey = max(convolendict.keys())
for i in range(0, mkey):
if i not in convolendict:
f.write("%d\t0\n" % i)
else:
f.write("%d\t%d\n" % (i, convolendict[i]))
f.close()
| apache-2.0 | -303,628,926,099,308,540 | 26.274074 | 139 | 0.666486 | false |
Margherita-/camilla | lib/get_photo.py | 1 | 1068 | import flickr_api
import pickle
import os
# returns the urls of all photos in an album
def get_photos_url(photos):
lista_url = []
for photo in photos:
lista_url.append(photo.getPhotoFile('Medium'))
return lista_url
def give_me_photos():
if not os.path.exists('album.pickle'):
return ([], [])
fp1 = file('album.pickle', 'rb')
fp2 = file('photos.pickle', 'rb')
album = pickle.load(fp1)
photos = pickle.load(fp2)
return (album, photos)
def save_photos():
data = flickr_reader()
album, photos = data
fp1 = file('album.pickle', 'wb')
fp2 = file('photos.pickle', 'wb')
pickle.dump(album, fp1)
pickle.dump(photos, fp2)
fp1.close()
fp2.close()
# download my data from Flickr
def flickr_reader():
user = flickr_api.Person.findByUserName('camillathecat')
photosets = user.getPhotosets()
lista_album = []
for album in photosets:
lista_album.append(album)
all_photos = {}
for album in lista_album:
all_photos[album.title] = get_photos_url(album.getPhotos())
return [lista_album, all_photos]
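
# Typical refresh/read cycle for the helpers above (illustrative; requires a
# configured flickr_api key and access to the 'camillathecat' account):
#
#   save_photos()                      # fetch albums + photo URLs, pickle them
#   albums, photos = give_me_photos()  # later calls read from the pickles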
if __name__ == '__main__':
save_photos()
| gpl-3.0 | -1,397,051,139,154,848,500 | 20.36 | 61 | 0.679775 | false |
montenegroariel/sigos | apps/odontologicos/admin.py | 1 | 1425 | from django.contrib import admin
from .models import OrdenOdontologica, ArancelOdontologico, Entidad, Capitulo
class OrdenOdontologicaAdmin(admin.ModelAdmin):
list_display = [
'persona',
'localidad',
'lugar_trabajo',
'mes',
'anio'
]
search_fields = [
'persona',
'localidad',
'lugar_trabajo',
'mes',
'anio'
]
list_filter = [
'persona',
'localidad',
'lugar_trabajo',
'mes',
'anio'
]
class ArancelOdontologicoAdmin(admin.ModelAdmin):
list_display = [
'codigo',
'descripcion',
'entidad',
'capitulo',
'monto'
]
list_filter = [
'codigo',
'descripcion',
'entidad',
'capitulo',
'monto'
]
search_fields = [
'codigo',
'descripcion',
'monto'
]
class EntidadAdmin(admin.ModelAdmin):
list_display = ['descripcion']
list_filter = ['descripcion']
search_fields = ['descripcion']
class CapituloAdmin(admin.ModelAdmin):
list_display = ['descripcion']
list_filter = ['descripcion']
search_fields = ['descripcion']
admin.site.register(Entidad, EntidadAdmin)
admin.site.register(Capitulo, CapituloAdmin)
admin.site.register(OrdenOdontologica, OrdenOdontologicaAdmin)
admin.site.register(ArancelOdontologico, ArancelOdontologicoAdmin)
| gpl-3.0 | 9,189,499,504,768,650,000 | 19.070423 | 77 | 0.585965 | false |
google-research/data-driven-advection | datadrivenpdes/core/integrate.py | 1 | 4705 | # python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Integrate models over time."""
from typing import Dict, Union
import numpy as np
from datadrivenpdes.core import models
from datadrivenpdes.core import tensor_ops
import tensorflow as tf
nest = tf.contrib.framework.nest
xla = tf.contrib.compiler.xla
KeyedTensors = Dict[str, tf.Tensor]
# Note: Python's type system allows supplying substituting integers for floats
ArrayLike = Union[np.ndarray, np.generic, float]
def _xla_decorator(func):
def wrapper(*args):
return xla.compile(func, args)
return wrapper
def integrate_steps(
model: models.TimeStepModel,
state: KeyedTensors,
steps: ArrayLike,
initial_time: float = 0.0,
axis: int = 0,
xla_compile: bool = False,
) -> KeyedTensors:
"""Integrate some fixed number of time steps.
Args:
model: model to integrate.
state: starting value of the state.
    steps: time step numbers at which the integrated solution is recorded.
initial_time: initial time for time integration.
axis: axis in result tensors along which the integrated solution is
stacked.
xla_compile: whether to compile with XLA or not.
Returns:
    Time evolved states at the steps specified in `steps`. Each tensor has the
    same shape as the inputs, with an additional dimension inserted to store
    values at each requested step.
"""
# TODO(shoyer): explicitly include time?
del initial_time # unused
state = nest.map_structure(tf.convert_to_tensor, state)
steps = tf.convert_to_tensor(steps, dtype=tf.int32)
constant_state = {k: v for k, v in state.items()
if k in model.equation.constant_keys}
evolving_state = {k: v for k, v in state.items()
if k in model.equation.evolving_keys}
def advance_one_step(state):
return model.take_time_step({**state, **constant_state})
def advance_until_saved_step(evolving_state, start_stop):
"""Integrate until the next step at which to save results."""
start, stop = start_stop
result, _ = tf.while_loop(
lambda _, i: i < stop,
lambda state, i: (advance_one_step(state), i + 1),
loop_vars=(evolving_state, start),
)
return result
if xla_compile:
advance_until_saved_step = _xla_decorator(advance_until_saved_step)
starts = tf.concat([[0], steps[:-1]], axis=0)
integrated = tf.scan(advance_until_saved_step, [starts, steps],
initializer=evolving_state)
integrated_constants = nest.map_structure(
lambda x: tf.broadcast_to(x, steps.shape.as_list() + x.shape.as_list()),
constant_state)
integrated.update(integrated_constants)
return tensor_ops.moveaxis(integrated, 0, axis)
def integrate_times(
model: models.TimeStepModel,
state: KeyedTensors,
times: ArrayLike,
initial_time: float = 0.0,
axis: int = 0,
xla_compile: bool = False,
) -> KeyedTensors:
"""Returns time evolved states at the requested times.
TODO(shoyer): consider adding optional interpolation. Currently we require
that the requested times are *exact* multiples of the time step.
Args:
model: model to integrate.
state: starting value of the state.
times: time values at which the integrated solution is recorded.
initial_time: initial time for time integration.
axis: axis in result tensors along which the integrated solution is
stacked.
xla_compile: whether to compile with XLA or not.
Returns:
Time evolved states at the times specified in `times`. Each tensor has the
same shape as the inputs, with an additional dimension inserted to store
values at each requested time.
"""
dt = model.equation.get_time_step(model.grid)
approx_steps = (times - initial_time) / dt
steps = np.around(approx_steps).astype(int)
if not np.allclose(approx_steps, steps, atol=1e-8):
raise ValueError('evaluation times {} are not an integer multiple of the '
'time step {}: {}'.format(times, dt, approx_steps))
return integrate_steps(model, state, steps, initial_time, axis, xla_compile)
| apache-2.0 | 8,454,643,705,466,531,000 | 33.595588 | 80 | 0.687779 | false |
rvelhote/bitcoin-indicator | interface/indicator.py | 1 | 2682 | # MIT License
#
# Copyright (c) 2017 Ricardo Velhote
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gi
import os
import time
gi.require_version('AppIndicator3', '0.1')
from gi.repository import AppIndicator3 as appindicator
class Indicator():
def __init__(self, name, icon_path, menu=None):
"""
Creates a new indicator for the application! The best indicator Jerry. The best.
:param name: The name to give the indicator
:param icon_path: A path for the icon that will identify the indicator
:param menu: An instance of interface.Menu with the list of menu items that belong to this indicator
"""
self.name = name
self.icon = os.path.abspath(icon_path)
self.category = appindicator.IndicatorCategory.SYSTEM_SERVICES
self.indicator = appindicator.Indicator.new(self.name, self.icon, self.category)
self.indicator.set_status(appindicator.IndicatorStatus.ACTIVE)
if menu is not None:
self.indicator.set_menu(menu)
def set_label(self, value):
"""
Defines a label for the indicator. This is merely a wrapper for the indicator set_label method.
:param value: A string containing a new label for the indicator
:return: None
"""
self.indicator.set_label(value, '')
self.indicator.get_menu().get_children()[0].get_child().set_text(time.strftime('%Y-%m-%d %H:%M:%S'))
def get_label(self):
"""
Obtain the label currently assigned to this indicator instance
:return: A string containing the current indicator label
"""
return self.indicator.get_label() | mit | 3,826,399,072,015,603,000 | 40.276923 | 108 | 0.707681 | false |
macosforge/ccs-calendarserver | txdav/caldav/resource.py | 1 | 4552 | ##
# Copyright (c) 2010-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
CalDAV resources.
"""
__all__ = [
"CalDAVResource",
"CalendarHomeResource",
"CalendarCollectionResource",
"CalendarObjectResource",
"ScheduleInboxResource",
"ScheduleOutboxResource",
]
import urllib
from twext.python.log import Logger
from txdav.xml.base import dav_namespace
from txweb2.http_headers import MimeType
from txweb2.http import RedirectResponse, Response
from txweb2.stream import MemoryStream
from twistedcaldav import caldavxml
from twistedcaldav.caldavxml import caldav_namespace
from twistedcaldav.config import config
from twistedcaldav.extensions import DAVResource
from twistedcaldav.ical import allowedComponents
class CalDAVResource(DAVResource):
"""
CalDAV resource.
"""
log = Logger()
def davComplianceClasses(self):
return (
tuple(super(CalDAVResource, self).davComplianceClasses()) +
config.CalDAVComplianceClasses
)
supportedCalendarComponentSet = caldavxml.SupportedCalendarComponentSet(
*[caldavxml.CalendarComponent(name=item) for item in allowedComponents]
)
class CalendarHomeResource(CalDAVResource):
"""
Calendar home resource.
This resource is backed by an L{ICalendarHome} implementation.
"""
class CalendarCollectionResource(CalDAVResource):
"""
Calendar collection resource.
This resource is backed by an L{ICalendar} implementation.
"""
#
# HTTP
#
def render(self, request):
if config.EnableMonolithicCalendars:
#
# Send listing instead of iCalendar data to HTML agents
# This is mostly useful for debugging...
#
# FIXME: Add a self-link to the dirlist with a query string so
# users can still download the actual iCalendar data?
#
# FIXME: Are there better ways to detect this than hacking in
# user agents?
#
# FIXME: In the meantime, make this a configurable regex list?
#
agent = request.headers.getHeader("user-agent")
if agent is not None and (
agent.startswith("Mozilla/") and agent.find("Gecko") != -1
):
renderAsHTML = True
else:
renderAsHTML = False
else:
renderAsHTML = True
if not renderAsHTML:
# Render a monolithic iCalendar file
if request.path[-1] != "/":
# Redirect to include trailing '/' in URI
return RedirectResponse(request.unparseURL(path=urllib.quote(urllib.unquote(request.path), safe=':/') + '/'))
def _defer(data):
response = Response()
response.stream = MemoryStream(str(data))
response.headers.setHeader("content-type", MimeType.fromString("text/calendar"))
return response
d = self.iCalendarRolledup(request)
d.addCallback(_defer)
return d
return super(CalDAVResource, self).render(request)
#
# WebDAV
#
def liveProperties(self):
return super(CalendarCollectionResource, self).liveProperties() + (
(dav_namespace, "owner"), # Private Events needs this but it is also OK to return empty
(caldav_namespace, "supported-calendar-component-set"),
(caldav_namespace, "supported-calendar-data"),
)
class CalendarObjectResource(CalDAVResource):
"""
Calendar object resource.
This resource is backed by an L{ICalendarObject} implementation.
"""
class ScheduleInboxResource(CalDAVResource):
"""
Schedule inbox resource.
This resource is backed by an XXXXXXX implementation.
"""
class ScheduleOutboxResource(CalDAVResource):
"""
Schedule outbox resource.
This resource is backed by an XXXXXXX implementation.
"""
| apache-2.0 | 2,048,175,554,441,219,300 | 27.993631 | 125 | 0.650264 | false |
ccxt/ccxt | python/ccxt/test/test_sync.py | 1 | 16970 | # -*- coding: utf-8 -*-
import argparse
import json
# import logging
import os
import sys
import time # noqa: F401
from os import _exit
from traceback import format_tb
# ------------------------------------------------------------------------------
# logging.basicConfig(level=logging.INFO)
# ------------------------------------------------------------------------------
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root)
# ------------------------------------------------------------------------------
import ccxt # noqa: E402
from test_trade import test_trade # noqa: E402
from test_order import test_order # noqa: E402
from test_ohlcv import test_ohlcv # noqa: E402
from test_transaction import test_transaction # noqa: E402
# ------------------------------------------------------------------------------
class Argv(object):
verbose = False
nonce = None
exchange = None
symbol = None
pass
argv = Argv()
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', action='store_true', help='enable verbose output')
parser.add_argument('--nonce', type=int, help='integer')
parser.add_argument('exchange', type=str, help='exchange id in lowercase', nargs='?')
parser.add_argument('symbol', type=str, help='symbol in uppercase', nargs='?')
parser.parse_args(namespace=argv)
exchanges = {}
# ------------------------------------------------------------------------------
path = os.path.dirname(ccxt.__file__)
if 'site-packages' in os.path.dirname(ccxt.__file__):
raise Exception("You are running test_async.py/test.py against a globally-installed version of the library! It was previously installed into your site-packages folder by pip or pip3. To ensure testing against the local folder uninstall it first with pip uninstall ccxt or pip3 uninstall ccxt")
# ------------------------------------------------------------------------------
# string coloring functions
def style(s, style):
return str(s) # style + str (s) + '\033[0m'
def green(s):
return style(s, '\033[92m')
def blue(s):
return style(s, '\033[94m')
def yellow(s):
return style(s, '\033[93m')
def red(s):
return style(s, '\033[91m')
def pink(s):
return style(s, '\033[95m')
def bold(s):
return style(s, '\033[1m')
def underline(s):
return style(s, '\033[4m')
# print a colored string
def dump(*args):
print(' '.join([str(arg) for arg in args]))
# print an error string
def dump_error(*args):
string = ' '.join([str(arg) for arg in args])
print(string)
sys.stderr.write(string + "\n")
sys.stderr.flush()
# ------------------------------------------------------------------------------
def handle_all_unhandled_exceptions(type, value, traceback):
dump_error(yellow(type), yellow(value), '\n\n' + yellow('\n'.join(format_tb(traceback))))
_exit(1) # unrecoverable crash
sys.excepthook = handle_all_unhandled_exceptions
# ------------------------------------------------------------------------------
def test_order_book(exchange, symbol):
if exchange.has['fetchOrderBook']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
# dump(green(exchange.id), green(symbol), 'fetching order book...')
orderbook = exchange.fetch_order_book(symbol)
dump(
green(exchange.id),
green(symbol),
'order book',
orderbook['datetime'],
'bid: ' + str(orderbook['bids'][0][0] if len(orderbook['bids']) else 'N/A'),
'bidVolume: ' + str(orderbook['bids'][0][1] if len(orderbook['bids']) else 'N/A'),
'ask: ' + str(orderbook['asks'][0][0] if len(orderbook['asks']) else 'N/A'),
'askVolume: ' + str(orderbook['asks'][0][1] if len(orderbook['asks']) else 'N/A'))
else:
        dump(yellow(exchange.id), 'fetch_order_book() not supported')
# ------------------------------------------------------------------------------
def test_ohlcvs(exchange, symbol):
ignored_exchanges = [
'cex', # CEX can return historical candles for a certain date only
'okex', # okex fetchOHLCV counts "limit" candles from current time backwards
'okcoinusd', # okex base class
]
if exchange.id in ignored_exchanges:
return
if exchange.has['fetchOHLCV']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
timeframes = exchange.timeframes if exchange.timeframes else {'1d': '1d'}
timeframe = list(timeframes.keys())[0]
limit = 10
duration = exchange.parse_timeframe(timeframe)
since = exchange.milliseconds() - duration * limit * 1000 - 1000
ohlcvs = exchange.fetch_ohlcv(symbol, timeframe, since, limit)
for ohlcv in ohlcvs:
test_ohlcv(exchange, ohlcv, symbol, int(time.time() * 1000))
dump(green(exchange.id), 'fetched', green(len(ohlcvs)), 'OHLCVs')
else:
dump(yellow(exchange.id), 'fetching OHLCV not supported')
# ------------------------------------------------------------------------------
def test_tickers(exchange, symbol):
ignored_exchanges = [
'digifinex', # requires apiKey to call v2 tickers
]
if exchange.id in ignored_exchanges:
return
if exchange.has['fetchTickers']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
tickers = None
try:
# dump(green(exchange.id), 'fetching all tickers at once...')
tickers = exchange.fetch_tickers()
dump(green(exchange.id), 'fetched all', green(len(list(tickers.keys()))), 'tickers')
except Exception as e:
dump(green(exchange.id), 'failed to fetch all tickers, fetching multiple tickers at once...')
tickers = exchange.fetch_tickers([symbol])
dump(green(exchange.id), 'fetched', green(len(list(tickers.keys()))), 'tickers')
# ------------------------------------------------------------------------------
def get_active_symbols(exchange):
return [symbol for symbol in exchange.symbols if is_active_symbol(exchange, symbol)]
def is_active_symbol(exchange, symbol):
return ('.' not in symbol) and (('active' not in exchange.markets[symbol]) or (exchange.markets[symbol]['active']))
# ------------------------------------------------------------------------------
def test_ticker(exchange, symbol):
ignored_exchanges = [
'digifinex', # requires apiKey to call v2 tickers
]
if exchange.id in ignored_exchanges:
return
if exchange.has['fetchTicker']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
ticker = exchange.fetch_ticker(symbol)
dump(
green(exchange.id),
green(symbol),
'ticker',
ticker['datetime'],
'high: ' + str(ticker['high']),
'low: ' + str(ticker['low']),
'bid: ' + str(ticker['bid']),
'ask: ' + str(ticker['ask']),
'volume: ' + str(ticker['quoteVolume']))
else:
dump(green(exchange.id), green(symbol), 'fetch_ticker() not supported')
# ------------------------------------------------------------------------------
def test_trades(exchange, symbol):
if exchange.has['fetchTrades']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
# dump(green(exchange.id), green(symbol), 'fetching trades...')
trades = exchange.fetch_trades(symbol)
if trades:
test_trade(exchange, trades[0], symbol, int(time.time() * 1000))
dump(green(exchange.id), green(symbol), 'fetched', green(len(trades)), 'trades')
else:
dump(green(exchange.id), green(symbol), 'fetch_trades() not supported')
# ------------------------------------------------------------------------------
def test_orders(exchange, symbol):
if exchange.has['fetchOrders']:
skipped_exchanges = [
'bitmart',
'rightbtc',
]
if exchange.id in skipped_exchanges:
dump(green(exchange.id), green(symbol), 'fetch_orders() skipped')
return
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
# dump(green(exchange.id), green(symbol), 'fetching orders...')
orders = exchange.fetch_orders(symbol)
for order in orders:
test_order(exchange, order, symbol, int(time.time() * 1000))
dump(green(exchange.id), green(symbol), 'fetched', green(len(orders)), 'orders')
else:
dump(green(exchange.id), green(symbol), 'fetch_orders() not supported')
# ------------------------------------------------------------------------------
def test_closed_orders(exchange, symbol):
if exchange.has['fetchClosedOrders']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
# dump(green(exchange.id), green(symbol), 'fetching orders...')
orders = exchange.fetch_closed_orders(symbol)
for order in orders:
test_order(exchange, order, symbol, int(time.time() * 1000))
assert order['status'] == 'closed' or order['status'] == 'canceled'
dump(green(exchange.id), green(symbol), 'fetched', green(len(orders)), 'closed orders')
else:
dump(green(exchange.id), green(symbol), 'fetch_closed_orders() not supported')
# ------------------------------------------------------------------------------
def test_open_orders(exchange, symbol):
if exchange.has['fetchOpenOrders']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
# dump(green(exchange.id), green(symbol), 'fetching orders...')
orders = exchange.fetch_open_orders(symbol)
for order in orders:
test_order(exchange, order, symbol, int(time.time() * 1000))
assert order['status'] == 'open'
dump(green(exchange.id), green(symbol), 'fetched', green(len(orders)), 'open orders')
else:
dump(green(exchange.id), green(symbol), 'fetch_open_orders() not supported')
# ------------------------------------------------------------------------------
def test_transactions(exchange, code):
if exchange.has['fetchTransactions']:
delay = int(exchange.rateLimit / 1000)
time.sleep(delay)
transactions = exchange.fetch_transactions(code)
for transaction in transactions:
test_transaction(exchange, transaction, code, int(time.time() * 1000))
dump(green(exchange.id), green(code), 'fetched', green(len(transactions)), 'transactions')
else:
dump(green(exchange.id), green(code), 'fetch_transactions() not supported')
# ------------------------------------------------------------------------------
def test_symbol(exchange, symbol, code):
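    """Exercise the public API for one symbol (ticker, tickers, OHLCV, order
    book, trades) and, when API keys are configured, the private API as well
    (orders, transactions, balance)."""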
dump(green('SYMBOL: ' + symbol))
dump(green('CODE: ' + code))
test_ticker(exchange, symbol)
test_tickers(exchange, symbol)
test_ohlcvs(exchange, symbol)
if exchange.id == 'coinmarketcap':
response = exchange.fetchGlobal()
dump(green(response))
else:
test_order_book(exchange, symbol)
test_trades(exchange, symbol)
if (not hasattr(exchange, 'apiKey') or (len(exchange.apiKey) < 1)):
return
if exchange.has['signIn']:
exchange.sign_in()
test_orders(exchange, symbol)
test_open_orders(exchange, symbol)
test_closed_orders(exchange, symbol)
test_transactions(exchange, code)
exchange.fetch_balance()
dump(green(exchange.id), 'fetched balance')
# ------------------------------------------------------------------------------
def load_exchange(exchange):
exchange.load_markets()
def test_exchange(exchange, symbol=None):
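    """Pick a representative currency code and trading symbol for the exchange
    and run test_symbol on it; the private trading calls below stay commented
    out on purpose."""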
dump(green('EXCHANGE: ' + exchange.id))
# delay = 2
keys = list(exchange.markets.keys())
# ..........................................................................
# public API
codes = [
'BTC',
'ETH',
'XRP',
'LTC',
'BCH',
'EOS',
'BNB',
'BSV',
'USDT',
'ATOM',
'BAT',
'BTG',
'DASH',
'DOGE',
'ETC',
'IOTA',
'LSK',
'MKR',
'NEO',
'PAX',
'QTUM',
'TRX',
'TUSD',
'USD',
'USDC',
'WAVES',
'XEM',
'XMR',
'ZEC',
'ZRX',
]
code = codes[0]
for i in range(0, len(codes)):
if codes[i] in exchange.currencies:
code = codes[i]
if not symbol:
symbol = keys[0]
symbols = [
'BTC/USD',
'BTC/USDT',
'BTC/CNY',
'BTC/EUR',
'BTC/ETH',
'ETH/BTC',
'ETH/USDT',
'BTC/JPY',
'LTC/BTC',
'USD/SLL',
]
for s in symbols:
if s in keys:
symbol = s
break
if symbol.find('.d') < 0:
test_symbol(exchange, symbol, code)
# ..........................................................................
# private API
# move to testnet/sandbox if possible before accessing the balance if possible
# if 'test' in exchange.urls:
# exchange.urls['api'] = exchange.urls['test']
# time.sleep(exchange.rateLimit / 1000)
# time.sleep(delay)
# amount = 1
# price = 0.0161
# marketBuy = exchange.create_market_buy_order(symbol, amount)
# print(marketBuy)
# time.sleep(delay)
# marketSell = exchange.create_market_sell_order(symbol, amount)
# print(marketSell)
# time.sleep(delay)
# limitBuy = exchange.create_limit_buy_order(symbol, amount, price)
# print(limitBuy)
# time.sleep(delay)
# limitSell = exchange.create_limit_sell_order(symbol, amount, price)
# print(limitSell)
# time.sleep(delay)
# ------------------------------------------------------------------------------
def try_all_proxies(exchange, proxies=['']):
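    """Load markets and run the exchange tests, retrying once per proxy.
    Returns True as soon as one attempt finishes without a recoverable ccxt
    error, or False after every proxy has been tried."""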
current_proxy = 0
max_retries = len(proxies)
if exchange.proxy in proxies:
current_proxy = proxies.index(exchange.proxy)
for num_retries in range(0, max_retries):
try:
exchange.proxy = proxies[current_proxy]
dump(green(exchange.id), 'using proxy', '`' + exchange.proxy + '`')
current_proxy = (current_proxy + 1) % len(proxies)
load_exchange(exchange)
test_exchange(exchange)
except (ccxt.RequestTimeout, ccxt.AuthenticationError, ccxt.NotSupported, ccxt.DDoSProtection, ccxt.ExchangeNotAvailable, ccxt.ExchangeError) as e:
print({'type': type(e).__name__, 'num_retries': num_retries, 'max_retries': max_retries}, str(e)[0:200])
if (num_retries + 1) == max_retries:
dump_error(yellow('[' + type(e).__name__ + ']'), str(e)[0:200])
else:
# no exception
return True
# exception
return False
# ------------------------------------------------------------------------------
proxies = [
'',
'https://cors-anywhere.herokuapp.com/',
# 'https://crossorigin.me/',
]
# prefer local testing keys to global keys
keys_folder = os.path.dirname(root)
keys_global = os.path.join(keys_folder, 'keys.json')
keys_local = os.path.join(keys_folder, 'keys.local.json')
keys_file = keys_local if os.path.exists(keys_local) else keys_global
# load the api keys from config
with open(keys_file) as file:
config = json.load(file)
# instantiate all exchanges
for id in ccxt.exchanges:
if id == 'theocean':
continue
exchange = getattr(ccxt, id)
exchange_config = {'verbose': argv.verbose}
if sys.version_info[0] < 3:
exchange_config.update({'enableRateLimit': True})
if id in config:
exchange_config = ccxt.Exchange.deep_extend(exchange_config, config[id])
exchanges[id] = exchange(exchange_config)
# ------------------------------------------------------------------------------
def main():
if argv.exchange:
if argv.exchange != 'theocean':
exchange = exchanges[argv.exchange]
symbol = argv.symbol
if hasattr(exchange, 'skip') and exchange.skip:
dump(green(exchange.id), 'skipped')
else:
if symbol:
load_exchange(exchange)
test_symbol(exchange, symbol)
else:
try_all_proxies(exchange, proxies)
else:
for exchange in sorted(exchanges.values(), key=lambda x: x.id):
if hasattr(exchange, 'skip') and exchange.skip:
dump(green(exchange.id), 'skipped')
else:
try_all_proxies(exchange, proxies)
# ------------------------------------------------------------------------------
if __name__ == '__main__':
main()
| mit | -1,799,680,983,772,008,400 | 31.079395 | 297 | 0.527225 | false |
andrewfu0325/gem5-aladdin | sweeps/benchmarks/designsweeptypes.py | 1 | 1766 | # Design sweep class for gem5.
import os
from xenon.base.designsweeptypes import ExhaustiveSweep
from benchmarks import params
from generators import *
class Gem5DesignSweep(ExhaustiveSweep):
sweepable_params = [
# gem5 needs to know this to determine whether a cache should be attached
# to the accelerator.
params.memory_type
]
def __init__(self, name):
super(Gem5DesignSweep, self).__init__(name)
# Path to the source directory of the benchmark suite being swept.
# TODO: Find a way to encapsulate this in benchmark configurations without
# having it tied to Git history.
self.source_dir = ""
# Simulation mode.
# Valid options are:
# - aladdin: Run Aladdin only.
# - gem5-cache: Run Aladdin connected to gem5's memory system, but no CPU.
# - gem5-cpu: Run Aladdin in conjunction with a gem5 CPU model. In this case,
# Aladdin must be invoked by the user program running on the CPU.
self.simulator = ""
def validate(self):
super(Gem5DesignSweep, self).validate()
if not os.path.exists(self.source_dir):
raise IOError("Source directory %s does not exist!" % self.source_dir)
valid_simulators = ["gem5-cpu", "gem5-cache", "aladdin"]
    if self.simulator not in valid_simulators:
      raise ValueError("Attribute simulator has invalid value %s. Valid values are %s." % (
          self.simulator, valid_simulators))
def generate_trace(self):
generator = trace_generator.TraceGenerator(self)
return generator.run()
def generate_dma_trace(self):
generator = trace_generator.TraceGenerator(self, dma=True)
return generator.run()
def generate_gem5_binary(self):
generator = gem5_binary_generator.Gem5BinaryGenerator(self)
return generator.run()
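# Illustrative usage sketch (the names below are assumptions, not part of this module):
#   sweep = Gem5DesignSweep('aes_sweep')
#   sweep.source_dir = '/path/to/benchmark/suite'
#   sweep.simulator = 'gem5-cpu'   # or 'gem5-cache' / 'aladdin'
#   sweep.validate()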
| bsd-3-clause | 174,195,536,780,014,200 | 32.320755 | 83 | 0.698754 | false |
hziling/flango | flango/template.py | 1 | 11989 | # -*- coding: utf-8 -*-
"""
flango.template
~~~~~~~~~~~~~~
    template module provides a simple template system that compiles
    templates to Python code, much like the django and tornado
    template modules.
    Usage
    -----
    Well, you can view the tests file under tests/ directly for usage examples.
Basically::
>>> from flango import template
>>> template.Template('Hello, {{ name }}').render(name='flango')
Hello, flango
If, else, for...::
>>> template.Template('''
... {% for i in l %}
... {% if i > 3 %}
... {{ i }}
... {% else %}
... less than 3
... {% endif %}
    ...     {% endfor %}
... ''' ).render(l=[2, 4])
less than 3
4
    Then, user-defined class objects may also work well::
>>> class A(object):
...
... def __init__(self, a, b):
... self.a = a
... self.b = b
...
>>> o = A("I am o.a", [1, 2, 3])
>>> template.Template('''
... {{ o.a }}
... {% for i in o.b %}
... {{ i }}
... {% endfor %}
... ''').render(o=o)
I am o.a
1
2
3
    and wow, function calls may surprise you::
>>> template.Template('{{ abs(-3) }}').render()
'3'
>>> template.Template('{{ len([1, 2, 3]) }}').render()
'3'
>>> template.Template('{{ [1, 2, 3].index(2) }}').render()
'1'
    and complex functions like lambda expressions also work::
>>> template.Template('{{ list(map(lambda x: x * 2, [1, 2, 3])) }}').render()
'[2, 4, 6]'
    and lastly, template inheritance with extends and include::
{% extends 'base.html' %}
{% include 'included.html' %}
Hacking with fun and joy.
"""
import re
import os
import collections
# LRU Cache capacity:
_CACHE_CAPACITY = 128
class Scanner(object):
""" Scanner is a inner class of Template which provide
custom template source reading operations.
"""
def __init__(self, source):
# pattern for variable, function, block, statement.
self.pattern = re.compile(r'''
{{\s*(?P<var>.+?)\s*}} # variable: {{ name }} or function like: {{ abs(-2) }}
| # or
{%\s*(?P<endtag>end(if|for|while|block))\s*%} # endtag: {% endfor %}
| # or
{%\s*(?P<statement>(?P<keyword>\w+)\s*(.+?))\s*%} # statement: {% for i in range(10) %}
''', re.VERBOSE)
# the pre-text before token.
self.pretext = ''
# the remaining text which have not been processed.
self.remain = source
def next_token(self):
""" Get the next token which match the pattern semantic.
return `None` if there is no more tokens, otherwise,
return matched regular expression group of token `t`, get
the pre-text and the remain text at the same time.
"""
t = self.pattern.search(self.remain)
if not t:
return None
self.pretext = self.remain[:t.start()]
self.remain = self.remain[t.end():]
return t
@property
def empty(self):
""" Return `True` if the source have been processed."""
return self.remain == ''
class BaseNode(object):
""" Base abstract class for nodes.
    Subclasses of BaseNode must implement the 'generate' interface
    to emit their Python intermediate code.
"""
def __init__(self, text, indent, block):
self.text = text
self.indent = indent
self.block = block
def generate(self):
raise NotImplementedError()
class TextNode(BaseNode):
""" Node for normal text. """
def generate(self):
return '{0}_stdout.append(\'\'\'{1}\'\'\')\n'.format(' '*self.indent, self.text)
class VariableNode(BaseNode):
""" Node for variables: such as {{ name }}. """
def generate(self):
return '{0}_stdout.append({1})\n'.format(' '*self.indent, self.text)
class KeyNode(BaseNode):
""" Node for keywords like if else... """
def generate(self):
return '{0}{1}\n'.format(' '*self.indent, self.text)
class TemplateException(Exception):
pass
class Template(object):
""" Main class for compiled template instance.
    An initialized template instance parses and compiles
    all of the template source to Python intermediate code,
    and the instance method `render` uses the Python builtin
    function `exec` to execute that intermediate code at
    runtime.
    Because `exec` is powerful enough to execute arbitrary
    Python code against the given namespace dict, this template
    engine can support nearly all Python features, even lambda
    functions. But `exec` is also a serious security problem, so
    be careful and be serious, and I am very serious too.
"""
def __init__(self, source, path='', autoescape=False):
if not source:
raise ValueError('Invalid parameter')
self.scanner = Scanner(source)
# path for extends and include
self.path = path
self.nodes = []
# parent template
self.parent = None
self.autoescape = autoescape
self._parse()
# compiled intermediate code.
self.intermediate = self._compile()
def _parse(self):
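        """ Tokenize the template source and build the node list.
        Walks the scanner token by token, turning plain text,
        {{ ... }} expressions and {% ... %} statements into TextNode,
        VariableNode and KeyNode objects while tracking indentation,
        block nesting and the extends/include directives.
        """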
python_keywords = ['if', 'for', 'while', 'try', 'else', 'elif', 'except', 'finally']
indent = 0
block_stack = []
def block_stack_top():
return block_stack[-1] if block_stack else None
while not self.scanner.empty:
token = self.scanner.next_token()
if not token:
self.nodes.append(TextNode(self.scanner.remain, indent, block_stack_top()))
break
# get the pre-text before token.
if self.scanner.pretext:
self.nodes.append(TextNode(self.scanner.pretext, indent, block_stack_top()))
variable, endtag, tag, statement, keyword, suffix = token.groups()
if variable:
node_text = 'escape(str({0}))'.format(variable) if self.autoescape else variable
self.nodes.append(VariableNode(node_text, indent, block_stack_top()))
elif endtag:
if tag != 'block':
indent -= 1
continue
# block placeholder in parent template nodes
if not self.parent:
node_text = 'endblock%{0}'.format(block_stack_top())
self.nodes.append(KeyNode(node_text, indent, block_stack_top()))
block_stack.pop()
elif statement:
if keyword == 'include':
filename = re.sub(r'\'|\"', '', suffix)
nodes = Loader(self.path).load(filename).nodes
for node in nodes:
node.indent += indent
self.nodes.extend(nodes)
elif keyword == 'extends':
if self.nodes:
raise TemplateException('Template syntax error: extends tag must be '
'at the beginning of the file.')
filename = re.sub(r'\'|\"', '', suffix)
self.parent = Loader(self.path).load(filename)
elif keyword == 'block':
block_stack.append(suffix)
if not self.parent:
node_text = 'block%{0}'.format(suffix)
self.nodes.append(KeyNode(node_text, indent, block_stack_top()))
elif keyword in python_keywords:
node_text = '{0}:'.format(statement)
if keyword in ['else', 'elif', 'except', 'finally']:
key_indent = indent - 1
else:
key_indent = indent
indent += 1
self.nodes.append(KeyNode(node_text, key_indent, block_stack_top()))
else:
raise TemplateException('Invalid keyword: {0}.'.format(keyword))
else:
raise TemplateException('Template syntax error.')
def _compile(self):
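        """ Generate the Python intermediate code and compile it.
        If this template extends a parent, the parent's block placeholders
        are replaced by this template's block contents before the code is
        handed to compile().
        """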
block = {}
if self.parent:
generate_code = ''.join(node.generate() for node in self.parent.nodes)
pattern = re.compile(r'block%(?P<start_block>\w+)(?P<block_code>.*?)endblock%(?P<end_block>\w+)', re.S)
for node in self.nodes:
block.setdefault(node.block, []).append(node.generate())
for token in pattern.finditer(generate_code):
block_name = token.group('start_block')
if block_name != token.group('end_block'):
raise TemplateException('Template syntax error.')
block_code = ''.join(block[block_name]) if block_name in block.keys() else token.group('block_code')
generate_code = generate_code.replace(token.group(), block_code)
else:
generate_code = ''.join(node.generate() for node in self.nodes)
return compile(generate_code, '<string>', 'exec')
def render(self, **context):
# `context['_stdout']`: Compiled template source code
# which is a Python list, contain all the output
# statement of Python code.
context.update({'_stdout': [], 'escape': escape})
exec(self.intermediate, context)
return re.sub(r'(\s+\n)+', r'\n', ''.join(map(str, context['_stdout'])))
class LRUCache(object):
""" Simple LRU cache for template instance caching.
    In fact, collections.OrderedDict or @functools.lru_cache
    would work well too.
"""
def __init__(self, capacity):
self.capacity = capacity
self.cache = collections.OrderedDict()
def get(self, key):
""" Return -1 if catched KeyError exception."""
try:
value = self.cache.pop(key)
self.cache[key] = value
return value
except KeyError:
return -1
def set(self, key, value):
try:
self.cache.pop(key)
except KeyError:
if len(self.cache) >= self.capacity:
self.cache.popitem(last=False)
self.cache[key] = value
class Loader(object):
""" A template Loader which loads the environments of
main application, or just give the template system a root
directory to search the template files.
loader = template.Loader("home/to/root/of/templates/")
loader.load("index.html").render()
Loader class use a LRU cache system to cache the recently used
templates for performance consideration.
"""
def __init__(self, path='', engine=Template, cache_capacity=_CACHE_CAPACITY):
self.path = path
self.engine = engine
self.cache = LRUCache(capacity=cache_capacity)
def load(self, filename):
if not self.path.endswith(os.sep) and self.path != '':
self.path = self.path + os.sep
p = ''.join([self.path, filename])
cache_instance = self.cache.get(p)
if cache_instance != -1:
return cache_instance
if not os.path.isfile(p):
raise TemplateException('Template file {0} is not exist.'.format(p))
with open(p) as f:
self.cache.set(p, self.engine(f.read(), path=self.path))
return self.cache.get(p)
def escape(content):
""" Escapes a string's HTML. """
return content.replace('&', '&').replace('<', '<').replace('>', '>')\
.replace('"', '"').replace("'", ''')
| mit | 4,007,241,840,754,139,600 | 33.059659 | 116 | 0.534573 | false |
pablobesada/tw | frontend/gnip/genderclassifier.py | 1 | 1721 | import re
from pprint import pprint
from mongo import MongoManager
from datetime import datetime, timedelta
class GenderClassifier(object):
cached_names_database = {}
@classmethod
def getNamesDatabase(cls, **kwargs):
max_age = kwargs.get('max_age', timedelta(seconds=0))
if not max_age or not cls.cached_names_database or (datetime.now() - cls.cached_names_database['fetch_time'] > max_age):
namesdb = MongoManager.find("gender_names")
res = {}
for name in namesdb:
res[name["name"].lower()] = name["gender"]
cls.cached_names_database = {'data': res, 'fetch_time': datetime.now()}
return cls.cached_names_database['data']
@classmethod
def extractGender(cls, name):
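        """Guess a gender ("M", "F" or "U") from a display name.
        The name is normalised, split into words, and each word is looked
        up in the cached names database; earlier words carry a slightly
        higher weight (k decreases by one per word).
        """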
#nname = re.sub(ur'[_]+', u' ', name, flags=re.UNICODE)
nname = re.sub(ur'[_\-\.]', u' ', name)
nname = re.sub(ur'[^\w ]+', u'', nname)
        words = [w.lower() for w in nname.split() if len(w) > 1]
names = cls.getNamesDatabase(max_age = timedelta(seconds=300)) #5 minutes
k = 100
M = 0
F = 0
for w in words:
g = names.get(w, "U")
if g == "M": M += k
elif g == "F": F += k
k -=1
if M+F == 0: return "U"
if M>F: return "M"
return "F"
if __name__ == "__main__":
print GenderClassifier.getNamesDatabase()
tweets = MongoManager.findTweets("tweets_g1", limit=40)
for t in tweets:
g = GenderClassifier.extractGender(t.getDisplayName())
print t.getDisplayName(), g
for n in ("pablo romina XX", "romina pablo"):
g = GenderClassifier.extractGender(n)
print n, g
| apache-2.0 | 3,422,839,984,736,192,500 | 31.471698 | 128 | 0.561302 | false |
iulian787/spack | var/spack/repos/builtin/packages/qt/package.py | 1 | 23696 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
from spack.operating_systems.mac_os import macos_version
import llnl.util.tty as tty
import itertools
import os
import sys
MACOS_VERSION = macos_version() if sys.platform == 'darwin' else None
class Qt(Package):
"""Qt is a comprehensive cross-platform C++ application framework."""
homepage = 'http://qt.io'
# Alternative location 'http://download.qt.io/official_releases/qt/'
url = 'http://download.qt.io/archive/qt/5.7/5.7.0/single/qt-everywhere-opensource-src-5.7.0.tar.gz'
list_url = 'http://download.qt.io/archive/qt/'
list_depth = 3
maintainers = ['sethrj']
phases = ['configure', 'build', 'install']
version('5.15.2', sha256='3a530d1b243b5dec00bc54937455471aaa3e56849d2593edb8ded07228202240')
version('5.14.2', sha256='c6fcd53c744df89e7d3223c02838a33309bd1c291fcb6f9341505fe99f7f19fa')
version('5.14.1', sha256='6f17f488f512b39c2feb57d83a5e0a13dcef32999bea2e2a8f832f54a29badb8')
version('5.14.0', sha256='be9a77cd4e1f9d70b58621d0753be19ea498e6b0da0398753e5038426f76a8ba')
version('5.13.1', sha256='adf00266dc38352a166a9739f1a24a1e36f1be9c04bf72e16e142a256436974e')
version('5.12.7', sha256='873783a0302129d98a8f63de9afe4520fb5f8d5316be8ad7b760c59875cd8a8d')
version('5.12.5', sha256='a2299e21db7767caf98242767bffb18a2a88a42fee2d6a393bedd234f8c91298')
version('5.12.2', sha256='59b8cb4e728450b21224dcaaa40eb25bafc5196b6988f2225c394c6b7f881ff5')
version('5.11.3', sha256='859417642713cee2493ee3646a7fee782c9f1db39e41d7bb1322bba0c5f0ff4d')
version('5.11.2', sha256='c6104b840b6caee596fa9a35bc5f57f67ed5a99d6a36497b6fe66f990a53ca81')
version('5.10.0', sha256='936d4cf5d577298f4f9fdb220e85b008ae321554a5fcd38072dc327a7296230e')
version('5.9.1', sha256='7b41a37d4fe5e120cdb7114862c0153f86c07abbec8db71500443d2ce0c89795')
version('5.9.0', sha256='f70b5c66161191489fc13c7b7eb69bf9df3881596b183e7f6d94305a39837517')
version('5.8.0', sha256='9dc5932307ae452855863f6405be1f7273d91173dcbe4257561676a599bd58d3')
version('5.7.1', sha256='c86684203be61ae7b33a6cf33c23ec377f246d697bd9fb737d16f0ad798f89b7')
version('5.7.0', sha256='4661905915d6265243e17fe59852930a229cf5b054ce5af5f48b34da9112ab5f')
version('5.5.1', sha256='c7fad41a009af1996b62ec494e438aedcb072b3234b2ad3eeea6e6b1f64be3b3')
version('5.4.2', sha256='cfc768c55f0a0cd232bed914a9022528f8f2e50cb010bf0e4f3f62db3dfa17bd')
version('5.4.0', sha256='1739633424bde3d89164ae6ff1c5c913be38b9997e451558ef873aac4bbc408a')
version('5.3.2', sha256='c8d3fd2ead30705c6673c5e4af6c6f3973346b4fb2bd6079c7be0943a5b0282d')
version('5.2.1', sha256='84e924181d4ad6db00239d87250cc89868484a14841f77fb85ab1f1dbdcd7da1')
version('4.8.7', sha256='e2882295097e47fe089f8ac741a95fef47e0a73a3f3cdf21b56990638f626ea0')
version('4.8.6', sha256='8b14dd91b52862e09b8e6a963507b74bc2580787d171feda197badfa7034032c')
version('4.8.5', sha256='eb728f8268831dc4373be6403b7dd5d5dde03c169ad6882f9a8cb560df6aa138')
version('3.3.8b', sha256='1b7a1ff62ec5a9cb7a388e2ba28fda6f960b27f27999482ebeceeadb72ac9f6e')
variant('debug', default=False,
description="Build debug version.")
variant('dbus', default=False,
description="Build with D-Bus support.")
variant('doc', default=False,
description="Build QDoc and documentation.")
variant('examples', default=False,
description="Build examples.")
variant('framework', default=bool(MACOS_VERSION),
description="Build as a macOS Framework package.")
variant('gtk', default=False,
description="Build with gtkplus.")
variant('gui', default=True,
description='Build the Qt GUI module and dependencies')
variant('opengl', default=False,
description="Build with OpenGL support.")
variant('phonon', default=False,
description="Build with phonon support.")
variant('shared', default=True,
description='Build shared libraries.')
variant('sql', default=True,
description="Build with SQL support.")
variant('ssl', default=True,
description="Build with OpenSSL support.")
variant('tools', default=True,
description="Build tools, including Qt Designer.")
variant('webkit', default=False,
description="Build the Webkit extension")
# Patches for qt@3
patch('qt3-accept.patch', when='@3')
patch('qt3-headers.patch', when='@3')
# Patches for qt@4
patch('qt4-configure-gcc.patch', when='@4:4.8.6 %gcc')
patch('qt4-87-configure-gcc.patch', when='@4.8.7 %gcc')
patch('qt4-tools.patch', when='@4+tools')
patch('qt4-mac.patch', when='@4.8.7 platform=darwin')
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=925811
patch("qt4-qforeach.patch", when="@4 %gcc@9:")
# Patches for qt@4:
# https://github.com/spack/spack/issues/1517
patch('qt4-pcre.patch', when='@4')
patch('qt5-pcre.patch', when='@5:')
# https://bugreports.qt.io/browse/QTBUG-74196
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=89585
patch('qt4-asm-volatile.patch', when='@4')
patch('qt5-asm-volatile.patch', when='@5.0.0:5.12.1')
# Patches for qt@5
# https://bugreports.qt.io/browse/QTBUG-74219
patch('qt5-btn_trigger_happy.patch', when='@5.7:5.12')
# https://bugreports.qt.io/browse/QTBUG-57656
patch('qt5-8-framework.patch', when='@5.8.0 +framework')
# https://bugreports.qt.io/browse/QTBUG-58038
patch('qt5-8-freetype.patch', when='@5.8.0 +gui')
# https://codereview.qt-project.org/c/qt/qtbase/+/245425
patch('https://github.com/qt/qtbase/commit/a52d7861edfb5956de38ba80015c4dd0b596259b.patch',
sha256='c49b228c27e3ad46ec3af4bac0e9985af5b5b28760f238422d32e14f98e49b1e',
working_dir='qtbase',
when='@5.10:5.12.0 %gcc@9:')
# https://github.com/Homebrew/homebrew-core/pull/5951
patch('qt5-restore-pc-files.patch', when='@5.9:5.11 platform=darwin')
# https://github.com/spack/spack/issues/14400
patch('qt5-11-intel-overflow.patch', when='@5.11 %intel')
patch('qt5-12-intel-overflow.patch', when='@5.12:5.14.0 %intel')
# https://bugreports.qt.io/browse/QTBUG-78937
patch('qt5-12-configure.patch', when='@5.12')
# https://bugreports.qt.io/browse/QTBUG-93402
patch('qt5-15-gcc-10.patch', when='@5.12.7:5.15 %gcc@8:')
patch('qt514.patch', when='@5.14')
conflicts('%gcc@10:', when='@5.9:5.12.6 +opengl')
# Build-only dependencies
depends_on("pkgconfig", type='build')
depends_on("flex", when='+webkit', type='build')
depends_on("bison", when='+webkit', type='build')
depends_on("python", when='@5.7.0:', type='build')
# Dependencies, then variant- and version-specific dependencies
depends_on("icu4c")
depends_on("jpeg")
depends_on("libmng")
depends_on("libtiff")
depends_on("libxml2")
depends_on("zlib")
depends_on("freetype", when='+gui')
depends_on("gperf", when='+webkit')
depends_on("gtkplus", when='+gtk')
depends_on("openssl", when='+ssl')
depends_on("sqlite+column_metadata", when='+sql', type=('build', 'run'))
depends_on("[email protected]", when='@3')
depends_on("libsm", when='@3')
depends_on("pcre+multibyte", when='@5.0:5.8')
depends_on("inputproto", when='@:5.8')
depends_on("openssl@:1.0.999", when='@4:5.9+ssl')
depends_on("glib", when='@4:')
depends_on("libpng", when='@4:')
depends_on("dbus", when='@4:+dbus')
depends_on("gl", when='@4:+opengl')
depends_on("harfbuzz", when='@5:')
depends_on("double-conversion", when='@5.7:')
depends_on("pcre2+multibyte", when='@5.9:')
depends_on("llvm", when='@5.11: +doc')
# gcc@4 is not supported as of [email protected]
# https://doc.qt.io/qt-5.14/supported-platforms.html
conflicts('%gcc@:4.99', when='@5.14:')
# Non-macOS dependencies and special macOS constraints
if MACOS_VERSION is None:
depends_on("fontconfig", when='+gui')
depends_on("libsm", when='+gui')
depends_on("libx11", when='+gui')
depends_on("libxcb", when='+gui')
depends_on("libxkbcommon", when='+gui')
depends_on("xcb-util-image", when='+gui')
depends_on("xcb-util-keysyms", when='+gui')
depends_on("xcb-util-renderutil", when='+gui')
depends_on("xcb-util-wm", when='+gui')
depends_on("libxext", when='+gui')
depends_on("libxrender", when='+gui')
conflicts('+framework',
msg="QT cannot be built as a framework except on macOS.")
else:
conflicts('platform=darwin', when='@4.8.6',
msg="QT 4 for macOS is only patched for 4.8.7")
use_xcode = True
# Mapping for compilers/systems in the QT 'mkspecs'
compiler_mapping = {'intel': ('icc',),
'apple-clang': ('clang-libc++', 'clang'),
'clang': ('clang-libc++', 'clang'),
'gcc': ('g++',)}
platform_mapping = {'darwin': 'macx'}
def url_for_version(self, version):
# URL keeps getting more complicated with every release
url = self.list_url
if version >= Version('4.0'):
url += str(version.up_to(2)) + '/'
else:
url += str(version.up_to(1)) + '/'
if version >= Version('4.8'):
url += str(version) + '/'
if version >= Version('5'):
url += 'single/'
url += 'qt-'
if version >= Version('4.6'):
url += 'everywhere-'
elif version >= Version('2.1'):
url += 'x11-'
if version >= Version('5.10.0'):
url += 'src-'
elif version >= Version('4.0'):
url += 'opensource-src-'
elif version >= Version('3'):
url += 'free-'
# 5.9 only has xz format. From 5.2.1 -> 5.8.0 .gz or .xz were possible
if version >= Version('5.9'):
url += str(version) + '.tar.xz'
else:
url += str(version) + '.tar.gz'
return url
def setup_build_environment(self, env):
env.set('MAKEFLAGS', '-j{0}'.format(make_jobs))
if self.version >= Version('5.11'):
# QDoc uses LLVM as of 5.11; remove the LLVM_INSTALL_DIR to
# disable
try:
llvm_path = self.spec['llvm'].prefix
except KeyError:
# Prevent possibly incompatible system LLVM from being found
llvm_path = "/spack-disable-llvm"
env.set('LLVM_INSTALL_DIR', llvm_path)
def setup_run_environment(self, env):
env.set('QTDIR', self.prefix)
def setup_dependent_build_environment(self, env, dependent_spec):
env.set('QTDIR', self.prefix)
def setup_dependent_package(self, module, dependent_spec):
module.qmake = Executable(join_path(self.spec.prefix.bin, 'qmake'))
def get_mkspec(self):
"""Determine the mkspecs root directory and QT platform.
"""
spec = self.spec
cname = spec.compiler.name
pname = spec.architecture.platform
# Transform spack compiler name to a list of possible QT compilers
cnames = self.compiler_mapping.get(cname, [cname])
# Transform platform name to match those in QT
pname = self.platform_mapping.get(pname, pname)
qtplat = None
mkspec_dir = 'qtbase/mkspecs' if spec.satisfies('@5:') else 'mkspecs'
for subdir, cname in itertools.product(('', 'unsupported/'), cnames):
platdirname = "".join([subdir, pname, "-", cname])
tty.debug("Checking for platform '{0}' in {1}".format(
platdirname, mkspec_dir))
if os.path.exists(os.path.join(mkspec_dir, platdirname)):
qtplat = platdirname
break
else:
tty.warn("No matching QT platform was found in {0} "
"for platform '{1}' and compiler {2}".format(
mkspec_dir, pname, ",".join(cnames)))
return (mkspec_dir, qtplat)
@when('@4 platform=darwin')
def patch(self):
ogl = self.spec['opengl'] if '+opengl' in self.spec else None
deployment_target = str(MACOS_VERSION.up_to(2))
patches = {
'MACOSX_DEPLOYMENT_TARGET': deployment_target,
'PREFIX': self.prefix,
'OPENGL_INCDIR': ogl.prefix.include if ogl else "",
'OPENGL_LIBS': ogl.libs.ld_flags if ogl else "",
}
def repl(match):
# Replace the original config variable value with the one chosen
# here if it is mentioned in 'patches'; otherwise return the
# original value.
return patches.get(match.group(1), match.group(0))
files_to_filter = [
"configure",
"mkspecs/common/mac.conf",
"mkspecs/common/unix.conf",
"mkspecs/common/gcc-base-macx.conf",
"mkspecs/common/gcc-base.conf",
"qmake/generators/unix/unixmake.cpp",
"qmake/qmake.pri",
"src/tools/bootstrap/bootstrap.pro"
]
if '%clang' in self.spec or '%apple-clang' in self.spec:
files_to_filter += [
"mkspecs/unsupported/macx-clang-libc++/qmake.conf",
"mkspecs/common/clang.conf"
]
elif '%gcc' in self.spec:
files_to_filter += [
"mkspecs/common/g++-macx.conf",
"mkspecs/darwin-g++/qmake.conf"
]
# Filter inserted configure variables
filter_file(r'@([a-zA-Z0-9_]+)@', repl, *files_to_filter)
# Remove debug build
files_to_filter = [
"src/3rdparty/webkit/Source/WebKit.pri",
"src/3rdparty/webkit/Source/WebKit/qt/declarative/declarative.pro",
"src/imports/qimportbase.pri",
"src/plugins/qpluginbase.pri",
"src/qbase.pri",
"tools/designer/src/components/lib/lib.pro",
"tools/designer/src/lib/lib.pro",
"tools/designer/src/plugins/activeqt/activeqt.pro",
"tools/designer/src/plugins/plugins.pri",
"tools/designer/src/uitools/uitools.pro",
]
filter_file(r'(\+=.*)debug_and_release', r'\1', *files_to_filter)
@when('@4: %gcc') # *NOT* darwin/mac gcc
def patch(self):
(mkspec_dir, platform) = self.get_mkspec()
def conf(name):
return os.path.join(mkspec_dir, 'common', name + '.conf')
# Fix qmake compilers in the default mkspec
filter_file('^QMAKE_CC .*', 'QMAKE_CC = cc', conf('g++-base'))
filter_file('^QMAKE_CXX .*', 'QMAKE_CXX = c++', conf('g++-base'))
# Don't error out on undefined symbols
filter_file('^QMAKE_LFLAGS_NOUNDEF .*', 'QMAKE_LFLAGS_NOUNDEF = ',
conf('g++-unix'))
if self.spec.satisfies('@4'):
# The gnu98 flag is necessary to build with GCC 6 and other modern
# compilers (see http://stackoverflow.com/questions/10354371/);
# be permissive because of the abundance of older code, and hide
# all warnings because there are so many of them with newer
# compilers
with open(conf('gcc-base'), 'a') as f:
f.write("QMAKE_CXXFLAGS += -std=gnu++98 -fpermissive -w\n")
@when('@4: %intel')
def patch(self):
(mkspec_dir, platform) = self.get_mkspec()
conf_file = os.path.join(mkspec_dir, platform, "qmake.conf")
# Intel's `ar` equivalent might not be in the path: replace it with
# explicit
xiar = os.path.join(os.path.dirname(self.compiler.cc), 'xiar')
filter_file(r'\bxiar\b', xiar, conf_file)
if self.spec.satisfies('@4'):
with open(conf_file, 'a') as f:
f.write("QMAKE_CXXFLAGS += -std=gnu++98\n")
@when('@4 %clang')
def patch(self):
(mkspec_dir, platform) = self.get_mkspec()
conf_file = os.path.join(mkspec_dir, platform, "qmake.conf")
with open(conf_file, 'a') as f:
f.write("QMAKE_CXXFLAGS += -std=gnu++98\n")
@property
def common_config_args(self):
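        """Configure arguments shared by the Qt4 and Qt5 configure phases."""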
spec = self.spec
version = self.version
# incomplete list is here http://doc.qt.io/qt-5/configure-options.html
config_args = [
'-prefix', self.prefix,
'-v',
'-opensource',
'-{0}opengl'.format('' if '+opengl' in spec else 'no-'),
'-{0}'.format('debug' if '+debug' in spec else 'release'),
'-confirm-license',
'-optimized-qmake',
'-no-pch',
]
def use_spack_dep(spack_pkg, qt_name=None):
pkg = spec[spack_pkg]
config_args.append('-system-' + (qt_name or spack_pkg))
if not pkg.external:
config_args.extend(pkg.libs.search_flags.split())
config_args.extend(pkg.headers.include_flags.split())
if '+gui' in spec:
use_spack_dep('freetype')
if not MACOS_VERSION:
config_args.append('-fontconfig')
elif version < Version('5.15'):
# Linux-only QT5 dependencies
config_args.append('-system-xcb')
else:
config_args.append('-no-freetype')
config_args.append('-no-gui')
if '+ssl' in spec:
pkg = spec['openssl']
config_args.append('-openssl-linked')
config_args.extend(pkg.libs.search_flags.split())
config_args.extend(pkg.headers.include_flags.split())
else:
config_args.append('-no-openssl')
if '+sql' in spec:
use_spack_dep('sqlite')
else:
comps = ['db2', 'ibase', 'oci', 'tds', 'mysql', 'odbc', 'psql',
'sqlite', 'sqlite2']
config_args.extend("-no-sql-" + component for component in comps)
if '+shared' in spec:
config_args.append('-shared')
else:
config_args.append('-static')
if version >= Version('5'):
use_spack_dep('pcre' if spec.satisfies('@5.0:5.8') else 'pcre2',
'pcre')
use_spack_dep('harfbuzz')
if version >= Version('5.7'):
use_spack_dep('double-conversion', 'doubleconversion')
if version <= Version('5.7.1'):
config_args.append('-no-openvg')
else:
# FIXME: those could work for other versions
use_spack_dep('libpng')
use_spack_dep('jpeg', 'libjpeg')
use_spack_dep('zlib')
if '@:5.7.0' in spec:
config_args.extend([
# NIS is deprecated in more recent glibc,
# but qt-5.7.1 does not recognize this option
'-no-nis',
])
# COMPONENTS
if '~examples' in spec:
config_args.extend(['-nomake', 'examples'])
if '~tools' in spec:
config_args.extend(['-nomake', 'tools'])
if '+dbus' in spec:
dbus = spec['dbus'].prefix
config_args.append('-dbus-linked')
config_args.append('-I%s/dbus-1.0/include' % dbus.lib)
config_args.append('-I%s/dbus-1.0' % dbus.include)
config_args.append('-L%s' % dbus.lib)
else:
config_args.append('-no-dbus')
if MACOS_VERSION:
config_args.append('-{0}framework'.format(
'' if '+framework' in spec else 'no-'))
(_, qtplat) = self.get_mkspec()
if qtplat is not None:
config_args.extend(['-platform', qtplat])
return config_args
@when('@3')
def configure(self, spec, prefix):
# A user reported that this was necessary to link Qt3 on ubuntu.
# However, if LD_LIBRARY_PATH is not set the qt build fails, check
# and set LD_LIBRARY_PATH if not set, update if it is set.
if os.environ.get('LD_LIBRARY_PATH'):
os.environ['LD_LIBRARY_PATH'] += os.pathsep + os.getcwd() + '/lib'
else:
os.environ['LD_LIBRARY_PATH'] = os.pathsep + os.getcwd() + '/lib'
configure('-prefix', prefix,
'-v',
'-thread',
'-shared',
'-release',
'-fast')
@when('@4')
def configure(self, spec, prefix):
config_args = self.common_config_args
config_args.extend([
'-fast',
'-no-declarative-debug',
'-{0}gtkstyle'.format('' if '+gtk' in spec else 'no-'),
'-{0}webkit'.format('' if '+webkit' in spec else 'no-'),
'-{0}phonon'.format('' if '+phonon' in spec else 'no-'),
'-arch', str(spec.target.family),
'-xmlpatterns',
])
# Disable phonon backend until gstreamer is setup as dependency
if '+phonon' in self.spec:
config_args.append('-no-phonon-backend')
if '~examples' in self.spec:
config_args.extend(['-nomake', 'demos'])
if MACOS_VERSION:
sdkpath = which('xcrun')('--show-sdk-path', output=str).strip()
config_args.extend([
'-cocoa',
'-sdk', sdkpath])
configure(*config_args)
@when('@5')
def configure(self, spec, prefix):
config_args = self.common_config_args
version = self.version
config_args.extend([
'-no-eglfs',
'-no-directfb',
'-{0}gtk{1}'.format(
'' if '+gtk' in spec else 'no-',
'' if version >= Version('5.7') else 'style')
])
if MACOS_VERSION:
config_args.extend([
'-no-xcb-xlib',
'-no-pulseaudio',
'-no-alsa',
])
if version < Version('5.12'):
config_args.append('-no-xinput2')
if '~webkit' in spec:
config_args.extend([
'-skip',
'webengine' if version >= Version('5.7') else 'qtwebkit',
])
if spec.satisfies('@5.7'):
config_args.extend(['-skip', 'virtualkeyboard'])
if version >= Version('5.8'):
# relies on a system installed wayland, i.e. no spack package yet
# https://wayland.freedesktop.org/ubuntu16.04.html
# https://wiki.qt.io/QtWayland
config_args.extend(['-skip', 'wayland'])
if '~opengl' in spec:
if version >= Version('5.10'):
config_args.extend([
'-skip', 'webglplugin',
'-skip', 'qt3d',
])
if version >= Version('5.14'):
config_args.extend(['-skip', 'qtquick3d'])
if version >= Version('5.15'):
config_args.extend(['-skip', 'qtlocation'])
configure(*config_args)
def build(self, spec, prefix):
make()
def install(self, spec, prefix):
make("install")
# Documentation generation requires the doc tools to be installed.
# @when @run_after currently seems to ignore the 'when' restriction.
@run_after('install')
def install_docs(self):
if '+doc' in self.spec:
make('docs')
make('install_docs')
| lgpl-2.1 | -1,463,404,528,243,253,200 | 38.625418 | 108 | 0.579887 | false |
arunkgupta/gramps | gramps/plugins/import/importgeneweb.py | 1 | 36597 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Martin Hawlisch, Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"Import from GeneWeb"
#-------------------------------------------------------------------------
#
# standard python modules
#
#-------------------------------------------------------------------------
import re
import time
from gramps.gen.ggettext import gettext as _
from gramps.gen.ggettext import ngettext
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".ImportGeneWeb")
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from gramps.gen.errors import GedcomError
from gramps.gen.lib import Attribute, AttributeType, ChildRef, Citation, Date, Event, EventRef, EventRoleType, EventType, Family, FamilyRelType, Name, NameType, Note, Person, PersonRef, Place, Source
from gramps.gen.db import DbTxn
from htmlentitydefs import name2codepoint
_date_parse = re.compile('([kmes~?<>]+)?([0-9/]+)([J|H|F])?(\.\.)?([0-9/]+)?([J|H|F])?')
_text_parse = re.compile('0\((.*)\)')
_mod_map = {
'>' : Date.MOD_AFTER,
'<' : Date.MOD_BEFORE,
'~' : Date.MOD_ABOUT,
}
_cal_map = {
'J' : Date.CAL_JULIAN,
'H' : Date.CAL_HEBREW,
'F' : Date.CAL_FRENCH,
}
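# Illustrative examples of date tokens matched by the patterns above (a
# sketch derived from _date_parse, _mod_map and _cal_map; not exhaustive):
#   '25/12/1890'   -> exact day/month/year
#   '~1850'        -> about 1850             (Date.MOD_ABOUT)
#   '>1850'        -> after 1850             (Date.MOD_AFTER)
#   '10/1793F'     -> month/year in the French Republican calendar
#   '1740..1750'   -> span from 1740 to 1750 (Date.MOD_SPAN)
#   '0(see_notes)' -> free-text date, matched by _text_parse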
#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
def importData(database, filename, user):
global callback
try:
g = GeneWebParser(database,filename)
except IOError,msg:
user.notify_error(_("%s could not be opened\n") % filename,str(msg))
return
try:
status = g.parse_geneweb_file()
except IOError,msg:
errmsg = _("%s could not be opened\n") % filename
user.notify_error(errmsg,str(msg))
return
#-------------------------------------------------------------------------
# For a description of the file format see
# http://cristal.inria.fr/~ddr/GeneWeb/en/gwformat.htm
#-------------------------------------------------------------------------
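#
# A rough sketch of the kind of records this parser handles, reconstructed
# from the handlers below (names and dates are made up; see the URL above
# for the authoritative format description):
#
#   fam DUPONT Jean +21/6/1853 #mp Paris MARTIN Anne
#   src parish_register
#   beg
#   - h Pierre 1854
#   - f Marie 1856 #bp Lyon
#   end
#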
class GeneWebParser(object):
def __init__(self, dbase, file):
self.db = dbase
self.f = open(file,"rU")
self.filename = file
self.encoding = 'iso-8859-1'
def get_next_line(self):
self.lineno += 1
line = self.f.readline()
if line:
try:
line = unicode(line.strip())
except UnicodeDecodeError:
line = unicode(line.strip(),self.encoding)
else:
line = None
return line
def parse_geneweb_file(self):
with DbTxn(_("GeneWeb import"), self.db, batch=True) as self.trans:
self.db.disable_signals()
t = time.time()
self.lineno = 0
self.index = 0
self.fam_count = 0
self.indi_count = 0
self.fkeys = []
self.ikeys = {}
self.pkeys = {}
self.skeys = {}
self.current_mode = None
self.current_family = None
self.current_husband_handle = None
self.current_child_birthplace_handle = None
self.current_child_source_handle = None
try:
while 1:
line = self.get_next_line()
if line is None:
break
if line == "":
continue
fields = line.split(" ")
LOG.debug("LINE: %s" %line)
if fields[0] == "fam":
self.current_mode = "fam"
self.read_family_line(line,fields)
elif fields[0] == "rel":
self.current_mode = "rel"
self.read_relationship_person(line,fields)
elif fields[0] == "src":
self.read_source_line(line,fields)
elif fields[0] in ("wit", "wit:"):
self.read_witness_line(line,fields)
elif fields[0] == "cbp":
self.read_children_birthplace_line(line,fields)
elif fields[0] == "csrc":
self.read_children_source_line(line,fields)
elif fields[0] == "beg" and self.current_mode == "fam":
self.read_children_lines()
elif fields[0] == "beg" and self.current_mode == "rel":
self.read_relation_lines()
elif fields[0] == "comm":
self.read_family_comment(line,fields)
elif fields[0] == "notes":
self.read_person_notes_lines(line,fields)
elif fields[0] == "notes-db":
self.read_database_notes_lines(line,fields)
elif fields[0] == "end":
self.current_mode = None
elif fields[0] == "encoding:":
self.encoding = fields[1]
else:
LOG.warn("parse_geneweb_file(): Token >%s< unknown. line %d skipped: %s" %
(fields[0],self.lineno,line))
except GedcomError, err:
self.errmsg(str(err))
t = time.time() - t
msg = ngettext('Import Complete: %d second','Import Complete: %d seconds', t ) % t
self.db.enable_signals()
self.db.request_rebuild()
LOG.debug(msg)
LOG.debug("Families: %d" % len(self.fkeys))
LOG.debug("Individuals: %d" % len(self.ikeys))
return None
def read_family_line(self,line,fields):
self.current_husband_handle = None
self.current_child_birthplace_handle = None
self.current_child_source_handle = None
self.current_family = Family()
self.db.add_family(self.current_family,self.trans)
#self.db.commit_family(self.current_family,self.trans)
self.fkeys.append(self.current_family.get_handle())
idx = 1;
LOG.debug("\nHusband:")
(idx, husband) = self.parse_person(fields,idx,Person.MALE,None)
if husband:
self.current_husband_handle = husband.get_handle()
self.current_family.set_father_handle(husband.get_handle())
self.db.commit_family(self.current_family,self.trans)
husband.add_family_handle(self.current_family.get_handle())
self.db.commit_person(husband,self.trans)
LOG.debug("Marriage:")
idx = self.parse_marriage(fields,idx)
LOG.debug("Wife:")
(idx,wife) = self.parse_person(fields,idx,Person.FEMALE,None)
if wife:
self.current_family.set_mother_handle(wife.get_handle())
self.db.commit_family(self.current_family,self.trans)
wife.add_family_handle(self.current_family.get_handle())
self.db.commit_person(wife,self.trans)
return None
def read_relationship_person(self,line,fields):
LOG.debug("\Relationships:")
(idx,person) = self.parse_person(fields,1,Person.UNKNOWN,None)
if person:
self.current_relationship_person_handle = person.get_handle()
def read_relation_lines(self):
if not self.current_relationship_person_handle:
LOG.warn("Unknown person for relationship in line %d!" % self.lineno)
return None
rel_person = self.db.get_person_from_handle(self.current_relationship_person_handle)
while 1:
line = self.get_next_line()
if line is None or line == "end":
break
if line == "":
continue
# match relationship type and related person
line_re = re.compile("^- ([^:]+): (.*)$")
matches = line_re.match(line)
if matches:
#split related person into fields
fields = matches.groups()[1].split(" ")
if fields:
(idx,asso_p) = self.parse_person(fields,0,Person.UNKNOWN,None)
pref = PersonRef()
pref.set_relation(matches.groups()[0])
LOG.warn("TODO: Handle association types properly")
pref.set_reference_handle(asso_p.get_handle())
rel_person.add_person_ref(pref)
self.db.commit_person(rel_person,self.trans)
else:
LOG.warn("Invalid name of person in line %d" % self.lineno)
else:
LOG.warn("Invalid relationship in line %d" % self.lineno)
break
self.current_mode = None
return None
def read_source_line(self,line,fields):
if not self.current_family:
LOG.warn("Unknown family of child in line %d!" % self.lineno)
return None
source = self.get_or_create_source(self.decode(fields[1]))
self.current_family.add_citation(source.get_handle())
self.db.commit_family(self.current_family,self.trans)
return None
def read_witness_line(self,line,fields):
LOG.debug("Witness:")
if fields[1] == "m:":
(idx,wit_p) = self.parse_person(fields,2,Person.MALE,None)
elif fields[1] == "f:":
(idx,wit_p) = self.parse_person(fields,2,Person.FEMALE,None)
else:
(idx,wit_p) = self.parse_person(fields,1,None,None)
if wit_p:
mev = None
# search marriage event
for evr in self.current_family.get_event_ref_list():
ev = self.db.get_event_from_handle(evr.get_reference_handle())
if ev.get_type() == EventType.MARRIAGE:
mev = ev # found.
if not mev: # No marriage event found create a new one
mev = self.create_event(EventType.MARRIAGE, None, None, None, None)
mar_ref = EventRef()
mar_ref.set_reference_handle(mev.get_handle())
self.current_family.add_event_ref(mar_ref)
wit_ref = EventRef()
wit_ref.set_role(EventRoleType(EventRoleType.WITNESS))
wit_ref.set_reference_handle(mev.get_handle())
wit_p.add_event_ref(wit_ref)
self.db.commit_person(wit_p,self.trans)
return None
def read_children_lines(self):
father_surname = "Dummy"
if not self.current_husband_handle:
LOG.warn("Unknown father for child in line %d!" % self.lineno)
return None
husb = self.db.get_person_from_handle(self.current_husband_handle)
father_surname = husb.get_primary_name().get_surname()
if not self.current_family:
LOG.warn("Unknown family of child in line %d!" % self.lineno)
return None
while 1:
line = self.get_next_line()
if line is None:
break
if line == "":
continue
fields = line.split(" ")
if fields[0] == "-":
LOG.debug("Child:")
child = None
if fields[1] == "h":
(idx,child) = self.parse_person(fields,2,Person.MALE,father_surname)
elif fields[1] == "f":
(idx,child) = self.parse_person(fields,2,Person.FEMALE,father_surname)
else:
(idx,child) = self.parse_person(fields,1,Person.UNKNOWN,father_surname)
if child:
childref = ChildRef()
childref.set_reference_handle(child.get_handle())
self.current_family.add_child_ref( childref)
self.db.commit_family(self.current_family,self.trans)
child.add_parent_family_handle( self.current_family.get_handle())
if self.current_child_birthplace_handle:
birth = None
birth_ref = child.get_birth_ref()
if birth_ref:
birth = self.db.get_event_from_handle(birth_ref.ref)
if not birth:
birth = self.create_event(EventType.BIRTH)
birth_ref = EventRef()
birth_ref.set_reference_handle(birth.get_handle())
child.set_birth_ref(birth_ref)
birth.set_place_handle(self.current_child_birthplace_handle)
self.db.commit_event(birth,self.trans)
if self.current_child_source_handle:
child.add_citation(self.current_child_source_handle)
self.db.commit_person(child,self.trans)
else:
break
self.current_mode = None
return None
def read_children_birthplace_line(self,line,fields):
cbp = self.get_or_create_place(self.decode(fields[1]))
if cbp:
self.current_child_birthplace_handle = cbp.get_handle()
return None
def read_children_source_line(self,line,fields):
csrc = self.get_or_create_source(self.decode(fields[1]))
self.current_child_source_handle = csrc
return None
def read_family_comment(self,line,fields):
if not self.current_family:
LOG.warn("Unknown family of child in line %d!" % self.lineno)
return None
n = Note()
n.set(line)
self.db.add_note(n,self.trans)
self.current_family.add_note(n.handle)
self.db.commit_family(self.current_family,self.trans)
return None
def _read_notes_lines(self, note_tag):
note_txt = ""
while True:
line = self.get_next_line()
if line is None:
break
fields = line.split(" ")
if fields[0] == "end" and fields[1] == note_tag:
break
elif fields[0] == "beg":
continue
else:
if note_txt:
note_txt = note_txt + "\n" + line
else:
note_txt = note_txt + line
if note_txt:
n = Note()
n.set(note_txt)
self.db.add_note(n,self.trans)
return n.handle
return None
def read_person_notes_lines(self,line,fields):
(idx,person) = self.parse_person(fields,1,None,None)
note_handle = self._read_notes_lines( fields[0])
if note_handle:
person.add_note(note_handle)
self.db.commit_person(person,self.trans)
def read_database_notes_lines(self,line,fields):
note_handle = self._read_notes_lines( fields[0])
def parse_marriage(self,fields,idx):
mariageDataRe = re.compile("^[+#-0-9].*$")
mar_date = None
mar_place = None
mar_source = None
sep_date = None
div_date = None
married = 1
engaged = 0
        # skip to marriage date in case the person record contained unmatched tokens
#Alex: this failed when fields[idx] was an empty line. Fixed.
#while idx < len(fields) and not fields[idx][0] == "+":
while idx < len(fields) and not (fields[idx] and fields[idx][0] == "+"):
if fields[idx]:
LOG.warn(("parse_marriage(): Unknown field: " +
"'%s' in line %d!") % (fields[idx], self.lineno))
idx += 1
while idx < len(fields) and mariageDataRe.match(fields[idx]):
field = fields[idx]
idx += 1
if field.startswith("+"):
field = field[1:]
mar_date = self.parse_date(self.decode(field))
LOG.debug(" Married at: %s" % field)
elif field.startswith("-"):
field = field[1:]
div_date = self.parse_date(self.decode(field))
LOG.debug(" Div at: %s" % field)
elif field == "#mp" and idx < len(fields):
mar_place = self.get_or_create_place(self.decode(fields[idx]))
LOG.debug(" Marriage place: %s" % fields[idx])
idx += 1
elif field == "#ms" and idx < len(fields):
mar_source = self.get_or_create_source(self.decode(fields[idx]))
LOG.debug(" Marriage source: %s" % fields[idx])
idx += 1
elif field == "#sep" and idx < len(fields):
sep_date = self.parse_date(self.decode(fields[idx]))
LOG.debug(" Seperated since: %s" % fields[idx])
idx += 1
elif field == "#nm":
LOG.debug(" Are not married.")
married = 0
elif field == "#noment":
LOG.debug(" Not mentioned.")
elif field == "#eng":
LOG.debug(" Are engaged.")
engaged = 1
else:
LOG.warn(("parse_marriage(): Unknown field " +
"'%s'for mariage in line %d!") % (field, self.lineno))
if mar_date or mar_place or mar_source:
mar = self.create_event(
EventType.MARRIAGE, None, mar_date, mar_place, mar_source)
mar_ref = EventRef()
mar_ref.set_reference_handle(mar.get_handle())
mar_ref.set_role(EventRoleType.FAMILY)
self.current_family.add_event_ref(mar_ref)
self.current_family.set_relationship(
FamilyRelType(FamilyRelType.MARRIED))
if div_date:
div = self.create_event(
EventType.DIVORCE, None, div_date, None, None)
div_ref = EventRef()
div_ref.set_reference_handle(div.get_handle())
div_ref.set_role(EventRoleType.FAMILY)
self.current_family.add_event_ref(div_ref)
if sep_date or engaged:
sep = self.create_event(
EventType.ENGAGEMENT, None, sep_date, None, None)
sep_ref = EventRef()
sep_ref.set_reference_handle(sep.get_handle())
sep_ref.set_role(EventRoleType.FAMILY)
self.current_family.add_event_ref(sep_ref)
if not married:
self.current_family.set_relationship(
FamilyRelType(FamilyRelType.UNMARRIED))
self.db.commit_family(self.current_family,self.trans)
return idx
def parse_person(self,fields,idx,gender,father_surname):
if not father_surname:
if not idx < len(fields):
LOG.warn("Missing surname of person in line %d!" % self.lineno)
surname =""
else:
surname = self.decode(fields[idx])
idx += 1
else:
surname = father_surname
if not idx < len(fields):
LOG.warn("Missing firstname of person in line %d!" % self.lineno)
firstname = ""
else:
firstname = self.decode(fields[idx])
idx += 1
if idx < len(fields) and father_surname:
noSurnameRe = re.compile("^[({\[~><?0-9#].*$")
if not noSurnameRe.match(fields[idx]):
surname = self.decode(fields[idx])
idx += 1
LOG.debug("Person: %s %s" % (firstname, surname))
person = self.get_or_create_person(firstname,surname)
name = Name()
name.set_type( NameType(NameType.BIRTH))
name.set_first_name(firstname)
surname_obj = name.get_primary_surname()
surname_obj.set_surname(surname)
person.set_primary_name(name)
if person.get_gender() == Person.UNKNOWN and gender is not None:
person.set_gender(gender)
self.db.commit_person(person,self.trans)
personDataRe = re.compile("^[kmes0-9<>~#\[({!].*$")
dateRe = re.compile("^[kmes0-9~<>?]+.*$")
source = None
birth_parsed = False
birth_date = None
birth_place = None
birth_source = None
bapt_date = None
bapt_place = None
bapt_source = None
death_date = None
death_place = None
death_source = None
death_cause = None
crem_date = None
bur_date = None
bur_place = None
bur_source = None
public_name = None
firstname_aliases = []
nick_names = []
name_aliases = []
surname_aliases = []
while idx < len(fields) and personDataRe.match(fields[idx]):
field = fields[idx]
idx += 1
if field.startswith('('):
LOG.debug("Public Name: %s" % field)
public_name = self.decode(field[1:-1])
elif field.startswith('{'):
LOG.debug("Firstsname Alias: %s" % field)
firstname_aliases.append(self.decode(field[1:-1]))
elif field.startswith('['):
LOG.debug("Title: %s" % field)
titleparts = self.decode(field[1:-1]).split(":")
tname = ttitle = tplace = tstart = tend = tnth = None
try:
tname = titleparts[0]
ttitle = titleparts[1]
if titleparts[2]:
tplace = self.get_or_create_place(titleparts[2])
tstart = self.parse_date(titleparts[3])
tend = self.parse_date(titleparts[4])
tnth = titleparts[5]
except IndexError: # not all parts are written all the time
pass
                if tnth: # Append title number to the title
ttitle += ", " + tnth
title = self.create_event(
EventType.NOB_TITLE, ttitle, tstart, tplace)
# TODO: Geneweb has a start date and an end date, and therefore
# supports stuff like: FROM about 1955 TO between 1998 and 1999
# gramps only supports one single date or range.
if tname and tname != "*":
n = Note()
n.set(tname)
self.db.add_note(n,self.trans)
title.add_note( n.handle)
title_ref = EventRef()
title_ref.set_reference_handle(title.get_handle())
person.add_event_ref(title_ref)
elif field == '#nick' and idx < len(fields):
LOG.debug("Nick Name: %s" % fields[idx])
nick_names.append(self.decode(fields[idx]))
idx += 1
elif field == '#occu' and idx < len(fields):
LOG.debug("Occupation: %s" % fields[idx])
occu = self.create_event(
EventType.OCCUPATION, self.decode(fields[idx]))
occu_ref = EventRef()
occu_ref.set_reference_handle(occu.get_handle())
person.add_event_ref(occu_ref)
idx += 1
elif field == '#alias' and idx < len(fields):
LOG.debug("Name Alias: %s" % fields[idx])
name_aliases.append(self.decode(fields[idx]))
idx += 1
elif field == '#salias' and idx < len(fields):
LOG.debug("Surname Alias: %s" % fields[idx])
surname_aliases.append(self.decode(fields[idx]))
idx += 1
elif field == '#image' and idx < len(fields):
LOG.debug("Image: %s" % fields[idx])
idx += 1
elif field == '#src' and idx < len(fields):
LOG.debug("Source: %s" % fields[idx])
source = self.get_or_create_source(self.decode(fields[idx]))
idx += 1
elif field == '#bs' and idx < len(fields):
LOG.debug("Birth Source: %s" % fields[idx])
birth_source = self.get_or_create_source(self.decode(fields[idx]))
idx += 1
            elif field[0] == '!':
                # In the GeneWeb format the baptism date is attached directly
                # to the '!' marker, so it is parsed from this token itself.
                LOG.debug("Baptize at: %s" % field)
                bapt_date = self.parse_date(self.decode(field[1:]))
elif field == '#bp' and idx < len(fields):
LOG.debug("Birth Place: %s" % fields[idx])
birth_place = self.get_or_create_place(self.decode(fields[idx]))
idx += 1
elif field == '#pp' and idx < len(fields):
LOG.debug("Baptize Place: %s" % fields[idx])
bapt_place = self.get_or_create_place(self.decode(fields[idx]))
idx += 1
elif field == '#ps' and idx < len(fields):
LOG.debug("Baptize Source: %s" % fields[idx])
bapt_source = self.get_or_create_source(self.decode(fields[idx]))
idx += 1
elif field == '#dp' and idx < len(fields):
LOG.debug("Death Place: %s" % fields[idx])
death_place = self.get_or_create_place(self.decode(fields[idx]))
idx += 1
elif field == '#ds' and idx < len(fields):
LOG.debug("Death Source: %s" % fields[idx])
death_source = self.get_or_create_source(self.decode(fields[idx]))
idx += 1
elif field == '#buri' and idx < len(fields):
LOG.debug("Burial Date: %s" % fields[idx])
bur_date = self.parse_date(self.decode(fields[idx]))
idx += 1
elif field == '#crem' and idx < len(fields):
LOG.debug("Cremention Date: %s" % fields[idx])
crem_date = self.parse_date(self.decode(fields[idx]))
idx += 1
elif field == '#rp' and idx < len(fields):
LOG.debug("Burial Place: %s" % fields[idx])
bur_place = self.get_or_create_place(self.decode(fields[idx]))
idx += 1
elif field == '#rs' and idx < len(fields):
LOG.debug("Burial Source: %s" % fields[idx])
bur_source = self.get_or_create_source(self.decode(fields[idx]))
idx += 1
elif field == '#apubl':
LOG.debug("This is a public record")
elif field == '#apriv':
LOG.debug("This is a private record")
person.set_privacy(True)
elif field == '#h':
LOG.debug("This is a restricted record")
                #TODO: Gramps does not currently support this privacy level
person.set_privacy(True)
elif dateRe.match(field):
if not birth_parsed:
LOG.debug("Birth Date: %s" % field)
birth_date = self.parse_date(self.decode(field))
birth_parsed = True
else:
LOG.debug("Death Date: %s" % field)
death_date = self.parse_date(self.decode(field))
if field == "mj":
death_cause = "Died joung"
elif field.startswith("k"):
death_cause = "Killed"
elif field.startswith("m"):
death_cause = "Murdered"
elif field.startswith("e"):
death_cause = "Executed"
elif field.startswith("d"):
death_cause = "Disappeared"
#TODO: Set special death types more properly
else:
LOG.warn(("parse_person(): Unknown field " +
"'%s' for person in line %d!") % (field, self.lineno))
if public_name:
name = person.get_primary_name()
name.set_type(NameType(NameType.BIRTH))
person.add_alternate_name(name)
name = Name()
name.set_type(NameType(NameType.AKA))
name.set_first_name(public_name)
surname_obj = name.get_primary_surname()
surname_obj.set_surname(surname)
person.set_primary_name(name)
for aka in nick_names:
name = Attribute()
name.set_type(AttributeType(AttributeType.NICKNAME))
name.set_value(aka)
person.add_attribute(name)
for aka in firstname_aliases:
name = Name()
name.set_type(NameType(NameType.AKA))
name.set_first_name(aka)
surname_obj = name.get_primary_surname()
surname_obj.set_surname(surname)
person.add_alternate_name(name)
for aka in name_aliases:
name = Name()
name.set_type(NameType(NameType.AKA))
name.set_first_name(aka)
surname_obj = name.get_primary_surname()
surname_obj.set_surname(surname)
person.add_alternate_name(name)
for aka in surname_aliases:
name = Name()
name.set_type(NameType(NameType.AKA))
if public_name:
name.set_first_name(public_name)
else:
name.set_first_name(firstname)
surname_obj = name.get_primary_surname()
surname_obj.set_surname(aka)
person.add_alternate_name(name)
if source:
person.add_citation(source.get_handle())
if birth_date or birth_place or birth_source:
birth = self.create_event(EventType.BIRTH, None, birth_date, birth_place, birth_source)
birth_ref = EventRef()
birth_ref.set_reference_handle( birth.get_handle())
person.set_birth_ref( birth_ref)
if bapt_date or bapt_place or bapt_source:
babt = self.create_event(EventType.BAPTISM, None, bapt_date, bapt_place, bapt_source)
babt_ref = EventRef()
babt_ref.set_reference_handle( babt.get_handle())
person.add_event_ref( babt_ref)
if death_date or death_place or death_source or death_cause:
death = self.create_event(EventType.DEATH, None, death_date, death_place, death_source)
if death_cause:
death.set_description(death_cause)
self.db.commit_event(death,self.trans)
death_ref = EventRef()
death_ref.set_reference_handle( death.get_handle())
person.set_death_ref( death_ref)
if bur_date:
bur = self.create_event(EventType.BURIAL, None, bur_date, bur_place, bur_source)
bur_ref = EventRef()
bur_ref.set_reference_handle( bur.get_handle())
person.add_event_ref( bur_ref)
if crem_date:
crem = self.create_event(EventType.CREMATION, None, crem_date, bur_place, bur_source)
crem_ref = EventRef()
crem_ref.set_reference_handle( crem.get_handle())
person.add_event_ref(crem_ref)
self.db.commit_person(person,self.trans)
return (idx,person)
def parse_date(self,field):
if field == "0":
return None
date = Date()
matches = _text_parse.match(field)
if matches:
groups = matches.groups()
date.set_as_text(groups[0])
date.set_modifier(Date.MOD_TEXTONLY)
return date
matches = _date_parse.match(field)
if matches:
groups = matches.groups()
mod = _mod_map.get(groups[0],Date.MOD_NONE)
if groups[3] == "..":
mod = Date.MOD_SPAN
cal2 = _cal_map.get(groups[5],Date.CAL_GREGORIAN)
sub2 = self.sub_date(groups[4])
else:
sub2 = (0,0,0)
cal1 = _cal_map.get(groups[2],Date.CAL_GREGORIAN)
sub1 = self.sub_date(groups[1])
date.set(Date.QUAL_NONE,mod, cal1,
(sub1[0],sub1[1],sub1[2],0,sub2[0],sub2[1],sub2[2],0))
return date
else:
return None
def sub_date(self,data):
vals = data.split('/')
if len(vals) == 1:
return (0,0,int(vals[0]))
elif len(vals) == 2:
return (0,int(vals[0]),int(vals[1]))
else:
return (int(vals[0]),int(vals[1]),int(vals[2]))
def create_event(self,type,desc=None,date=None,place=None,source=None):
event = Event()
if type:
event.set_type(EventType(type))
if desc:
event.set_description(desc)
if date:
event.set_date_object(date)
if place:
event.set_place_handle(place.get_handle())
if source:
event.add_citation(source.get_handle())
self.db.add_event(event,self.trans)
self.db.commit_event(event,self.trans)
return event
def get_or_create_person(self,firstname,lastname):
person = None
mykey = firstname+lastname
if mykey in self.ikeys and firstname != "?" and lastname != "?":
person = self.db.get_person_from_handle(self.ikeys[mykey])
else:
person = Person()
self.db.add_person(person,self.trans)
self.db.commit_person(person,self.trans)
self.ikeys[mykey] = person.get_handle()
return person
def get_or_create_place(self,place_name):
place = None
if place_name in self.pkeys:
place = self.db.get_place_from_handle(self.pkeys[place_name])
else:
place = Place()
place.set_title(place_name)
self.db.add_place(place,self.trans)
self.db.commit_place(place,self.trans)
self.pkeys[place_name] = place.get_handle()
return place
def get_or_create_source(self,source_name):
source = None
if source_name in self.skeys:
source = self.db.get_source_from_handle(self.skeys[source_name])
else:
source = Source()
source.set_title(source_name)
self.db.add_source(source,self.trans)
self.db.commit_source(source,self.trans)
self.skeys[source_name] = source.get_handle()
citation = Citation()
citation.set_reference_handle(source.get_handle())
self.db.add_citation(citation, self.trans)
self.db.commit_citation(citation, self.trans)
return citation
def decode(self,s):
s = s.replace('_',' ')
charref_re = re.compile('(&#)(x?)([0-9a-zA-Z]+)(;)')
for match in charref_re.finditer(s):
try:
if match.group(2): # HEX
nchar = unichr(int(match.group(3),16))
else: # Decimal
nchar = unichr(int(match.group(3)))
s = s.replace(match.group(0), nchar)
except UnicodeDecodeError:
pass
# replace named entities
entref_re = re.compile('(&)([a-zA-Z]+)(;)')
for match in entref_re.finditer(s):
try:
if match.group(2) in name2codepoint:
nchar = unichr(name2codepoint[match.group(2)])
s = s.replace(match.group(0), nchar)
except UnicodeDecodeError:
pass
return( s)
def debug( self, txt):
if enable_debug:
print txt
| gpl-2.0 | 4,967,616,865,743,798,000 | 39.438674 | 199 | 0.51067 | false |
bkuczenski/lca-tools | antelope_catalog/data_sources/ecoinvent.py | 1 | 4573 | import os
import re
from .data_source import DataSource, DataCollection
from .ecoinvent_lcia import EcoinventLciaConfig, EI_LCIA_SPREADSHEETS
FILE_PREFIX = ('current_Version_', 'ecoinvent ')
FILE_EXT = ('7z', 'zip')
ECOINVENT_SYS_MODELS = ('apos', 'conseq', 'cutoff')
MODELMAP = {
'apos': ('apos',),
'conseq': ('consequential', 'consequential_longterm'),
'cutoff': ('cutoff',)
}
class Ecoinvent3Base(DataSource):
_ds_type = 'EcospoldV2Archive'
def __init__(self, data_root, version, model, **kwargs):
assert model in ECOINVENT_SYS_MODELS
super(Ecoinvent3Base, self).__init__(data_root=data_root, **kwargs)
self._version = version
self._model = model
@property
def _lci_ref(self):
if self.lci_source is not None:
yield 'local.ecoinvent.lci.%s.%s' % (self._version, self._model)
@property
def _inv_ref(self):
if self.inv_source is not None:
yield 'local.ecoinvent.%s.%s' % (self._version, self._model)
@property
def references(self):
for x in self._lci_ref:
yield x
for x in self._inv_ref:
yield x
def interfaces(self, ref):
yield 'inventory'
def make_resources(self, ref):
if ref in self._lci_ref:
yield self._make_resource(ref, self.lci_source, interfaces='inventory', prefix='datasets')
elif ref in self._inv_ref:
yield self._make_resource(ref, self.inv_source, interfaces='inventory', prefix='datasets')
def _fname(self, ftype=None):
precheck = os.path.join(self.root, self._model)
if ftype is not None:
precheck += '_%s' % ftype
if os.path.isdir(precheck):
return precheck
for pf in FILE_PREFIX:
for mod in MODELMAP[self._model]:
if ftype is None:
pname = '%s%s_%s_ecoSpold02' % (pf, self._version, mod)
else:
pname = '%s%s_%s_%s_ecoSpold02' % (pf, self._version, mod, ftype)
dsource = os.path.join(self.root, self._version, pname)
if os.path.isdir(dsource):
return dsource
for ext in FILE_EXT:
fname = '%s.%s' % (pname, ext)
source = os.path.join(self.root, self._version, fname)
if os.path.exists(source):
return source
@property
def inv_source(self):
return self._fname()
@property
def lci_source(self):
return self._fname('lci')
class EcoinventConfig(DataCollection):
"""
Ecoinvent Configurator
This DataCollection generates LcResource objects for ecoinvent archives. The only required input is the root
folder containing the ecoinvent data. Within this folder should be subfolders named after the major and minor
version of the database (e.g. "3.3"). Within the subfolders should be the archives.
Archives must be named according to ecoinvent's various conventions:
#1#2_#3_ecoSpold02#4
where
#1 is one of ('current_Version_' or 'ecoinvent ')
#2 is the major and minor version ('2.2', '3.01', '3.1', '3.2', etc)
#3 is the system model ('apos', 'cutoff', 'consequential', or 'consequential_longterm')
#4 is either omitted (in the case of an expanded directory) or an archive extension ('.zip' or '.7z')
Within the archives, either compressed or expanded, the datasets are assumed to be in a subfolder called 'datasets'
The class does not currently support loading undefined data collections, but this could be added in the future.
"""
@property
def ecoinvent_versions(self):
for d in os.listdir(self._root):
if os.path.isdir(os.path.join(self._root, d)):
if re.match('[23]\.[0-9]+', d):
yield d
if d.lower() == 'lcia':
yield d
def factory(self, data_root, **kwargs):
for v in self.ecoinvent_versions:
if v.lower() == 'lcia':
lcia_path = os.path.join(data_root, v)
for ver, info in EI_LCIA_SPREADSHEETS.items():
if os.path.exists(os.path.join(lcia_path, info.filename)):
yield EcoinventLciaConfig(lcia_path, version=ver)
else:
for m in ECOINVENT_SYS_MODELS:
yield Ecoinvent3Base(data_root, v, m, **kwargs)
yield EcoinventLciaConfig(os.path.join(data_root, v), version=v)
| gpl-2.0 | -8,512,925,242,131,286,000 | 36.483607 | 119 | 0.586486 | false |
richardjuan/nots_parser | core/parser.py | 1 | 3044 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class Parser:
def __init__(self,name):
self.rules = []
self.buffer = ""
self.full_sentence = ""
		self.flags = [ 0,0,0,0,0,0 ] # success, nl, end_instruction, interrupt, no print, autoexec sentence
self.eatch_run = ""
self.separator = "\n"
self.argsep = " "
self.load_rules("core/rules_fld/%s" % name)
self.fl = self.load_functions("function_fld.%s" % name)
self.fl.__init__(self)
def load_functions(self,function_name):
return __import__(function_name, globals(), locals(), ['*'], -1)
def load_rules(self,file_name):
f = open( file_name, "r" )
for line in f:
if line[0] == "#":
continue
line = line.strip().split(":")
if line[1][0:3] == '"\\x' :
line[1] = line[1][3:-1].decode('hex')
if line[1][0:1] == '"' and line[1][-1] == '"':
line[1] = line[1][1:-1]
self.rules.append( line )
if line[0] == "*":
self.eatch_run = "self.fl.%s" % line[1]
if line[0] == "|":
self.separator = "%s" % line[1]
f.close()
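	# A sketch of the rules-file syntax read above (entries are hypothetical;
	# each non-comment line is token:match[:call], split on ':', and the calls
	# name functions in the corresponding function_fld module):
	#
	#   # lines starting with '#' are ignored
	#   |:"\x0a"              -> sentence/argument separator
	#   .:"\x0d":enter()      -> fire when this single character is fed
	#   .+:exit:quit()        -> fire when the buffer equals this word
	#   +.:help:show_help()   -> matched against the first argument in run()
	#   *:store()             -> executed after every character fed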
def add_char_to_buffer(self,c):
self.buffer = "%s%s" % (self.buffer,c)
def rem_char_from_buffer(self,c = 1):
if len(self.buffer) > 0:
self.buffer = self.buffer[:-1*c]
return True
else:
return False
def rem_char_from_sentence(self,c = 1):
if len(self.full_sentence) > 0:
self.full_sentence = self.full_sentence[:-1*c]
return True
else:
return False
def add_char_to_sentence(self,c):
self.full_sentence = "%s%s" % (self.full_sentence,c)
def store_buffer_arg(self):
tmp = self.buffer
if tmp != "":
if self.full_sentence != "":
self.full_sentence = "%s %s" % (self.full_sentence,self.buffer)
else:
self.full_sentence = "%s" % self.buffer
self.buffer = ""
return tmp
def flush(self):
self.buffer = ""
self.full_sentence = ""
return True
def load_first_arg(self):
index = 0
self.buffer= ""
while self.full_sentence[index] != " ":
self.buffer = "%s%s" % (self.buffer, self.full_sentence[index] )
index = index + 1
self.full_sentence = self.full_sentence[index:]
def run(self):
found = False
self.load_first_arg()
for rule in self.rules:
if rule[0] == "+.":
if self.buffer == rule[1]:
exec("self.fl.%s" % rule[2])
found = True
return found
def feed(self,char):
for rule in self.rules:
if rule[0] == ".":
if rule[1] == char :
exec("self.fl.%s" % rule[2])
if self.flags[3] == 1:
self.flags[3] = 0
return True
self.add_char_to_sentence(char)
if char != self.argsep:
self.add_char_to_buffer(char)
else:
self.buffer = ""
if self.buffer != "":
for rule in self.rules:
if rule[0] == ".+":
if self.buffer.strip() == rule[1]:
exec("self.fl.%s" % rule[2])
if self.flags[3] == 1:
self.flags[3] = 0
return True
if self.flags[4] == 1:
self.flags[4] = 0
char = ""
code = compile(self.eatch_run,'<string>','exec')
exec code
return True
#self.fl.printtofile("%s\n" % (self.buffer))
| mit | -3,129,648,057,993,112,600 | 21.382353 | 97 | 0.567346 | false |
lkloh/aimbat-lite | scripts/egalign1.py | 1 | 2209 | #!/usr/bin/env python
"""
Example python script for seismogram alignments by SAC p1
Xiaoting Lou ([email protected])
03/07/2012
"""
from pylab import *
import sys
import matplotlib.transforms as transforms
from pysmo.aimbat.sacpickle import loadData, SacDataHdrs
from pysmo.aimbat.plotphase import getOptions, sacp1, sacp2, sacprs
from pysmo.aimbat.ttconfig import PPConfig, QCConfig, CCConfig, MCConfig
def axes1(npick=2):
fig = figure(figsize=(9.5,12.7))
axs = [fig.add_subplot(1,npick,i+1) for i in range(npick) ]
subplots_adjust(bottom=.04, top=0.97, left=.065, right=.905, wspace=.4, hspace=.1)
return axs
def getwin(gsac, opts, pick='t2'):
'Get time window from array stack'
sacdh = gsac.stkdh
twh0, twh1 = opts.pppara.twhdrs
tw0 = sacdh.gethdr(twh0)
tw1 = sacdh.gethdr(twh1)
tref = sacdh.gethdr(pick)
tw = tw0-tref, tw1-tref
print('Time window wrt {:s}: [{:.1f} {:.1f}] s'.format(pick, tw[0], tw[1]))
return tw
def plotwin(ax, tw, pppara):
'Plot time window'
tw0, tw1 = tw
ymin, ymax = ax.get_ylim()
a, col = pppara.alphatwfill, pppara.colortwfill
ax.fill([tw0,tw1,tw1,tw0], [ymin,ymin,ymax,ymax], col, alpha=a, edgecolor=col)
def load():
'load data'
opts, ifiles = getOptions()
pppara = PPConfig()
ccpara = CCConfig()
gsac = loadData(ifiles, opts, pppara)
if opts.filemode == 'pkl':
opts.fstack = None
else:
opts.fstack = ccpara.fstack
gsac.stkdh = SacDataHdrs(opts.fstack, opts.delta)
opts.pppara = pppara
opts.ccpara = ccpara
return gsac, opts
if __name__ == '__main__':
gsac, opts = load()
saclist = gsac.saclist
xxlim = -20, 20
reltimes = [0, 3]
npick = len(reltimes)
axs = axes1(npick)
twa = -10, 10
twb = getwin(gsac, opts, 't2')
twins = [twa, twb]
tts = ['Predicted', 'Measured']
for i in range(npick):
opts.reltime = reltimes[i]
ax = axs[i]
sacp1(saclist, opts, ax)
ax.set_xlim(xxlim)
plotwin(ax, twins[i], opts.pppara)
ax.set_title(tts[i])
labs = 'ab'
for ax, lab in zip(axs, labs):
tt = '(' + lab + ')'
trans = transforms.blended_transform_factory(ax.transAxes, ax.transAxes)
ax.text(-.05, 1, tt, transform=trans, va='center', ha='right', size=16)
savefig('egalignp1.pdf', format='pdf')
show()
| gpl-3.0 | -4,794,161,238,946,575,000 | 24.102273 | 83 | 0.674966 | false |
ajylee/gpaw-rtxs | gpaw/test/complex.py | 1 | 1451 | from gpaw import GPAW, restart
from ase.structure import molecule
from gpaw.test import equal
Eini0 = -17.6122060535
Iini0 = 12
esolvers = ['cg', 'rmm-diis', 'dav']
E0 = {'cg': -17.612151335507559,
'rmm-diis': -17.612184220369553,
'dav': -17.612043641621657}
I0 = {'cg': 6, 'rmm-diis': 7, 'dav': 8}
calc = GPAW(xc='LDA',
eigensolver='cg',
convergence={'eigenstates': 3.5e-5},
#txt=None,
dtype=complex)
mol = molecule('N2')
mol.center(vacuum=3.0)
mol.set_calculator(calc)
Eini = mol.get_potential_energy()
Iini = calc.get_number_of_iterations()
print ('%10s: %12.6f eV in %3d iterations' %
('init(cg)', Eini, Iini))
equal(Eini, Eini0, 1E-8)
equal(Iini, Iini0, 12)
calc.write('N2.gpw', mode='all')
del calc, mol
E = {}
I = {}
for esolver in esolvers:
mol, calc = restart('N2.gpw', txt=None)
if (calc.wfs.dtype!=complex or
calc.wfs.kpt_u[0].psit_nG.dtype!=complex):
raise AssertionError('ERROR: restart failed to read complex WFS')
calc.scf.reset()
calc.set(convergence={'eigenstates': 3.5e-9})
calc.set(eigensolver=esolver)
E[esolver]=mol.get_potential_energy()
I[esolver]=calc.get_number_of_iterations()
print ('%10s: %12.6f eV in %3d iterations' %
(esolver, E[esolver], I[esolver]))
for esolver in esolvers:
equal(E[esolver], E0[esolver], 1E-8)
equal(I[esolver], I0[esolver], 0)
| gpl-3.0 | -8,990,381,093,668,258,000 | 24.017241 | 73 | 0.61337 | false |
csachs/tunable | tunable/modulehelper.py | 1 | 2113 | # -*- coding: utf-8 -*-
"""
Helpers for importing modules that are selected by name, optionally through
an argparse option that is pre-parsed before the remaining arguments.
"""
import sys
import argparse
import importlib
import warnings
from itertools import product
class ModuleHelper(object):
Exception = Exception
Warning = Warning
Ignore = 1
error_mode = Exception
prefixes = [""]
modules = {}
@classmethod
def add_prefix(cls, prefix):
cls.prefixes.append(prefix)
@classmethod
def load_module(cls, module_str):
if module_str in cls.modules:
return
sys.path.insert(0, '.')
module_ = None
names = [
"%s%s" % (prefix, name,)
for name, prefix
in product([module_str, module_str.lower()], reversed(cls.prefixes))
]
for name in names:
try:
module_ = importlib.import_module(name)
break
except ImportError:
pass
if module_ is None:
error_msg = "Attempted to load any of %r, but could not load any module." % (names,)
if cls.error_mode == Exception:
raise ImportError(error_msg)
elif cls.error_mode == Warning:
warnings.warn(error_msg, ImportWarning)
else:
raise RuntimeError('Invalid error mode.')
cls.modules[module_str] = module_
class ImportAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
ModuleHelper.load_module(values)
@classmethod
def register_and_preparse(cls, parser, args=None, short='m', long='module'):
actions = parser._actions.copy()
option = parser._option_string_actions.copy()
parser._actions.clear()
parser._option_string_actions.clear()
parser.add_argument(parser.prefix_chars[0:1] + short,
parser.prefix_chars[0:1]*2 + long, type=str, action=cls.ImportAction)
parser.parse_known_args(args=args)
for action in actions:
parser._actions.insert(0, action)
parser._option_string_actions.update(option)
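if __name__ == '__main__':
    # Minimal usage sketch (illustrative only): pre-parse -m/--module so the
    # named module is imported before the remaining options are handled.
    # 'plugins.' is a hypothetical package prefix.
    demo_parser = argparse.ArgumentParser()
    demo_parser.add_argument('--verbose', action='store_true')
    ModuleHelper.add_prefix('plugins.')
    ModuleHelper.register_and_preparse(demo_parser)
    print(demo_parser.parse_args())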
| mit | -7,339,069,100,482,631,000 | 24.768293 | 97 | 0.574539 | false |
swcurran/tfrs | backend/api/serializers.py | 1 | 9172 | """
REST API Documentation for the NRS TFRS Credit Trading Application
The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation.
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from rest_framework import serializers
from .models.CreditTrade import CreditTrade
from .models.CreditTradeHistory import CreditTradeHistory
from .models.CreditTradeStatus import CreditTradeStatus
from .models.CreditTradeType import CreditTradeType
from .models.CreditTradeZeroReason import CreditTradeZeroReason
from .models.CurrentUserViewModel import CurrentUserViewModel
from .models.Organization import Organization
from .models.OrganizationActionsType import OrganizationActionsType
from .models.OrganizationAttachment import OrganizationAttachment
from .models.OrganizationBalance import OrganizationBalance
from .models.OrganizationHistory import OrganizationHistory
from .models.OrganizationStatus import OrganizationStatus
from .models.Permission import Permission
from .models.PermissionViewModel import PermissionViewModel
from .models.Role import Role
from .models.RolePermission import RolePermission
from .models.RolePermissionViewModel import RolePermissionViewModel
from .models.RoleViewModel import RoleViewModel
from .models.User import User
from .models.UserDetailsViewModel import UserDetailsViewModel
from .models.UserRole import UserRole
from .models.UserRoleViewModel import UserRoleViewModel
from .models.UserViewModel import UserViewModel
class CreditTradeSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTrade
fields = ('id', 'status', 'initiator', 'respondent',
'type', 'number_of_credits',
'fair_market_value_per_credit', 'zero_reason',
'trade_effective_date')
class CreditTradeHistorySerializer(serializers.ModelSerializer):
class Meta:
model = CreditTradeHistory
fields = ('id', 'credit_trade', 'user', 'credit_trade_update_time',
'respondent', 'status', 'type',
'number_of_credits', 'fair_market_value_per_credit',
'zero_reason', 'trade_effective_date',
'note', 'is_internal_history_record')
class CreditTradeStatusSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTradeStatus
fields = (
'id', 'status', 'description', 'effective_date', 'expiration_date',
'display_order')
class CreditTradeTypeSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTradeType
fields = (
'id', 'the_type', 'description', 'effective_date', 'expiration_date',
'display_order', 'is_gov_only_type')
class CreditTradeZeroReasonSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTradeZeroReason
fields = (
'id', 'reason', 'description', 'effective_date', 'expiration_date',
'display_order')
class CurrentUserViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = CurrentUserViewModel
fields = ('id', 'first_name', 'last_name', 'email', 'active', 'user_roles',
'sm_authorization_id', 'sm_authorization_directory')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = (
'id', 'name', 'status', 'actions_type',
'created_date')
class OrganizationActionsTypeSerializer(serializers.ModelSerializer):
class Meta:
model = OrganizationActionsType
fields = (
'id', 'the_type', 'description', 'effective_date', 'expiration_date',
'display_order')
class OrganizationAttachmentSerializer(serializers.ModelSerializer):
class Meta:
model = OrganizationAttachment
fields = (
'id', 'organization', 'file_name', 'file_location', 'description',
'compliance_year')
class OrganizationBalanceSerializer(serializers.ModelSerializer):
class Meta:
model = OrganizationBalance
fields = '__all__'
class OrganizationHistorySerializer(serializers.ModelSerializer):
class Meta:
model = OrganizationHistory
fields = ('id', 'organization', 'history_text')
class OrganizationStatusSerializer(serializers.ModelSerializer):
class Meta:
model = OrganizationStatus
fields = (
'id', 'status', 'description', 'effective_date', 'expiration_date',
'display_order')
class PermissionSerializer(serializers.ModelSerializer):
class Meta:
model = Permission
fields = ('id', 'code', 'name', 'description')
class PermissionViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = PermissionViewModel
fields = ('id', 'code', 'name', 'description')
class RoleSerializer(serializers.ModelSerializer):
class Meta:
model = Role
fields = ('id', 'name', 'description', 'is_government_role')
class RolePermissionSerializer(serializers.ModelSerializer):
class Meta:
model = RolePermission
fields = ('id', 'role', 'permission')
class RolePermissionViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = RolePermissionViewModel
fields = ('id', 'role_id', 'permission_id')
class RoleViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = RoleViewModel
fields = ('id', 'name', 'description')
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = (
'id', 'first_name', 'last_name', 'email', 'authorization_id',
'authorization_guid', 'authorization_directory', 'display_name')
class UserDetailsViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = UserDetailsViewModel
fields = (
'id', 'first_name', 'last_name', 'email', 'active', 'permissions')
class UserRoleSerializer(serializers.ModelSerializer):
class Meta:
model = UserRole
fields = ('id', 'user', 'role')
class UserRoleViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = UserRoleViewModel
fields = ('id', 'effective_date', 'expiration_date', 'role_id', 'authorization_id')
class UserViewModelSerializer(serializers.ModelSerializer):
class Meta:
model = UserViewModel
fields = ('id', 'first_name', 'last_name', 'email', 'active', 'sm_authorization_id',
'user_roles')
class CreditTradeCreateSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTrade
fields = '__all__'
class CreditTradeApproveSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTrade
fields = ('id', 'trade_effective_date', 'note',)
read_only_fields = ('status', 'number_of_credits',
'type',
'fair_market_value_per_credit',
'zero_reason',
)
class CreditTrade2Serializer(serializers.ModelSerializer):
status = CreditTradeStatusSerializer(read_only=True)
initiator = OrganizationSerializer(read_only=True)
respondent = OrganizationSerializer(read_only=True)
type = CreditTradeTypeSerializer(read_only=True)
zero_reason = CreditTradeZeroReasonSerializer(read_only=True)
credits_from = OrganizationSerializer(read_only=True)
credits_to = OrganizationSerializer(read_only=True)
class Meta:
model = CreditTrade
fields = ('id', 'status', 'initiator', 'respondent',
'type', 'number_of_credits',
'fair_market_value_per_credit', 'zero_reason',
'trade_effective_date', 'credits_from', 'credits_to')
# exclude = ('note',)
class CreditTradeHistory2Serializer(serializers.ModelSerializer):
status = CreditTradeStatusSerializer(read_only=True)
initiator = OrganizationSerializer(read_only=True)
respondent = OrganizationSerializer(read_only=True)
type = CreditTradeTypeSerializer(read_only=True)
zero_reason = CreditTradeZeroReasonSerializer(read_only=True)
class Meta:
model = CreditTradeHistory
fields = '__all__'
class CreditTradeHistoryCreateSerializer(serializers.ModelSerializer):
class Meta:
model = CreditTradeHistory
fields = '__all__'
| apache-2.0 | 7,038,989,066,852,127,000 | 33.611321 | 208 | 0.68502 | false |
mganeva/mantid | scripts/SANS/sans/common/configurations.py | 1 | 1321 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
""" The SANSConfigurations class holds instrument-specific configs to centralize instrument-specific magic numbers"""
# pylint: disable=too-few-public-methods
from __future__ import (absolute_import, division, print_function)
class Configurations(object):
class LARMOR(object):
# The full wavelength range of the instrument
wavelength_full_range_low = 0.5
wavelength_full_range_high = 13.5
class SANS2D(object):
# The full wavelength range of the instrument
wavelength_full_range_low = 2.0
wavelength_full_range_high = 14.0
class LOQ(object):
# The full wavelength range of the instrument
wavelength_full_range_low = 2.2
wavelength_full_range_high = 10.0
# The default prompt peak range for LOQ
prompt_peak_correction_min = 19000.0
prompt_peak_correction_max = 20500.0
class ZOOM(object):
# The full wavelength range of the instrument
wavelength_full_range_low = 1.75
wavelength_full_range_high = 16.5
| gpl-3.0 | 1,857,660,004,533,276,400 | 34.702703 | 117 | 0.688115 | false |
sawdog/OraclePyDoc | oraclepydoc/oraschemasql.py | 1 | 12170 | """ Oracle catalog queries """
class OracleCatalogStatement:
def __init__(self, userSql='', ownerSql='', ownerColumn='owner'):
self._userSql = userSql
self._ownerSql = ownerSql
self._ownerColumn = ownerColumn
def userSql(self):
return self._userSql
def ownerSql(self, inClause=None):
if inClause == None:
return self._ownerSql % 'where 1=1'
return self._ownerSql % ('where %s in (%s)' % (self._ownerColumn, inClause))
OracleCatalog = {
'tables' : OracleCatalogStatement(
userSql="""select table_name, partitioned, secondary, cluster_name,
iot_type, temporary, nested, tablespace_name
from user_tables""",
ownerSql="""select owner||'.'||table_name, partitioned, secondary, cluster_name,
iot_type, temporary, nested, tablespace_name
from all_tables %s"""
),
'tab_partitions': OracleCatalogStatement(
userSql='''select table_name, partition_name,
tablespace_name, high_value,
partition_position
from user_tab_partitions order by table_name, partition_position''',
ownerSql='''select table_owner||'.'||table_name, partition_name,
tablespace_name, high_value,
partition_position
from all_tab_partitions %s order by table_owner||'.'||table_name, partition_position''',
ownerColumn='table_owner'
),
'tab_comments' : OracleCatalogStatement(
userSql="""SELECT table_name, comments
FROM user_tab_comments
WHERE comments is not null""",
ownerSql="""SELECT owner||'.'||table_name, comments
FROM all_tab_comments
%s
and comments is not null"""
),
'col_comments' : OracleCatalogStatement(
userSql="""SELECT table_name, column_name, comments
FROM user_col_comments
where comments is not null""",
ownerSql="""SELECT owner||'.'||table_name, column_name, comments
FROM all_col_comments
%s
and comments is not null"""
),
'columns' : OracleCatalogStatement(
userSql="""select table_name, column_name, data_type , data_length, data_precision,
data_scale, nullable, column_id, data_default
from user_tab_columns
order by table_name, column_id""",
ownerSql="""select owner||'.'||table_name, column_name, data_type , data_length, data_precision,
data_scale, nullable, column_id, data_default
from all_tab_columns
%s
order by owner||'.'||table_name, column_id"""
),
'constraints' : OracleCatalogStatement(
userSql="""select table_name, constraint_name, constraint_type, search_condition, r_owner,
r_constraint_name , delete_rule
from user_constraints where r_owner is null or r_owner = user""",
ownerSql="""select owner||'.'||table_name, owner||'.'||constraint_name, constraint_type, search_condition, r_owner,
r_owner||'.'||r_constraint_name , delete_rule
from all_constraints
%s
--and r_owner is null --or r_owner = user"""
),
'cons_columns' : OracleCatalogStatement(
userSql="""select constraint_name, table_name, column_name, position from
user_cons_columns""",
ownerSql="""select owner||'.'||constraint_name, owner||'.'||table_name, column_name, position
from all_cons_columns
%s"""
),
'views' : OracleCatalogStatement(
userSql="""select view_name, text from user_views""",
ownerSql="""select owner||'.'||view_name, text from all_views %s"""
),
'mviews' : OracleCatalogStatement(
userSql="""select mview_name, container_name, query, updatable from user_mviews""",
ownerSql="""select owner||'.'||mview_name, container_name, query, updatable from all_mviews %s"""
),
'indexes' : OracleCatalogStatement(
userSql="""select index_name, table_name, index_type, uniqueness, include_column, generated, secondary
from user_indexes""",
ownerSql="""select owner||'.'||index_name, owner||'.'||table_name, index_type, uniqueness, include_column, generated, secondary
from all_indexes %s"""
),
'ind_columns' : OracleCatalogStatement(
userSql="""select index_name, table_name, column_name, column_position from user_ind_columns""",
ownerSql="""select index_owner||'.'||index_name, table_owner||'.'||table_name, column_name, column_position
from all_ind_columns %s""",
ownerColumn='index_owner'
),
'ind_expressions' : OracleCatalogStatement(
userSql="""select index_name, table_name, column_expression, column_position from user_ind_expressions""",
ownerSql="""select index_owner||'.'||index_name, table_owner||'.'||table_name, column_expression, column_position
from all_ind_expressions %s""",
ownerColumn='index_owner'
),
'updatable_columns' : OracleCatalogStatement(
userSql="""select table_name, column_name, insertable, updatable, deletable
from all_updatable_columns
where table_name in (select view_name from user_views)""",
ownerSql="""select owner||'.'||table_name, column_name, insertable, updatable, deletable
from all_updatable_columns %s"""
),
'triggers' : OracleCatalogStatement(
userSql="""select trigger_name, trigger_type, triggering_event, base_object_type, table_name,
column_name, referencing_names, when_clause, status, description, action_type, trigger_body
from user_triggers""",
ownerSql="""select owner||'.'||trigger_name, trigger_type, triggering_event, base_object_type, table_owner||'.'||table_name,
column_name, referencing_names, when_clause, status, description, action_type, trigger_body
from all_triggers
%s"""
),
'trigger_cols' : OracleCatalogStatement(
userSql="select trigger_name, table_name, column_name, column_list, column_usage from user_trigger_cols",
ownerSql="""select trigger_owner||'.'||trigger_name, table_owner||'.'||table_name, column_name, column_list, column_usage
from all_trigger_cols
%s""",
ownerColumn='trigger_owner'
),
'arguments' : OracleCatalogStatement(
userSql="""select object_name, package_name, argument_name, position, data_type, default_value, in_out, pls_type,
data_scale, data_precision, data_length
from user_arguments""",
ownerSql="""select owner||'.'||object_name, package_name, argument_name, position, data_type, default_value, in_out, pls_type,
data_scale, data_precision, data_length
from all_arguments
%s"""
),
'source' : OracleCatalogStatement(
userSql="select name, type, line, text from user_source where type not like 'TYPE%' order by name, line",
ownerSql="""select owner||'.'||name, type, line, text
from all_source
%s
and type not like 'TYPE%%' order by name, line"""
),
'sequences' : OracleCatalogStatement(
userSql="""select sequence_name, min_value, max_value, increment_by, cycle_flag, order_flag, cache_size
from user_sequences""",
ownerSql="""select sequence_owner||'.'||sequence_name, min_value, max_value, increment_by, cycle_flag, order_flag, cache_size
from all_sequences
%s""",
ownerColumn='sequence_owner'
),
'types' : OracleCatalogStatement(
userSql="""select type_name, type_oid, typecode, attributes, methods,
predefined, incomplete
from user_types""",
ownerSql="""select owner||'.'||type_name, type_oid, typecode, attributes, methods,
predefined, incomplete
from all_types
%s"""
),
'type_attrs' : OracleCatalogStatement(
userSql="""select type_name, attr_name, attr_type_mod, attr_type_owner,
attr_type_name, length, precision, scale, character_set_name,
attr_no
from user_type_attrs""",
ownerSql="""select owner||'.'||type_name, attr_name, attr_type_mod, attr_type_owner,
attr_type_name, length, precision, scale, character_set_name,
attr_no
from all_type_attrs
%s"""
),
'type_methods' : OracleCatalogStatement(
userSql="""select type_name, method_name, method_type, parameters, results
from user_type_methods""",
ownerSql="""select owner||'.'||type_name, method_name, method_type, parameters, results
from all_type_methods %s"""
),
'jobs' : OracleCatalogStatement(
userSql="""select job, log_user, priv_user, schema_user, total_time, broken,
interval, failures, what
from user_jobs""",
ownerSql="""select job, log_user, priv_user, schema_user, total_time, broken,
interval, failures, what
from all_jobs %s""",
ownerColumn='priv_user'
),
'dependencies' : OracleCatalogStatement(
userSql = """select name, referenced_owner, referenced_name, referenced_link_name,
referenced_type, dependency_type
from user_dependencies""",
ownerSql = """select owner||'.'||name, referenced_owner, referenced_name, referenced_link_name,
referenced_type, dependency_type
from all_dependencies
%s"""
)
}
if __name__ == '__main__':
print OracleCatalog['tables'].userSql()
print OracleCatalog['tables'].ownerSql()
print OracleCatalog['tab_partitions'].userSql()
print OracleCatalog['tab_partitions'].ownerSql("'BUAP','FOO'")
print OracleCatalog['tab_comments'].ownerSql("'BUAP','FOO'")
print OracleCatalog['dependencies'].userSql()
print OracleCatalog['dependencies'].ownerSql("'FOO', 'BAR'")
| gpl-2.0 | 2,138,350,719,873,260,500 | 49.920502 | 143 | 0.505916 | false |
jomoore/threepins | puzzle/admin.py | 1 | 4020 | """
Admin views for loading and editing puzzles.
Puzzles and blank grids are viewed as whole units using inline
elements. Some extra fields are added to upload XML and ipuz files
instead of relying on manual data entry.
"""
import json
from xml.etree import ElementTree
from django.contrib import admin
from django.db.models import CharField
from django.forms import TextInput, FileField, ModelForm
from puzzle.models import Puzzle, Entry, Blank, Block
XMLNS = '{http://crossword.info/xml/rectangular-puzzle}'
def import_from_xml(xml, puzzle):
"""Load a puzzle from Crossword Compiler XML format into the database."""
# pylint: disable=no-member
# false +ve on xml.etree.ElementTree.Element (v1.64)
crossword = ElementTree.parse(xml).find('*/%scrossword' % XMLNS)
for word in crossword.iter('%sword' % XMLNS):
xraw = word.attrib['x'].split('-')
yraw = word.attrib['y'].split('-')
xstart = int(xraw[0])
ystart = int(yraw[0])
down = len(yraw) > 1
clue = crossword.find('*/%sclue[@word="%s"]' % (XMLNS, word.attrib['id'])).text
if 'solution' in word.attrib:
answer = word.attrib['solution']
else:
answer = ''
if down:
for y in range(ystart, int(yraw[1]) + 1):
answer += crossword.find('*/%scell[@x="%d"][@y="%d"]' %
(XMLNS, xstart, y)).attrib['solution'].lower()
else:
for x in range(xstart, int(xraw[1]) + 1):
answer += crossword.find('*/%scell[@x="%d"][@y="%d"]' %
(XMLNS, x, ystart)).attrib['solution'].lower()
# XML is 1-based, model is 0-based
xstart -= 1
ystart -= 1
entry = Entry(puzzle=puzzle, clue=clue, answer=answer, x=xstart, y=ystart, down=down)
entry.save()
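# A rough sketch of the Crossword Compiler XML this import expects,
# reconstructed from the lookups above (values are illustrative and the outer
# element names are assumptions; all elements live in the XMLNS namespace):
#
#   <crossword-compiler>
#     <rectangular-puzzle>
#       <crossword>
#         <grid>
#           <cell x="1" y="1" solution="C"/> ...
#         </grid>
#         <word id="1" x="1-3" y="1"/>
#         <clues>
#           <clue word="1">Small feline (3)</clue>
#         </clues>
#       </crossword>
#     </rectangular-puzzle>
#   </crossword-compiler>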
def import_blank_from_ipuz(ipuz, blank):
"""Load a blank grid from an ipuz file into the database."""
data = json.loads(ipuz.read().decode('latin_1'))
for y, row in enumerate(data['puzzle']):
for x, cell in enumerate(row):
if cell == "#":
block = Block(blank=blank, x=x, y=y)
block.save()
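# A minimal sketch of the ipuz fragment read above (hypothetical 3x3 grid;
# only the 'puzzle' key is consulted, and "#" marks a blocked cell):
#
#   {"puzzle": [[0, "#", 0],
#               [0, 0, 0],
#               [0, "#", 0]]}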
class PuzzleImportForm(ModelForm):
"""Add an XML import field."""
file_import = FileField(label='Import from XML', required=False)
class Meta:
model = Puzzle
fields = ['number', 'user', 'pub_date', 'comments']
class EntryInline(admin.StackedInline):
"""Increase the length of the text field for puzzle clues."""
model = Entry
formfield_overrides = {CharField: {'widget': TextInput(attrs={'size':'100'})}}
class PuzzleAdmin(admin.ModelAdmin):
"""Show entries inline and allow import from XML"""
form = PuzzleImportForm
inlines = [EntryInline]
def save_model(self, request, obj, form, change):
super(PuzzleAdmin, self).save_model(request, obj, form, change)
xml_file = form.cleaned_data.get('file_import', None)
if xml_file:
import_from_xml(xml_file, obj)
class BlankImportForm(ModelForm):
"""Add an ipuz import field."""
file_import = FileField(label='Import from ipuz', required=False)
class Meta:
model = Blank
fields = ['display_order']
class BlockInline(admin.TabularInline):
"""Show blocks in a table."""
model = Block
class BlankAdmin(admin.ModelAdmin):
"""Show blocks inline and allow import from ipuz."""
form = BlankImportForm
inlines = [BlockInline]
save_as = True
def save_model(self, request, obj, form, change):
super(BlankAdmin, self).save_model(request, obj, form, change)
ipuz_file = form.cleaned_data.get('file_import', None)
if ipuz_file:
import_blank_from_ipuz(ipuz_file, obj)
admin.site.site_header = "Three Pins Administration"
admin.site.site_title = "Three Pins"
admin.site.register(Puzzle, PuzzleAdmin)
admin.site.register(Blank, BlankAdmin)
| mit | 8,092,349,561,535,401,000 | 36.570093 | 93 | 0.614179 | false |
ayepezv/GAD_ERP | addons/lunch/models/lunch.py | 1 | 13268 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import datetime
from odoo import api, fields, models, _
from odoo.exceptions import AccessError, ValidationError
import odoo.addons.decimal_precision as dp
class LunchOrder(models.Model):
"""
A lunch order contains one or more lunch order line(s). It is associated to a user for a given
date. When creating a lunch order, applicable lunch alerts are displayed.
"""
_name = 'lunch.order'
_description = 'Lunch Order'
_order = 'date desc'
def _default_previous_order_ids(self):
prev_order = self.env['lunch.order.line'].search([('user_id', '=', self.env.uid), ('product_id.active', '!=', False)], limit=20, order='id desc')
# If we return prev_order.ids, we will have duplicates (identical orders).
# Therefore, this following part removes duplicates based on product_id and note.
return {
(order.product_id, order.note): order.id
for order in prev_order
}.values()
user_id = fields.Many2one('res.users', 'User', required=True, readonly=True,
states={'new': [('readonly', False)]},
default=lambda self: self.env.uid)
date = fields.Date('Date', required=True, readonly=True,
states={'new': [('readonly', False)]},
default=fields.Date.context_today)
order_line_ids = fields.One2many('lunch.order.line', 'order_id', 'Products',
ondelete="cascade", readonly=True, copy=True,
states={'new': [('readonly', False)], False: [('readonly', False)]})
total = fields.Float(compute='_compute_total', string="Total", store=True)
state = fields.Selection([('new', 'New'),
('confirmed', 'Received'),
('cancelled', 'Cancelled')],
'Status', readonly=True, index=True, copy=False, default='new',
compute='_compute_order_state', store=True)
alerts = fields.Text(compute='_compute_alerts_get', string="Alerts")
previous_order_ids = fields.Many2many('lunch.order.line', compute='_compute_previous_order_ids',
default=lambda self: self._default_previous_order_ids())
company_id = fields.Many2one('res.company', related='user_id.company_id', store=True)
currency_id = fields.Many2one('res.currency', related='company_id.currency_id', readonly=True, store=True)
cash_move_balance = fields.Monetary(compute='_compute_cash_move_balance', multi='cash_move_balance')
balance_visible = fields.Boolean(compute='_compute_cash_move_balance', multi='cash_move_balance')
@api.one
@api.depends('order_line_ids')
def _compute_total(self):
"""
get and sum the order lines' price
"""
self.total = sum(
orderline.price for orderline in self.order_line_ids)
@api.multi
def name_get(self):
return [(order.id, '%s %s' % (_('Lunch Order'), '#%d' % order.id)) for order in self]
@api.depends('state')
def _compute_alerts_get(self):
"""
get the alerts to display on the order form
"""
alert_msg = [alert.message
for alert in self.env['lunch.alert'].search([])
if alert.display]
if self.state == 'new':
self.alerts = alert_msg and '\n'.join(alert_msg) or False
@api.depends('user_id')
def _compute_previous_order_ids(self):
self.previous_order_ids = self._default_previous_order_ids()
@api.one
@api.depends('user_id')
def _compute_cash_move_balance(self):
domain = [('user_id', '=', self.user_id.id)]
lunch_cash = self.env['lunch.cashmove'].read_group(domain, ['amount', 'user_id'], ['user_id'])
if len(lunch_cash):
self.cash_move_balance = lunch_cash[0]['amount']
self.balance_visible = (self.user_id == self.env.user) or self.user_has_groups('lunch.group_lunch_manager')
@api.one
@api.constrains('date')
def _check_date(self):
"""
Prevents the user from creating an order in the past
"""
date_order = datetime.datetime.strptime(self.date, '%Y-%m-%d')
date_today = datetime.datetime.strptime(fields.Date.context_today(self), '%Y-%m-%d')
if (date_order < date_today):
raise ValidationError(_('The date of your order is in the past.'))
@api.one
@api.depends('order_line_ids.state')
def _compute_order_state(self):
"""
Update the state of lunch.order based on its orderlines. Here is the logic:
- if at least one order line is cancelled, the order is set as cancelled
- if no line is cancelled but at least one line is not confirmed, the order is set as new
- if all lines are confirmed, the order is set as confirmed
"""
if not self.order_line_ids:
self.state = 'new'
else:
isConfirmed = True
for orderline in self.order_line_ids:
if orderline.state == 'cancelled':
self.state = 'cancelled'
return
elif orderline.state == 'confirmed':
continue
else:
isConfirmed = False
if isConfirmed:
self.state = 'confirmed'
else:
self.state = 'new'
return
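# Illustrative summary of the rule above (not part of the original source):
#   line states {new, confirmed}             -> order state 'new'
#   line states {confirmed, confirmed}       -> order state 'confirmed'
#   line states {confirmed, cancelled, ...}  -> order state 'cancelled'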
class LunchOrderLine(models.Model):
_name = 'lunch.order.line'
_description = 'lunch order line'
name = fields.Char(related='product_id.name', string="Product Name", readonly=True)
order_id = fields.Many2one('lunch.order', 'Order', ondelete='cascade', required=True)
product_id = fields.Many2one('lunch.product', 'Product', required=True)
category_id = fields.Many2one('lunch.product.category', string='Product Category',
related='product_id.category_id', readonly=True, store=True)
date = fields.Date(string='Date', related='order_id.date', readonly=True, store=True)
supplier = fields.Many2one('res.partner', string='Vendor', related='product_id.supplier',
readonly=True, store=True)
user_id = fields.Many2one('res.users', string='User', related='order_id.user_id',
readonly=True, store=True)
note = fields.Text('Note')
price = fields.Float(related='product_id.price', readonly=True, store=True,
digits=dp.get_precision('Account'))
state = fields.Selection([('new', 'New'),
('confirmed', 'Received'),
('ordered', 'Ordered'),
('cancelled', 'Cancelled')],
'Status', readonly=True, select=True, default='new')
cashmove = fields.One2many('lunch.cashmove', 'order_id', 'Cash Move')
currency_id = fields.Many2one('res.currency', related='order_id.currency_id')
@api.one
def order(self):
"""
The order line has been ordered from the vendor but hasn't been received yet
"""
if self.user_has_groups("lunch.group_lunch_manager"):
self.state = 'ordered'
else:
raise AccessError(_("Only your lunch manager processes the orders."))
@api.one
def confirm(self):
"""
confirm one or more order line, update order status and create new cashmove
"""
if self.user_has_groups("lunch.group_lunch_manager"):
if self.state != 'confirmed':
values = {
'user_id': self.user_id.id,
'amount': -self.price,
'description': self.product_id.name,
'order_id': self.id,
'state': 'order',
'date': self.date,
}
self.env['lunch.cashmove'].create(values)
self.state = 'confirmed'
else:
raise AccessError(_("Only your lunch manager sets the orders as received."))
@api.one
def cancel(self):
"""
cancel one or more order.line, update order status and unlink existing cashmoves
"""
if self.user_has_groups("lunch.group_lunch_manager"):
self.state = 'cancelled'
self.cashmove.unlink()
else:
raise AccessError(_("Only your lunch manager cancels the orders."))
class LunchProduct(models.Model):
""" Products available to order. A product is linked to a specific vendor. """
_name = 'lunch.product'
_description = 'lunch product'
name = fields.Char('Product', required=True)
category_id = fields.Many2one('lunch.product.category', 'Category', required=True)
description = fields.Text('Description')
price = fields.Float('Price', digits=dp.get_precision('Account'))
supplier = fields.Many2one('res.partner', 'Vendor')
active = fields.Boolean(default=True)
class LunchProductCategory(models.Model):
""" Category of the product such as pizza, sandwich, pasta, chinese, burger... """
_name = 'lunch.product.category'
_description = 'lunch product category'
name = fields.Char('Category', required=True)
class LunchCashMove(models.Model):
""" Two types of cashmoves: payment (credit) or order (debit) """
_name = 'lunch.cashmove'
_description = 'lunch cashmove'
user_id = fields.Many2one('res.users', 'User', required=True,
default=lambda self: self.env.uid)
date = fields.Date('Date', required=True, default=fields.Date.context_today)
amount = fields.Float('Amount', required=True, help='Can be positive (payment) or negative (order or payment if user wants to get his money back)')
description = fields.Text('Description', help='Can be an order or a payment')
order_id = fields.Many2one('lunch.order.line', 'Order', ondelete='cascade')
state = fields.Selection([('order', 'Order'), ('payment', 'Payment')],
'Is an order or a payment', default='payment')
@api.multi
def name_get(self):
return [(cashmove.id, '%s %s' % (_('Lunch Cashmove'), '#%d' % cashmove.id)) for cashmove in self]
class LunchAlert(models.Model):
""" Alerts to display during a lunch order. An alert can be specific to a
given day, weekly or daily. The alert is displayed from start to end hour. """
_name = 'lunch.alert'
_description = 'Lunch Alert'
display = fields.Boolean(compute='_compute_display_get')
message = fields.Text('Message', required=True)
alert_type = fields.Selection([('specific', 'Specific Day'),
('week', 'Every Week'),
('days', 'Every Day')],
string='Recurrency', required=True, select=True, default='specific')
specific_day = fields.Date('Day', default=fields.Date.context_today)
monday = fields.Boolean('Monday')
tuesday = fields.Boolean('Tuesday')
wednesday = fields.Boolean('Wednesday')
thursday = fields.Boolean('Thursday')
friday = fields.Boolean('Friday')
saturday = fields.Boolean('Saturday')
sunday = fields.Boolean('Sunday')
start_hour = fields.Float('Between', oldname='active_from', required=True, default=7)
end_hour = fields.Float('And', oldname='active_to', required=True, default=23)
active = fields.Boolean(default=True)
@api.multi
def name_get(self):
return [(alert.id, '%s %s' % (_('Alert'), '#%d' % alert.id)) for alert in self]
@api.one
def _compute_display_get(self):
"""
This method checks whether the alert can be displayed today:
if alert type is specific: compare specific_day (date) with today's date
if alert type is week: check whether today is ticked on the alert, e.g. self['monday']
if alert type is day: always applicable
result: self.display is set to True if the alert can be shown now, False otherwise
"""
days_codes = {'0': 'sunday',
'1': 'monday',
'2': 'tuesday',
'3': 'wednesday',
'4': 'thursday',
'5': 'friday',
'6': 'saturday'}
can_display_alert = {
'specific': (self.specific_day == fields.Date.context_today(self)),
'week': self[days_codes[datetime.datetime.now().strftime('%w')]],
'days': True
}
if can_display_alert[self.alert_type]:
mynow = fields.Datetime.context_timestamp(self, datetime.datetime.now())
hour_to = int(self.end_hour)
min_to = int((self.end_hour - hour_to) * 60)
to_alert = datetime.time(hour_to, min_to)
hour_from = int(self.start_hour)
min_from = int((self.start_hour - hour_from) * 60)
from_alert = datetime.time(hour_from, min_from)
if from_alert <= mynow.time() <= to_alert:
self.display = True
else:
self.display = False
| gpl-3.0 | 2,116,819,551,086,005,000 | 43.226667 | 153 | 0.579891 | false |
JockeTF/fimfarchive | tests/converters/test_alpha_beta.py | 1 | 3630 | """
Alpha to beta converter tests.
"""
#
# Fimfarchive, preserves stories from Fimfiction.
# Copyright (C) 2015 Joakim Soderlund
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import json
from copy import deepcopy
from typing import Any, Dict
import arrow
import pytest
from fimfarchive.converters import AlphaBetaConverter
from fimfarchive.flavors import MetaFormat
def to_null(data: Dict[str, Any], *keys: str) -> None:
"""
Nulls the requested keys.
"""
for key in keys:
data[key] = None
def to_utc(data: Dict[str, Any], *keys: str) -> None:
"""
Converts the requested keys to UTC time strings.
"""
for key in keys:
value = data.get(key)
if value is None:
continue
time = arrow.get(value).to('utc')
data[key] = time.isoformat()
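# Illustrative example (not from the original source):
#   d = {'date_modified': '2015-06-01T12:00:00+02:00'}
#   to_utc(d, 'date_modified')   # d['date_modified'] -> '2015-06-01T10:00:00+00:00'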
@pytest.fixture(scope='module')
def data():
"""
Returns test data from JSON.
"""
path = f'{__file__[:-3]}.json'
with open(path, 'rt') as fobj:
return json.load(fobj)
class TestAlphaBetaConverter:
"""
AlphaBetaConverter tests.
"""
@pytest.fixture
def converter(self):
"""
Returns an alpha beta converter instance.
"""
return AlphaBetaConverter()
@pytest.fixture(params=range(1))
def pair(self, request, data):
"""
Returns meta test data pairs.
"""
return data['pairs'][request.param]
@pytest.fixture
def alpha(self, pair):
"""
Returns meta in alpha format.
"""
return deepcopy(pair['alpha'])
@pytest.fixture
def beta(self, pair):
"""
Returns meta in beta format.
"""
return deepcopy(pair['beta'])
@pytest.fixture
def expected(self, beta):
"""
Returns the expected meta result.
"""
data = deepcopy(beta)
data['archive'] = {
'date_checked': None,
'date_created': None,
'date_fetched': None,
'date_updated': None,
'path': None,
}
to_null(data, 'color', 'date_published')
to_utc(data, 'date_modified', 'date_updated')
to_null(data['author'], *(
'avatar',
'bio_html',
'date_joined',
'num_blog_posts',
'num_followers',
'num_stories',
))
for chapter in data['chapters']:
to_null(chapter, 'date_published')
to_utc(chapter, 'date_modified')
data['tags'] = [
tag for tag in data['tags']
if tag['type'] in {'content', 'genre', 'series'}
]
return data
def test_conversion(self, converter, story, expected, alpha):
"""
Tests conversion of story meta from alpha to beta format.
"""
story = story.merge(flavors=[MetaFormat.ALPHA], meta=alpha)
converted = converter(story)
assert MetaFormat.BETA in converted.flavors
assert expected == converted.meta
| gpl-3.0 | -2,410,826,926,577,086,000 | 23.527027 | 71 | 0.588154 | false |
juice-ryang/online-judge | OnlineJudgeServer/db.py | 1 | 2516 | from datetime import datetime
from enum import Enum
from json import dumps
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy_utils import ChoiceType as EnumType
db = SQLAlchemy()
class JudgeStatus(Enum):
PENDING = 0
STARTED = 1
FAILED = 2
FINISHED = 3
class JudgeFeedback(db.Model):
__tablename__ = "feedback"
filename = db.Column(db.String(36), primary_key=True) # TODO: UUID
cur_idx = db.Column(db.Integer, default=0)
max_idx = db.Column(db.Integer, nullable=False)
status = db.Column(
EnumType(
JudgeStatus,
impl=db.Integer(),
),
default=JudgeStatus['PENDING'],
)
cur_json_idx = db.Column(db.Integer, default=0)
expected_output = db.Column(db.String(1024), nullable=True)
actual_output = db.Column(db.String(1024), nullable=True)
created = db.Column(db.DateTime, default=datetime.now)
updated = db.Column(db.DateTime, nullable=False)
def __setattr__(self, key, value):
super().__setattr__(key, value)
super().__setattr__('updated', datetime.now())
def __str__(self):
output = {}
for key in self.__dict__:
if key[0] == '_':
pass
elif key in ('updated', 'created', 'status'):
output[key] = str(getattr(self, key))
elif key in ('expected_output', 'actual_output'):
value = getattr(self, key)
if value is not None:
output[key] = value # XXX: JudgeFeedback._Brrrrify(value)
else:
value = getattr(self, key)
if value is not None:
output[key] = value
return dumps(output, sort_keys=True, indent=2)
@staticmethod
def _Brrrrify(inputs, before='\n', after='<br>', ignores=('\r',)):
"""please god save us."""
inputs = list(inputs)
while inputs.count(before):
inputs[inputs.index(before)] = after
for ign in ignores:
while inputs.count(ign):
inputs.remove(ign)
return ''.join(inputs)
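# Illustrative example (not from the original source):
#   JudgeFeedback._Brrrrify("a\r\nb")  ->  "a<br>b"
# Newlines become <br> tags and carriage returns are dropped before the characters
# are joined back into a string.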
def monkeypatch_db_celery(app, celery):
"""Let Celery can change the content of DB with App context."""
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
| gpl-3.0 | 3,522,701,366,097,554,400 | 30.45 | 78 | 0.569952 | false |
perkinslr/pypyjs | addedLibraries/twisted/internet/tcp.py | 1 | 42288 | # -*- test-case-name: twisted.test.test_tcp -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Various asynchronous TCP/IP classes.
End users shouldn't use this module directly - use the reactor APIs instead.
"""
from __future__ import division, absolute_import
# System Imports
import types
import socket
import sys
import operator
import struct
from zope.interface import implementer
from twisted.python.compat import _PY3, lazyByteSlice
from twisted.python.runtime import platformType
from twisted.python import versions, deprecate
try:
# Try to get the memory BIO based startTLS implementation, available since
# pyOpenSSL 0.10
from twisted.internet._newtls import (
ConnectionMixin as _TLSConnectionMixin,
ClientMixin as _TLSClientMixin,
ServerMixin as _TLSServerMixin)
except ImportError:
# There is no version of startTLS available
class _TLSConnectionMixin(object):
TLS = False
class _TLSClientMixin(object):
pass
class _TLSServerMixin(object):
pass
if platformType == 'win32':
# no such thing as WSAEPERM or error code 10001 according to winsock.h or MSDN
EPERM = object()
from errno import WSAEINVAL as EINVAL
from errno import WSAEWOULDBLOCK as EWOULDBLOCK
from errno import WSAEINPROGRESS as EINPROGRESS
from errno import WSAEALREADY as EALREADY
from errno import WSAECONNRESET as ECONNRESET
from errno import WSAEISCONN as EISCONN
from errno import WSAENOTCONN as ENOTCONN
from errno import WSAEINTR as EINTR
from errno import WSAENOBUFS as ENOBUFS
from errno import WSAEMFILE as EMFILE
# No such thing as WSAENFILE, either.
ENFILE = object()
# Nor ENOMEM
ENOMEM = object()
EAGAIN = EWOULDBLOCK
from errno import WSAECONNRESET as ECONNABORTED
from twisted.python.win32 import formatError as strerror
else:
from errno import EPERM
from errno import EINVAL
from errno import EWOULDBLOCK
from errno import EINPROGRESS
from errno import EALREADY
from errno import ECONNRESET
from errno import EISCONN
from errno import ENOTCONN
from errno import EINTR
from errno import ENOBUFS
from errno import EMFILE
from errno import ENFILE
from errno import ENOMEM
from errno import EAGAIN
from errno import ECONNABORTED
from os import strerror
from errno import errorcode
# Twisted Imports
from twisted.internet import base, address, fdesc
from twisted.internet.task import deferLater
from twisted.python import log, failure, reflect
from twisted.python.util import untilConcludes
from twisted.internet.error import CannotListenError
from twisted.internet import abstract, main, interfaces, error
# Not all platforms have, or support, this flag.
_AI_NUMERICSERV = getattr(socket, "AI_NUMERICSERV", 0)
# The type for service names passed to socket.getservbyname:
if _PY3:
_portNameType = str
else:
_portNameType = types.StringTypes
class _SocketCloser(object):
"""
@ivar _shouldShutdown: Set to C{True} if C{shutdown} should be called
before calling C{close} on the underlying socket.
@type _shouldShutdown: C{bool}
"""
_shouldShutdown = True
def _closeSocket(self, orderly):
# The call to shutdown() before close() isn't really necessary, because
# we set FD_CLOEXEC now, which will ensure this is the only process
# holding the FD, thus ensuring close() really will shutdown the TCP
# socket. However, do it anyways, just to be safe.
skt = self.socket
try:
if orderly:
if self._shouldShutdown:
skt.shutdown(2)
else:
# Set SO_LINGER to 1,0 which, by convention, causes a
# connection reset to be sent when close is called,
# instead of the standard FIN shutdown sequence.
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
struct.pack("ii", 1, 0))
except socket.error:
pass
try:
skt.close()
except socket.error:
pass
class _AbortingMixin(object):
"""
Common implementation of C{abortConnection}.
@ivar _aborting: Set to C{True} when C{abortConnection} is called.
@type _aborting: C{bool}
"""
_aborting = False
def abortConnection(self):
"""
Aborts the connection immediately, dropping any buffered data.
@since: 11.1
"""
if self.disconnected or self._aborting:
return
self._aborting = True
self.stopReading()
self.stopWriting()
self.doRead = lambda *args, **kwargs: None
self.doWrite = lambda *args, **kwargs: None
self.reactor.callLater(0, self.connectionLost,
failure.Failure(error.ConnectionAborted()))
@implementer(interfaces.ITCPTransport, interfaces.ISystemHandle)
class Connection(_TLSConnectionMixin, abstract.FileDescriptor, _SocketCloser,
_AbortingMixin):
"""
Superclass of all socket-based FileDescriptors.
This is an abstract superclass of all objects which represent a TCP/IP
connection based socket.
@ivar logstr: prefix used when logging events related to this connection.
@type logstr: C{str}
"""
def __init__(self, skt, protocol, reactor=None):
abstract.FileDescriptor.__init__(self, reactor=reactor)
self.socket = skt
self.socket.setblocking(0)
self.fileno = skt.fileno
self.protocol = protocol
def getHandle(self):
"""Return the socket for this connection."""
return self.socket
def doRead(self):
"""Calls self.protocol.dataReceived with all available data.
This reads up to self.bufferSize bytes of data from its socket, then
calls self.dataReceived(data) to process it. If the connection is not
lost through an error in the physical recv(), this function will return
the result of the dataReceived call.
"""
try:
data = self.socket.recv(self.bufferSize)
except socket.error as se:
if se.args[0] == EWOULDBLOCK:
return
else:
return main.CONNECTION_LOST
return self._dataReceived(data)
def _dataReceived(self, data):
if not data:
return main.CONNECTION_DONE
rval = self.protocol.dataReceived(data)
if rval is not None:
offender = self.protocol.dataReceived
warningFormat = (
'Returning a value other than None from %(fqpn)s is '
'deprecated since %(version)s.')
warningString = deprecate.getDeprecationWarningString(
offender, versions.Version('Twisted', 11, 0, 0),
format=warningFormat)
deprecate.warnAboutFunction(offender, warningString)
return rval
def writeSomeData(self, data):
"""
Write as much as possible of the given data to this TCP connection.
This sends up to C{self.SEND_LIMIT} bytes from C{data}. If the
connection is lost, an exception is returned. Otherwise, the number
of bytes successfully written is returned.
"""
# Limit length of buffer to try to send, because some OSes are too
# stupid to do so themselves (ahem windows)
limitedData = lazyByteSlice(data, 0, self.SEND_LIMIT)
try:
return untilConcludes(self.socket.send, limitedData)
except socket.error as se:
if se.args[0] in (EWOULDBLOCK, ENOBUFS):
return 0
else:
return main.CONNECTION_LOST
def _closeWriteConnection(self):
try:
self.socket.shutdown(1)
except socket.error:
pass
p = interfaces.IHalfCloseableProtocol(self.protocol, None)
if p:
try:
p.writeConnectionLost()
except:
f = failure.Failure()
log.err()
self.connectionLost(f)
def readConnectionLost(self, reason):
p = interfaces.IHalfCloseableProtocol(self.protocol, None)
if p:
try:
p.readConnectionLost()
except:
log.err()
self.connectionLost(failure.Failure())
else:
self.connectionLost(reason)
def connectionLost(self, reason):
"""See abstract.FileDescriptor.connectionLost().
"""
# Make sure we're not called twice, which can happen e.g. if
# abortConnection() is called from protocol's dataReceived and then
# code immediately after throws an exception that reaches the
# reactor. We can't rely on "disconnected" attribute for this check
# since twisted.internet._oldtls does evil things to it:
if not hasattr(self, "socket"):
return
abstract.FileDescriptor.connectionLost(self, reason)
self._closeSocket(not reason.check(error.ConnectionAborted))
protocol = self.protocol
del self.protocol
del self.socket
del self.fileno
protocol.connectionLost(reason)
logstr = "Uninitialized"
def logPrefix(self):
"""Return the prefix to log with when I own the logging thread.
"""
return self.logstr
def getTcpNoDelay(self):
return operator.truth(self.socket.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))
def setTcpNoDelay(self, enabled):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, enabled)
def getTcpKeepAlive(self):
return operator.truth(self.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_KEEPALIVE))
def setTcpKeepAlive(self, enabled):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, enabled)
class _BaseBaseClient(object):
"""
Code shared with other (non-POSIX) reactors for management of general
outgoing connections.
Requirements upon subclasses are documented as instance variables rather
than abstract methods, in order to avoid MRO confusion, since this base is
mixed in to unfortunately weird and distinctive multiple-inheritance
hierarchies and many of these attributes are provided by peer classes
rather than descendant classes in those hierarchies.
@ivar addressFamily: The address family constant (C{socket.AF_INET},
C{socket.AF_INET6}, C{socket.AF_UNIX}) of the underlying socket of this
client connection.
@type addressFamily: C{int}
@ivar socketType: The socket type constant (C{socket.SOCK_STREAM} or
C{socket.SOCK_DGRAM}) of the underlying socket.
@type socketType: C{int}
@ivar _requiresResolution: A flag indicating whether the address of this
client will require name resolution. C{True} if the hostname of said
address indicates a name that must be resolved by hostname lookup,
C{False} if it indicates an IP address literal.
@type _requiresResolution: C{bool}
@cvar _commonConnection: Subclasses must provide this attribute, which
indicates the L{Connection}-alike class to invoke C{__init__} and
C{connectionLost} on.
@type _commonConnection: C{type}
@ivar _stopReadingAndWriting: Subclasses must implement in order to remove
this transport from its reactor's notifications in response to a
terminated connection attempt.
@type _stopReadingAndWriting: 0-argument callable returning C{None}
@ivar _closeSocket: Subclasses must implement in order to close the socket
in response to a terminated connection attempt.
@type _closeSocket: 1-argument callable; see L{_SocketCloser._closeSocket}
@ivar _collectSocketDetails: Clean up references to the attached socket in
its underlying OS resource (such as a file descriptor or file handle),
as part of post connection-failure cleanup.
@type _collectSocketDetails: 0-argument callable returning C{None}.
@ivar reactor: The class pointed to by C{_commonConnection} should set this
attribute in its constructor.
@type reactor: L{twisted.internet.interfaces.IReactorTime},
L{twisted.internet.interfaces.IReactorCore},
L{twisted.internet.interfaces.IReactorFDSet}
"""
addressFamily = socket.AF_INET
socketType = socket.SOCK_STREAM
def _finishInit(self, whenDone, skt, error, reactor):
"""
Called by subclasses to continue to the stage of initialization where
the socket connect attempt is made.
@param whenDone: A 0-argument callable to invoke once the connection is
set up. This is C{None} if the connection could not be prepared
due to a previous error.
@param skt: The socket object to use to perform the connection.
@type skt: C{socket._socketobject}
@param error: The error to fail the connection with.
@param reactor: The reactor to use for this client.
@type reactor: L{twisted.internet.interfaces.IReactorTime}
"""
if whenDone:
self._commonConnection.__init__(self, skt, None, reactor)
reactor.callLater(0, whenDone)
else:
reactor.callLater(0, self.failIfNotConnected, error)
def resolveAddress(self):
"""
Resolve the name that was passed to this L{_BaseBaseClient}, if
necessary, and then move on to attempting the connection once an
address has been determined. (The connection will be attempted
immediately within this function if either name resolution can be
synchronous or the address was an IP address literal.)
@note: You don't want to call this method from outside, as it won't do
anything useful; it's just part of the connection bootstrapping
process. Also, although this method is on L{_BaseBaseClient} for
historical reasons, it's not used anywhere except for L{Client}
itself.
@return: C{None}
"""
if self._requiresResolution:
d = self.reactor.resolve(self.addr[0])
d.addCallback(lambda n: (n,) + self.addr[1:])
d.addCallbacks(self._setRealAddress, self.failIfNotConnected)
else:
self._setRealAddress(self.addr)
def _setRealAddress(self, address):
"""
Set the resolved address of this L{_BaseBaseClient} and initiate the
connection attempt.
@param address: Depending on whether this is an IPv4 or IPv6 connection
attempt, a 2-tuple of C{(host, port)} or a 4-tuple of C{(host,
port, flow, scope)}. At this point it is a fully resolved address,
and the 'host' portion will always be an IP address, not a DNS
name.
"""
self.realAddress = address
self.doConnect()
def failIfNotConnected(self, err):
"""
Generic method called when the attempt to connect failed. It basically
cleans everything it can: calls connectionFailed, stops reading and
writing, and deletes socket-related members.
"""
if (self.connected or self.disconnected or
not hasattr(self, "connector")):
return
self._stopReadingAndWriting()
try:
self._closeSocket(True)
except AttributeError:
pass
else:
self._collectSocketDetails()
self.connector.connectionFailed(failure.Failure(err))
del self.connector
def stopConnecting(self):
"""
If a connection attempt is still outstanding (i.e. no connection is
yet established), immediately stop attempting to connect.
"""
self.failIfNotConnected(error.UserError())
def connectionLost(self, reason):
"""
Invoked by lower-level logic when it's time to clean the socket up.
Depending on the state of the connection, either inform the attached
L{Connector} that the connection attempt has failed, or inform the
connected L{IProtocol} that the established connection has been lost.
@param reason: the reason that the connection was terminated
@type reason: L{Failure}
"""
if not self.connected:
self.failIfNotConnected(error.ConnectError(string=reason))
else:
self._commonConnection.connectionLost(self, reason)
self.connector.connectionLost(reason)
class BaseClient(_BaseBaseClient, _TLSClientMixin, Connection):
"""
A base class for client TCP (and similar) sockets.
@ivar realAddress: The address object that will be used for socket.connect;
this address is an address tuple (the number of elements dependent upon
the address family) which does not contain any names which need to be
resolved.
@type realAddress: C{tuple}
@ivar _base: L{Connection}, which is the base class of this class which has
all of the useful file descriptor methods. This is used by
L{_TLSServerMixin} to call the right methods to directly manipulate the
transport, as is necessary for writing TLS-encrypted bytes (whereas
those methods on L{Server} will go through another layer of TLS if it
has been enabled).
"""
_base = Connection
_commonConnection = Connection
def _stopReadingAndWriting(self):
"""
Implement the POSIX-ish (i.e.
L{twisted.internet.interfaces.IReactorFDSet}) method of detaching this
socket from the reactor for L{_BaseBaseClient}.
"""
if hasattr(self, "reactor"):
# this doesn't happen if we failed in __init__
self.stopReading()
self.stopWriting()
def _collectSocketDetails(self):
"""
Clean up references to the socket and its file descriptor.
@see: L{_BaseBaseClient}
"""
del self.socket, self.fileno
def createInternetSocket(self):
"""(internal) Create a non-blocking socket using
self.addressFamily, self.socketType.
"""
s = socket.socket(self.addressFamily, self.socketType)
s.setblocking(0)
fdesc._setCloseOnExec(s.fileno())
return s
def doConnect(self):
"""
Initiate the outgoing connection attempt.
@note: Applications do not need to call this method; it will be invoked
internally as part of L{IReactorTCP.connectTCP}.
"""
self.doWrite = self.doConnect
self.doRead = self.doConnect
if not hasattr(self, "connector"):
# this happens when connection failed but doConnect
# was scheduled via a callLater in self._finishInit
return
err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err:
self.failIfNotConnected(error.getConnectError((err, strerror(err))))
return
# doConnect gets called twice. The first time we actually need to
# start the connection attempt. The second time we don't really
# want to (SO_ERROR above will have taken care of any errors, and if
# it reported none, the mere fact that doConnect was called again is
# sufficient to indicate that the connection has succeeded), but it
# is not /particularly/ detrimental to do so. This should get
# cleaned up some day, though.
try:
connectResult = self.socket.connect_ex(self.realAddress)
except socket.error as se:
connectResult = se.args[0]
if connectResult:
if connectResult == EISCONN:
pass
# on Windows EINVAL means sometimes that we should keep trying:
# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winsock/winsock/connect_2.asp
elif ((connectResult in (EWOULDBLOCK, EINPROGRESS, EALREADY)) or
(connectResult == EINVAL and platformType == "win32")):
self.startReading()
self.startWriting()
return
else:
self.failIfNotConnected(error.getConnectError((connectResult, strerror(connectResult))))
return
# If I have reached this point without raising or returning, that means
# that the socket is connected.
del self.doWrite
del self.doRead
# we first stop and then start, to reset any references to the old doRead
self.stopReading()
self.stopWriting()
self._connectDone()
def _connectDone(self):
"""
This is a hook for when a connection attempt has succeeded.
Here, we build the protocol from the
L{twisted.internet.protocol.ClientFactory} that was passed in, compute
a log string, begin reading so as to send traffic to the newly built
protocol, and finally hook up the protocol itself.
This hook is overridden by L{ssl.Client} to initiate the TLS protocol.
"""
self.protocol = self.connector.buildProtocol(self.getPeer())
self.connected = 1
logPrefix = self._getLogPrefix(self.protocol)
self.logstr = "%s,client" % logPrefix
self.startReading()
self.protocol.makeConnection(self)
_NUMERIC_ONLY = socket.AI_NUMERICHOST | _AI_NUMERICSERV
def _resolveIPv6(ip, port):
"""
Resolve an IPv6 literal into an IPv6 address.
This is necessary to resolve any embedded scope identifiers to the relevant
C{sin6_scope_id} for use with C{socket.connect()}, C{socket.listen()}, or
C{socket.bind()}; see U{RFC 3493 <https://tools.ietf.org/html/rfc3493>} for
more information.
@param ip: An IPv6 address literal.
@type ip: C{str}
@param port: A port number.
@type port: C{int}
@return: a 4-tuple of C{(host, port, flow, scope)}, suitable for use as an
IPv6 address.
@raise socket.gaierror: if either the IP or port is not numeric as it
should be.
"""
return socket.getaddrinfo(ip, port, 0, 0, 0, _NUMERIC_ONLY)[0][4]
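# Illustrative example (assumed, not part of the original source):
#   _resolveIPv6("::1", 80)  ->  ("::1", 80, 0, 0)
# For a scoped literal such as "fe80::1%lo0" the final element carries the interface's
# sin6_scope_id, which connect()/bind() need for link-local addresses.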
class _BaseTCPClient(object):
"""
Code shared with other (non-POSIX) reactors for management of outgoing TCP
connections (both TCPv4 and TCPv6).
@note: In order to be functional, this class must be mixed into the same
hierarchy as L{_BaseBaseClient}. It would subclass L{_BaseBaseClient}
directly, but the class hierarchy here is divided in strange ways out
of the need to share code along multiple axes; specifically, with the
IOCP reactor and also with UNIX clients in other reactors.
@ivar _addressType: The Twisted _IPAddress implementation for this client
@type _addressType: L{IPv4Address} or L{IPv6Address}
@ivar connector: The L{Connector} which is driving this L{_BaseTCPClient}'s
connection attempt.
@ivar addr: The address that this socket will be connecting to.
@type addr: If IPv4, a 2-C{tuple} of C{(str host, int port)}. If IPv6, a
4-C{tuple} of (C{str host, int port, int ignored, int scope}).
@ivar createInternetSocket: Subclasses must implement this as a method to
create a python socket object of the appropriate address family and
socket type.
@type createInternetSocket: 0-argument callable returning
C{socket._socketobject}.
"""
_addressType = address.IPv4Address
def __init__(self, host, port, bindAddress, connector, reactor=None):
# BaseClient.__init__ is invoked later
self.connector = connector
self.addr = (host, port)
whenDone = self.resolveAddress
err = None
skt = None
if abstract.isIPAddress(host):
self._requiresResolution = False
elif abstract.isIPv6Address(host):
self._requiresResolution = False
self.addr = _resolveIPv6(host, port)
self.addressFamily = socket.AF_INET6
self._addressType = address.IPv6Address
else:
self._requiresResolution = True
try:
skt = self.createInternetSocket()
except socket.error as se:
err = error.ConnectBindError(se.args[0], se.args[1])
whenDone = None
if whenDone and bindAddress is not None:
try:
if abstract.isIPv6Address(bindAddress[0]):
bindinfo = _resolveIPv6(*bindAddress)
else:
bindinfo = bindAddress
skt.bind(bindinfo)
except socket.error as se:
err = error.ConnectBindError(se.args[0], se.args[1])
whenDone = None
self._finishInit(whenDone, skt, err, reactor)
def getHost(self):
"""
Returns an L{IPv4Address} or L{IPv6Address}.
This indicates the address from which I am connecting.
"""
return self._addressType('TCP', *self.socket.getsockname()[:2])
def getPeer(self):
"""
Returns an L{IPv4Address} or L{IPv6Address}.
This indicates the address that I am connected to.
"""
# an ipv6 realAddress has more than two elements, but the IPv6Address
# constructor still only takes two.
return self._addressType('TCP', *self.realAddress[:2])
def __repr__(self):
s = '<%s to %s at %x>' % (self.__class__, self.addr, id(self))
return s
class Client(_BaseTCPClient, BaseClient):
"""
A transport for a TCP protocol; either TCPv4 or TCPv6.
Do not create these directly; use L{IReactorTCP.connectTCP}.
"""
class Server(_TLSServerMixin, Connection):
"""
Serverside socket-stream connection class.
This is a serverside network connection transport; a socket which came from
an accept() on a server.
@ivar _base: L{Connection}, which is the base class of this class which has
all of the useful file descriptor methods. This is used by
L{_TLSServerMixin} to call the right methods to directly manipulate the
transport, as is necessary for writing TLS-encrypted bytes (whereas
those methods on L{Server} will go through another layer of TLS if it
has been enabled).
"""
_base = Connection
_addressType = address.IPv4Address
def __init__(self, sock, protocol, client, server, sessionno, reactor):
"""
Server(sock, protocol, client, server, sessionno)
Initialize it with a socket, a protocol, a descriptor for my peer (a
tuple of host, port describing the other end of the connection), an
instance of Port, and a session number.
"""
Connection.__init__(self, sock, protocol, reactor)
if len(client) != 2:
self._addressType = address.IPv6Address
self.server = server
self.client = client
self.sessionno = sessionno
self.hostname = client[0]
logPrefix = self._getLogPrefix(self.protocol)
self.logstr = "%s,%s,%s" % (logPrefix,
sessionno,
self.hostname)
if self.server is not None:
self.repstr = "<%s #%s on %s>" % (self.protocol.__class__.__name__,
self.sessionno,
self.server._realPortNumber)
self.startReading()
self.connected = 1
def __repr__(self):
"""
A string representation of this connection.
"""
return self.repstr
@classmethod
def _fromConnectedSocket(cls, fileDescriptor, addressFamily, factory,
reactor):
"""
Create a new L{Server} based on an existing connected I{SOCK_STREAM}
socket.
Arguments are the same as to L{Server.__init__}, except where noted.
@param fileDescriptor: An integer file descriptor associated with a
connected socket. The socket must be in non-blocking mode. Any
additional attributes desired, such as I{FD_CLOEXEC}, must also be
set already.
@param addressFamily: The address family (sometimes called I{domain})
of the existing socket. For example, L{socket.AF_INET}.
@return: A new instance of C{cls} wrapping the socket given by
C{fileDescriptor}.
"""
addressType = address.IPv4Address
if addressFamily == socket.AF_INET6:
addressType = address.IPv6Address
skt = socket.fromfd(fileDescriptor, addressFamily, socket.SOCK_STREAM)
addr = skt.getpeername()
protocolAddr = addressType('TCP', addr[0], addr[1])
localPort = skt.getsockname()[1]
protocol = factory.buildProtocol(protocolAddr)
if protocol is None:
skt.close()
return
self = cls(skt, protocol, addr, None, addr[1], reactor)
self.repstr = "<%s #%s on %s>" % (
self.protocol.__class__.__name__, self.sessionno, localPort)
protocol.makeConnection(self)
return self
def getHost(self):
"""
Returns an L{IPv4Address} or L{IPv6Address}.
This indicates the server's address.
"""
host, port = self.socket.getsockname()[:2]
return self._addressType('TCP', host, port)
def getPeer(self):
"""
Returns an L{IPv4Address} or L{IPv6Address}.
This indicates the client's address.
"""
return self._addressType('TCP', *self.client[:2])
@implementer(interfaces.IListeningPort)
class Port(base.BasePort, _SocketCloser):
"""
A TCP server port, listening for connections.
When a connection is accepted, this will call a factory's buildProtocol
with the incoming address as an argument, according to the specification
described in L{twisted.internet.interfaces.IProtocolFactory}.
If you wish to change the sort of transport that will be used, the
C{transport} attribute will be called with the signature expected for
C{Server.__init__}, so it can be replaced.
@ivar deferred: a deferred created when L{stopListening} is called, and
that will fire when connection is lost. This is not to be used it
directly: prefer the deferred returned by L{stopListening} instead.
@type deferred: L{defer.Deferred}
@ivar disconnecting: flag indicating that the L{stopListening} method has
been called and that no connections should be accepted anymore.
@type disconnecting: C{bool}
@ivar connected: flag set once the listen has successfully been called on
the socket.
@type connected: C{bool}
@ivar _type: A string describing the connections which will be created by
this port. Normally this is C{"TCP"}, since this is a TCP port, but
when the TLS implementation re-uses this class it overrides the value
with C{"TLS"}. Only used for logging.
@ivar _preexistingSocket: If not C{None}, a L{socket.socket} instance which
was created and initialized outside of the reactor and will be used to
listen for connections (instead of a new socket being created by this
L{Port}).
"""
socketType = socket.SOCK_STREAM
transport = Server
sessionno = 0
interface = ''
backlog = 50
_type = 'TCP'
# Actual port number being listened on, only set to a non-None
# value when we are actually listening.
_realPortNumber = None
# An externally initialized socket that we will use, rather than creating
# our own.
_preexistingSocket = None
addressFamily = socket.AF_INET
_addressType = address.IPv4Address
def __init__(self, port, factory, backlog=50, interface='', reactor=None):
"""Initialize with a numeric port to listen on.
"""
base.BasePort.__init__(self, reactor=reactor)
self.port = port
self.factory = factory
self.backlog = backlog
if abstract.isIPv6Address(interface):
self.addressFamily = socket.AF_INET6
self._addressType = address.IPv6Address
self.interface = interface
@classmethod
def _fromListeningDescriptor(cls, reactor, fd, addressFamily, factory):
"""
Create a new L{Port} based on an existing listening I{SOCK_STREAM}
socket.
Arguments are the same as to L{Port.__init__}, except where noted.
@param fd: An integer file descriptor associated with a listening
socket. The socket must be in non-blocking mode. Any additional
attributes desired, such as I{FD_CLOEXEC}, must also be set already.
@param addressFamily: The address family (sometimes called I{domain}) of
the existing socket. For example, L{socket.AF_INET}.
@return: A new instance of C{cls} wrapping the socket given by C{fd}.
"""
port = socket.fromfd(fd, addressFamily, cls.socketType)
interface = port.getsockname()[0]
self = cls(None, factory, None, interface, reactor)
self._preexistingSocket = port
return self
def __repr__(self):
if self._realPortNumber is not None:
return "<%s of %s on %s>" % (self.__class__,
self.factory.__class__, self._realPortNumber)
else:
return "<%s of %s (not listening)>" % (self.__class__, self.factory.__class__)
def createInternetSocket(self):
s = base.BasePort.createInternetSocket(self)
if platformType == "posix" and sys.platform != "cygwin":
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s
def startListening(self):
"""Create and bind my socket, and begin listening on it.
This is called on unserialization, and must be called after creating a
server to begin listening on the specified port.
"""
if self._preexistingSocket is None:
# Create a new socket and make it listen
try:
skt = self.createInternetSocket()
if self.addressFamily == socket.AF_INET6:
addr = _resolveIPv6(self.interface, self.port)
else:
addr = (self.interface, self.port)
skt.bind(addr)
except socket.error as le:
raise CannotListenError(self.interface, self.port, le)
skt.listen(self.backlog)
else:
# Re-use the externally specified socket
skt = self._preexistingSocket
self._preexistingSocket = None
# Avoid shutting it down at the end.
self._shouldShutdown = False
# Make sure that if we listened on port 0, we update that to
# reflect what the OS actually assigned us.
self._realPortNumber = skt.getsockname()[1]
log.msg("%s starting on %s" % (
self._getLogPrefix(self.factory), self._realPortNumber))
# The order of the next 5 lines is kind of bizarre. If no one
# can explain it, perhaps we should re-arrange them.
self.factory.doStart()
self.connected = True
self.socket = skt
self.fileno = self.socket.fileno
self.numberAccepts = 100
self.startReading()
def _buildAddr(self, address):
host, port = address[:2]
return self._addressType('TCP', host, port)
def doRead(self):
"""Called when my socket is ready for reading.
This accepts a connection and calls self.protocol() to handle the
wire-level protocol.
"""
try:
if platformType == "posix":
numAccepts = self.numberAccepts
else:
# win32 event loop breaks if we do more than one accept()
# in an iteration of the event loop.
numAccepts = 1
for i in range(numAccepts):
# we need this so we can deal with a factory's buildProtocol
# calling our loseConnection
if self.disconnecting:
return
try:
skt, addr = self.socket.accept()
except socket.error as e:
if e.args[0] in (EWOULDBLOCK, EAGAIN):
self.numberAccepts = i
break
elif e.args[0] == EPERM:
# Netfilter on Linux may have rejected the
# connection, but we get told to try to accept()
# anyway.
continue
elif e.args[0] in (EMFILE, ENOBUFS, ENFILE, ENOMEM, ECONNABORTED):
# Linux gives EMFILE when a process is not allowed
# to allocate any more file descriptors. *BSD and
# Win32 give (WSA)ENOBUFS. Linux can also give
# ENFILE if the system is out of inodes, or ENOMEM
# if there is insufficient memory to allocate a new
# dentry. ECONNABORTED is documented as possible on
# both Linux and Windows, but it is not clear
# whether there are actually any circumstances under
# which it can happen (one might expect it to be
# possible if a client sends a FIN or RST after the
# server sends a SYN|ACK but before application code
# calls accept(2), however at least on Linux this
# _seems_ to be short-circuited by syncookies.
log.msg("Could not accept new connection (%s)" % (
errorcode[e.args[0]],))
break
raise
fdesc._setCloseOnExec(skt.fileno())
protocol = self.factory.buildProtocol(self._buildAddr(addr))
if protocol is None:
skt.close()
continue
s = self.sessionno
self.sessionno = s+1
transport = self.transport(skt, protocol, addr, self, s, self.reactor)
protocol.makeConnection(transport)
else:
self.numberAccepts = self.numberAccepts+20
except:
# Note that in TLS mode, this will possibly catch SSL.Errors
# raised by self.socket.accept()
#
# There is no "except SSL.Error:" above because SSL may be
# None if there is no SSL support. In any case, all the
# "except SSL.Error:" suite would probably do is log.deferr()
# and return, so handling it here works just as well.
log.deferr()
def loseConnection(self, connDone=failure.Failure(main.CONNECTION_DONE)):
"""
Stop accepting connections on this port.
This will shut down the socket and call self.connectionLost(). It
returns a deferred which will fire successfully when the port is
actually closed, or with a failure if an error occurs shutting down.
"""
self.disconnecting = True
self.stopReading()
if self.connected:
self.deferred = deferLater(
self.reactor, 0, self.connectionLost, connDone)
return self.deferred
stopListening = loseConnection
def _logConnectionLostMsg(self):
"""
Log message for closing port
"""
log.msg('(%s Port %s Closed)' % (self._type, self._realPortNumber))
def connectionLost(self, reason):
"""
Cleans up the socket.
"""
self._logConnectionLostMsg()
self._realPortNumber = None
base.BasePort.connectionLost(self, reason)
self.connected = False
self._closeSocket(True)
del self.socket
del self.fileno
try:
self.factory.doStop()
finally:
self.disconnecting = False
def logPrefix(self):
"""Returns the name of my class, to prefix log entries with.
"""
return reflect.qual(self.factory.__class__)
def getHost(self):
"""
Return an L{IPv4Address} or L{IPv6Address} indicating the listening
address of this port.
"""
host, port = self.socket.getsockname()[:2]
return self._addressType('TCP', host, port)
class Connector(base.BaseConnector):
"""
A L{Connector} provides L{twisted.internet.interfaces.IConnector} for
all POSIX-style reactors.
@ivar _addressType: the type returned by L{Connector.getDestination}.
Either L{IPv4Address} or L{IPv6Address}, depending on the type of
address.
@type _addressType: C{type}
"""
_addressType = address.IPv4Address
def __init__(self, host, port, factory, timeout, bindAddress, reactor=None):
if isinstance(port, _portNameType):
try:
port = socket.getservbyname(port, 'tcp')
except socket.error as e:
raise error.ServiceNameUnknownError(string="%s (%r)" % (e, port))
self.host, self.port = host, port
#if abstract.isIPv6Address(host):
# self._addressType = address.IPv6Address
self.bindAddress = bindAddress
base.BaseConnector.__init__(self, factory, timeout, reactor)
def _makeTransport(self):
"""
Create a L{Client} bound to this L{Connector}.
@return: a new L{Client}
@rtype: L{Client}
"""
return Client(self.host, self.port, self.bindAddress, self, self.reactor)
def getDestination(self):
"""
@see: L{twisted.internet.interfaces.IConnector.getDestination}.
"""
return self._addressType('TCP', self.host, self.port)
| mit | 7,567,373,965,345,442,000 | 34.77665 | 108 | 0.623558 | false |
kantanand/insmartapps | server/learning/login/middleware.py | 1 | 2274 | # -*- coding: UTF-8 -*-
# django dependencies
from django.contrib.auth.views import redirect_to_login
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.conf import settings
# from reia import settings
# python dependencies
from re import compile
#---#
EXEMPT_URLS = [compile(settings.LOGIN_URL.lstrip('/'))]
if hasattr(settings, 'LOGIN_EXEMPT_URLS'):
EXEMPT_URLS += [compile(expr) for expr in settings.LOGIN_EXEMPT_URLS]
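# Illustrative settings snippet (assumed, not part of the original source):
#   LOGIN_EXEMPT_URLS = (
#       r'^about\.html$',
#       r'^legal/',   # everything under /legal/
#   )
# Each entry is a regex matched against request.path_info with the leading '/' stripped.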
#---#
#---------------------#
# For Login Requirend #
#---------------------#
class LoginRequiredMiddleware:
"""
Middleware that requires a user to be authenticated to view any page other
than LOGIN_URL. Exemptions to this requirement can optionally be specified
in settings via a list of regular expressions in LOGIN_EXEMPT_URLS (which
you can copy from your urls.py).
Requires authentication middleware and template context processors to be
loaded. You'll get an error if they aren't.
"""
def process_request(self, request):
assert hasattr(request, 'user'), "The Login Required middleware\
requires authentication middleware to be installed. Edit your\
MIDDLEWARE_CLASSES setting to insert\
'django.contrib.auth.middleware.AuthenticationMiddleware'. If that doesn't\
work, ensure your TEMPLATE_CONTEXT_PROCESSORS setting includes\
'django.core.context_processors.auth'."
if not request.user.is_authenticated():
path = request.path_info.lstrip('/')
if not any(m.match(path) for m in EXEMPT_URLS):
path = request.get_full_path()
return redirect_to_login(path, settings.LOGIN_URL, REDIRECT_FIELD_NAME)
#---#
#---------------------#
# For Maintenace Mode #
#---------------------#
# class MaintenanceMiddleware(object):
# """Serve a temporary redirect to a maintenance url in maintenance mode"""
# def process_request(self, request):
# if request.method == 'POST':
# if getattr(settings, 'MAINTENANCE_MODE', False) is True \
# and hasattr(settings, 'MAINTENANCE_URL'):
# # http? where is that defined?
# return http.HttpResponseRedirect(settings.MAINTENANCE_URL)
# return None
#---#
| mit | -4,536,369,468,620,033,500 | 36.278689 | 87 | 0.647318 | false |
pydata/conf_site | ansible/roles/web/templates/sensitive-settings.py | 1 | 3229 | # Passwords, API keys, and other sensitive information.
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from conf_site.settings.base import INSTALLED_APPS
DATABASES_DEFAULT = {
"ENGINE": "django.db.backends.postgresql",
"NAME": "{{ django_database }}",
"USER": "{{ database_user }}",
"PASSWORD": "{{ database_password }}",
"HOST": "{{ database_host }}",
"PORT": "",
}
SECRET_KEY = "{{ django_secret_key }}"
SESSION_COOKIE_PATH = "{{ subdirectory }}" or "/"
DATE_FORMAT = "{{ date_format }}"
TIME_FORMAT= "{{ time_format }}"
TIME_ZONE = "{{ timezone }}"
DATETIME_FORMAT= "{{ date_format }} {{ time_format }}"
DEFAULT_FROM_EMAIL = "{{ default_email }}"
SERVER_EMAIL = "{{ default_email }}"
EMAIL_USE_TLS = True
EMAIL_HOST = '{{ email_host_name }}'
EMAIL_HOST_USER = '{{ email_host_user }}'
EMAIL_HOST_PASSWORD = '{{ email_host_password }}'
EMAIL_PORT = '587'
# Determine which email backend to use. Note that previous variables
# are only relevant to the SMTP backend.
{% if postmark_api_token and environment_type != "development" %}
EMAIL_BACKEND = "anymail.backends.postmark.EmailBackend"
ANYMAIL = {
"POSTMARK_SEND_DEFAULTS": {
"esp_extra": {"MessageStream": "{{ conference_identifier }}"},
},
"POSTMARK_SERVER_TOKEN": "{{ postmark_api_token }}",
}
{% elif environment_type != "development" %}
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
{% else %}
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
{% endif %}
ALLOWED_HOSTS = ['*']
USE_X_FORWARDED_HOST = {% if subdirectory %}True{% else %}False{% endif %}
WEBSITE_URL = "{{ website_url }}"
LOGIN_URL = "{{ website_url }}/accounts/login/"
LOGO_URL = "{{ logo_url }}"
MEDIA_URL = "{{ website_url }}/media/"
STATIC_URL = "{{ website_url }}/static/"
SENTRY_PUBLIC_DSN = (
"https://{{ sentry_public_key }}@sentry.io/{{ sentry_project_id }}"
)
{% if environment_type != "development" %}
sentry_sdk.init(
dsn=SENTRY_PUBLIC_DSN,
environment="{{ environment_type }}",
integrations=[DjangoIntegration(), RedisIntegration()],
release="{{ git_status.stdout }}",
server_name="{{ conference_identifier }}",
traces_sample_rate=0.1,
)
{% endif %}
GOOGLE_ANALYTICS_PROPERTY_ID = "{{ google_analytics_id }}"
{% if github_oauth_client_id is defined %}
INSTALLED_APPS = INSTALLED_APPS + ["allauth.socialaccount.providers.github"]
{% endif %}
{% if google_oauth_client_id is defined %}
INSTALLED_APPS = INSTALLED_APPS + ["allauth.socialaccount.providers.google"]
{% endif %}
SOCIALACCOUNT_PROVIDERS = {
{% if github_oauth_client_id is defined %}"github": {
"APP": {
"client_id": "{{ github_oauth_client_id }}",
"secret": "{{ github_oauth_client_secret }}",
}
},{% endif %}
{% if google_oauth_client_id is defined %}"google": {
"APP": {
"client_id": "{{ google_oauth_client_id }}",
"secret": "{{ google_oauth_client_secret }}",
},
"SCOPE": [
"profile",
"email",
],
"AUTH_PARAMS": {
"access_type": "online",
}
},{% endif %}
}
| mit | 4,393,301,572,885,467,600 | 30.656863 | 76 | 0.619697 | false |
gauteh/ibcao_py | ibcao/tests/test_depth_map.py | 1 | 1262 | # encoding: utf-8
import common
from common import outdir, TRAVIS
import logging as ll
import unittest as ut
from ibcao import *
import cartopy.crs as ccrs
import matplotlib
import matplotlib.pyplot as plt
import os
import os.path
class IbcaoDepthTest (ut.TestCase):
def setUp (self):
self.i = IBCAO ()
def tearDown (self):
self.i.close ()
del self.i
def test_resample_depth (self):
ll.info ('testing resampling of depth')
div = 200
(x, y) = self.i.grid (div)
shp = x.shape
x = x.ravel ()
y = y.ravel ()
ll.info ('resampling to: ' + str(shp))
#z = self.i.interp_depth (x, y)
z = self.i.map_depth (x, y)
ll.info ('interpolation done')
x = x.reshape (shp)
y = y.reshape (shp)
z = z.reshape (shp)
if not TRAVIS:
# make new map with resampled grid
plt.figure ()
ax = plt.axes (projection = self.i.projection)
ax.set_xlim (*self.i.xlim)
ax.set_ylim (*self.i.ylim)
ax.coastlines ('10m')
# plot every 'div' data point
(cmap, norm) = self.i.Colormap ()
cm = ax.pcolormesh (self.i.x[::div], self.i.y[::div], z, cmap = cmap, norm = norm)
plt.colorbar (cm)
plt.savefig (os.path.join (outdir, 'resampled_map.png'))
| lgpl-3.0 | 3,748,618,860,672,520,700 | 19.688525 | 88 | 0.604596 | false |