repo_name | path | copies | size | content | license
---|---|---|---|---|---
mudbungie/NetExplorer
|
env/share/doc/networkx-1.11/examples/advanced/parallel_betweenness.py
|
51
|
2510
|
"""
Example of parallel implementation of betweenness centrality using the
multiprocessing module from Python Standard Library.
The function betweenness centrality accepts a bunch of nodes and computes
the contribution of those nodes to the betweenness centrality of the whole
network. Here we divide the network in chunks of nodes and we compute their
contribution to the betweenness centrality of the whole network.
"""
from multiprocessing import Pool
import time
import itertools
import networkx as nx
def chunks(l, n):
    """Divide an iterable of nodes `l` into chunks of size `n`."""
    l_c = iter(l)
    while True:
        x = tuple(itertools.islice(l_c, n))
        if not x:
            return
        yield x
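# A quick illustration of how `chunks` behaves (illustrative only):
#   >>> list(chunks(range(10), 4))
#   [(0, 1, 2, 3), (4, 5, 6, 7), (8, 9)]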
def _betmap(G_normalized_weight_sources_tuple):
    """Pool for multiprocessing only accepts functions with one argument.
    This function takes a single tuple argument and unpacks it when
    calling `betweenness_centrality_source`.
    """
return nx.betweenness_centrality_source(*G_normalized_weight_sources_tuple)
def betweenness_centrality_parallel(G, processes=None):
"""Parallel betweenness centrality function"""
p = Pool(processes=processes)
node_divisor = len(p._pool)*4
    node_chunks = list(chunks(G.nodes(), max(1, G.order() // node_divisor)))
num_chunks = len(node_chunks)
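    # Each worker receives one tuple per chunk: (G, normalized, weight, sources),
    # matching the positional arguments of nx.betweenness_centrality_source.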
bt_sc = p.map(_betmap,
zip([G]*num_chunks,
[True]*num_chunks,
[None]*num_chunks,
node_chunks))
# Reduce the partial solutions
bt_c = bt_sc[0]
for bt in bt_sc[1:]:
for n in bt:
bt_c[n] += bt[n]
return bt_c
if __name__ == "__main__":
G_ba = nx.barabasi_albert_graph(1000, 3)
G_er = nx.gnp_random_graph(1000, 0.01)
G_ws = nx.connected_watts_strogatz_graph(1000, 4, 0.1)
for G in [G_ba, G_er, G_ws]:
print("")
print("Computing betweenness centrality for:")
print(nx.info(G))
print("\tParallel version")
start = time.time()
bt = betweenness_centrality_parallel(G)
print("\t\tTime: %.4F" % (time.time()-start))
print("\t\tBetweenness centrality for node 0: %.5f" % (bt[0]))
print("\tNon-Parallel version")
start = time.time()
bt = nx.betweenness_centrality(G)
print("\t\tTime: %.4F seconds" % (time.time()-start))
print("\t\tBetweenness centrality for node 0: %.5f" % (bt[0]))
print("")
|
mit
|
vmturbo/nova
|
nova/cmd/idmapshift.py
|
9
|
7512
|
# Copyright 2014 Rackspace, Andrew Melton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
##########
IDMapShift
##########
IDMapShift is a tool that properly sets the ownership of a filesystem for use
with linux user namespaces.
=====
Usage
=====
nova-idmapshift -i -u 0:10000:2000 -g 0:10000:2000 path
This command will idempotently shift `path` to proper ownership using
the provided uid and gid mappings.
=========
Arguments
=========
nova-idmapshift -i -c -d -v
-u [[guest-uid:host-uid:count],...]
-g [[guest-gid:host-gid:count],...]
-n [nobody-id]
path
path: Root path of the filesystem to be shifted
-i, --idempotent: Shift operation will only be performed if filesystem
appears unshifted
-c, --confirm: Will perform check on filesystem
Returns 0 when filesystem appears shifted
Returns 1 when filesystem appears unshifted
-d, --dry-run: Print chown operations, but won't perform them
-v, --verbose: Print chown operations while performing them
-u, --uid: User ID mappings, maximum of 3 ranges
-g, --gid: Group ID mappings, maximum of 3 ranges
-n, --nobody: ID to map all unmapped uids and gids to.
=======
Purpose
=======
When using user namespaces with linux containers, the filesystem of the
container must be owned by the targeted user and group ids being applied
to that container. Otherwise, processes inside the container won't be able
to access the filesystem.
For example, when using the id map string '0:10000:2000', this means that
user ids inside the container between 0 and 1999 will map to user ids on
the host between 10000 and 11999. Root (0) becomes 10000, user 1 becomes
10001, user 50 becomes 10050 and user 1999 becomes 11999. This means that
files that are owned by root need to actually be owned by user 10000, and
files owned by 50 need to be owned by 10050, and so on.
IDMapShift will take the uid and gid strings used for user namespaces and
properly set up the filesystem for use by those users. Uids and gids outside
of provided ranges will be mapped to nobody (max uid/gid) so that they are
inaccessible inside the container.
"""
import argparse
import os
import sys
from nova.i18n import _
NOBODY_ID = 65534
def find_target_id(fsid, mappings, nobody, memo):
if fsid not in memo:
for start, target, count in mappings:
if start <= fsid < start + count:
memo[fsid] = (fsid - start) + target
break
else:
memo[fsid] = nobody
return memo[fsid]
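# A worked example of the mapping arithmetic described in the docstring
# (values illustrative):
#   find_target_id(50, [(0, 10000, 2000)], NOBODY_ID, {})   -> 10050
#     (50 falls in [0, 2000), so 50 - 0 + 10000)
#   find_target_id(5000, [(0, 10000, 2000)], NOBODY_ID, {}) -> 65534
#     (outside every range, so it maps to nobody)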
def print_chown(path, uid, gid, target_uid, target_gid):
print('%s %s:%s -> %s:%s' % (path, uid, gid, target_uid, target_gid))
def shift_path(path, uid_mappings, gid_mappings, nobody, uid_memo, gid_memo,
dry_run=False, verbose=False):
stat = os.lstat(path)
uid = stat.st_uid
gid = stat.st_gid
target_uid = find_target_id(uid, uid_mappings, nobody, uid_memo)
target_gid = find_target_id(gid, gid_mappings, nobody, gid_memo)
if verbose:
print_chown(path, uid, gid, target_uid, target_gid)
if not dry_run:
os.lchown(path, target_uid, target_gid)
def shift_dir(fsdir, uid_mappings, gid_mappings, nobody,
dry_run=False, verbose=False):
uid_memo = dict()
gid_memo = dict()
def shift_path_short(p):
shift_path(p, uid_mappings, gid_mappings, nobody,
dry_run=dry_run, verbose=verbose,
uid_memo=uid_memo, gid_memo=gid_memo)
shift_path_short(fsdir)
for root, dirs, files in os.walk(fsdir):
for d in dirs:
path = os.path.join(root, d)
shift_path_short(path)
for f in files:
path = os.path.join(root, f)
shift_path_short(path)
def confirm_path(path, uid_ranges, gid_ranges, nobody):
stat = os.lstat(path)
uid = stat.st_uid
gid = stat.st_gid
    uid_in_range = (uid == nobody)
    gid_in_range = (gid == nobody)
if not uid_in_range or not gid_in_range:
for (start, end) in uid_ranges:
if start <= uid <= end:
uid_in_range = True
break
for (start, end) in gid_ranges:
if start <= gid <= end:
gid_in_range = True
break
return uid_in_range and gid_in_range
def get_ranges(maps):
return [(target, target + count - 1) for (start, target, count) in maps]
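# For example, get_ranges([(0, 10000, 2000)]) returns [(10000, 11999)]:
# the host-side uid/gid range that a correctly shifted file must fall into.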
def confirm_dir(fsdir, uid_mappings, gid_mappings, nobody):
uid_ranges = get_ranges(uid_mappings)
gid_ranges = get_ranges(gid_mappings)
if not confirm_path(fsdir, uid_ranges, gid_ranges, nobody):
return False
for root, dirs, files in os.walk(fsdir):
for d in dirs:
path = os.path.join(root, d)
if not confirm_path(path, uid_ranges, gid_ranges, nobody):
return False
for f in files:
path = os.path.join(root, f)
if not confirm_path(path, uid_ranges, gid_ranges, nobody):
return False
return True
def id_map_type(val):
maps = val.split(',')
id_maps = []
for m in maps:
map_vals = m.split(':')
if len(map_vals) != 3:
msg = ('Invalid id map %s, correct syntax is '
'guest-id:host-id:count.')
raise argparse.ArgumentTypeError(msg % val)
try:
vals = [int(i) for i in map_vals]
except ValueError:
msg = 'Invalid id map %s, values must be integers' % val
raise argparse.ArgumentTypeError(msg)
id_maps.append(tuple(vals))
return id_maps
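# Illustration: id_map_type('0:10000:2000,500:20000:100') parses to
# [(0, 10000, 2000), (500, 20000, 100)].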
def main():
parser = argparse.ArgumentParser(
description=_('nova-idmapshift is a tool that properly '
'sets the ownership of a filesystem for '
'use with linux user namespaces. '
'This tool can only be used with linux '
'lxc containers. See the man page for '
'details.'))
parser.add_argument('path')
parser.add_argument('-u', '--uid', type=id_map_type, default=[])
parser.add_argument('-g', '--gid', type=id_map_type, default=[])
parser.add_argument('-n', '--nobody', default=NOBODY_ID, type=int)
parser.add_argument('-i', '--idempotent', action='store_true')
parser.add_argument('-c', '--confirm', action='store_true')
parser.add_argument('-d', '--dry-run', action='store_true')
parser.add_argument('-v', '--verbose', action='store_true')
args = parser.parse_args()
if args.idempotent or args.confirm:
if confirm_dir(args.path, args.uid, args.gid, args.nobody):
sys.exit(0)
else:
if args.confirm:
sys.exit(1)
shift_dir(args.path, args.uid, args.gid, args.nobody,
dry_run=args.dry_run, verbose=args.verbose)
|
apache-2.0
|
gundalow/ansible-modules-extras
|
cloud/cloudstack/cs_configuration.py
|
32
|
8629
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cs_configuration
short_description: Manages configuration on Apache CloudStack based clouds.
description:
- Manages global, zone, account, storage and cluster configurations.
version_added: "2.1"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the configuration.
required: true
value:
description:
- Value of the configuration.
required: true
account:
description:
- Ensure the value for corresponding account.
required: false
default: null
domain:
description:
- Domain the account is related to.
- Only considered if C(account) is used.
required: false
default: ROOT
zone:
description:
- Ensure the value for corresponding zone.
required: false
default: null
storage:
description:
- Ensure the value for corresponding storage pool.
required: false
default: null
cluster:
description:
- Ensure the value for corresponding cluster.
required: false
default: null
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Ensure global configuration
- local_action:
module: cs_configuration
name: router.reboot.when.outofband.migrated
value: false
# Ensure zone configuration
- local_action:
module: cs_configuration
name: router.reboot.when.outofband.migrated
zone: ch-gva-01
value: true
# Ensure storage configuration
- local_action:
module: cs_configuration
name: storage.overprovisioning.factor
storage: storage01
value: 2.0
# Ensure account configuration
- local_action:
    module: cs_configuration
name: allow.public.user.templates
value: false
account: acme inc
domain: customers
'''
RETURN = '''
---
category:
description: Category of the configuration.
returned: success
type: string
sample: Advanced
scope:
description: Scope (zone/cluster/storagepool/account) of the parameter that needs to be updated.
returned: success
type: string
sample: storagepool
description:
description: Description of the configuration.
returned: success
type: string
sample: Setup the host to do multipath
name:
description: Name of the configuration.
returned: success
type: string
sample: zone.vlan.capacity.notificationthreshold
value:
description: Value of the configuration.
returned: success
type: string
sample: "0.75"
account:
description: Account of the configuration.
returned: success
type: string
sample: admin
domain:
description: Domain of account of the configuration.
returned: success
type: string
sample: ROOT
zone:
description: Zone of the configuration.
returned: success
type: string
sample: ch-gva-01
cluster:
description: Cluster of the configuration.
returned: success
type: string
sample: cluster01
storage:
description: Storage of the configuration.
returned: success
type: string
sample: storage01
'''
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackConfiguration(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackConfiguration, self).__init__(module)
self.returns = {
'category': 'category',
'scope': 'scope',
'value': 'value',
}
self.storage = None
self.account = None
self.cluster = None
def _get_common_configuration_args(self):
args = {}
args['name'] = self.module.params.get('name')
args['accountid'] = self.get_account(key='id')
args['storageid'] = self.get_storage(key='id')
args['zoneid'] = self.get_zone(key='id')
args['clusterid'] = self.get_cluster(key='id')
return args
def get_zone(self, key=None):
        # make sure we do not use the default zone
zone = self.module.params.get('zone')
if zone:
return super(AnsibleCloudStackConfiguration, self).get_zone(key=key)
def get_cluster(self, key=None):
if not self.cluster:
cluster_name = self.module.params.get('cluster')
if not cluster_name:
return None
args = {}
args['name'] = cluster_name
clusters = self.cs.listClusters(**args)
if clusters:
self.cluster = clusters['cluster'][0]
self.result['cluster'] = self.cluster['name']
else:
self.module.fail_json(msg="Cluster %s not found." % cluster_name)
return self._get_by_key(key=key, my_dict=self.cluster)
def get_storage(self, key=None):
if not self.storage:
storage_pool_name = self.module.params.get('storage')
if not storage_pool_name:
return None
args = {}
args['name'] = storage_pool_name
storage_pools = self.cs.listStoragePools(**args)
if storage_pools:
self.storage = storage_pools['storagepool'][0]
self.result['storage'] = self.storage['name']
else:
self.module.fail_json(msg="Storage pool %s not found." % storage_pool_name)
return self._get_by_key(key=key, my_dict=self.storage)
def get_configuration(self):
configuration = None
args = self._get_common_configuration_args()
configurations = self.cs.listConfigurations(**args)
if not configurations:
self.module.fail_json(msg="Configuration %s not found." % args['name'])
configuration = configurations['configuration'][0]
return configuration
def get_value(self):
value = str(self.module.params.get('value'))
if value in ('True', 'False'):
value = value.lower()
return value
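    # Note: a YAML boolean passed to C(value) arrives here as the string
    # 'True'/'False' after str(); CloudStack expects lowercase 'true'/'false',
    # hence the conversion above.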
def present_configuration(self):
configuration = self.get_configuration()
args = self._get_common_configuration_args()
args['value'] = self.get_value()
if self.has_changed(args, configuration, ['value']):
self.result['changed'] = True
if not self.module.check_mode:
res = self.cs.updateConfiguration(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
configuration = res['configuration']
return configuration
def get_result(self, configuration):
self.result = super(AnsibleCloudStackConfiguration, self).get_result(configuration)
if self.account:
self.result['account'] = self.account['name']
self.result['domain'] = self.domain['path']
elif self.zone:
self.result['zone'] = self.zone['name']
return self.result
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
        name=dict(required=True),
        value=dict(type='str', required=True),
        zone=dict(default=None),
        storage=dict(default=None),
        cluster=dict(default=None),
        account=dict(default=None),
        domain=dict(default='ROOT')
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
try:
acs_configuration = AnsibleCloudStackConfiguration(module)
configuration = acs_configuration.present_configuration()
result = acs_configuration.get_result(configuration)
except CloudStackException as e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
Intel-bigdata/SSM
|
supports/integration-test/ssm_generate_test_data.py
|
4
|
3241
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This script will be used to create test data set. It is also called by:
- test_small_file_rule.py
- test_small_file_actions.py
"""
import sys
import ast
import os
import re
import time
import argparse
from util import *
def create_test_set(file_set_nums, file_size, base_dir, debug):
created_files = []
cids = []
size_in_byte = file_size * 1024
for i in file_set_nums:
if debug:
print("DEBUG: Current file set number: " + str(i) + "; each file size: " + str(file_size) + "KB")
created_files_dir = base_dir + os.sep + "data_" + str(i)
for j in range(0, i):
file_name = created_files_dir + os.sep + "file_" + str(j)
cid = create_file(file_name, size_in_byte)
cids.append(cid)
created_files.append("'" + file_name + "'")
if debug:
print("**********Action " + str(cid) + " Submitted**********")
wait_for_cmdlets(cids)
time.sleep(1)
return "[" + ','.join(created_files) + "]"
if __name__ == '__main__':
# Parse arguments
parser = argparse.ArgumentParser(description='Generate test data set for SSM.')
parser.add_argument("-b", "--dataSetNums", default='[10]', dest="dataSetNums",
help="file number of test data sets, string input, e.g. '[10,100,1000]', Default Value: [10].")
parser.add_argument("-s", "--fileSize", default='1MB', dest="fileSize",
help="size of each file, e.g. 10MB, 10KB, default unit KB, Default Value 1KB.")
parser.add_argument("-d", "--testDir", default=TEST_DIR, dest="testDir",
help="Test data set directory, Default Value: TEST_DIR in util.py")
parser.add_argument("--debug", nargs='?', const=1, default=0, dest="debug",
help="print debug info, Default Value: 0")
options = parser.parse_args()
# Convert arguments
try:
DEBUG = options.debug
data_set_nums = ast.literal_eval(options.dataSetNums)
file_size_arg = options.fileSize
m = re.match(r"(\d+)(\w{2}).*", file_size_arg)
if m:
size = int(m.group(1))
sizeUnit = m.group(2)
if sizeUnit != "MB" and sizeUnit != "KB":
print("Wrong Size Unit")
print("Usage: python3 ssm_generate_test_data -h")
sys.exit(1)
if sizeUnit == "MB":
size = size * 1024
else:
print("Wrong Size Input, e.g. 1MB or 1KB")
sys.exit(1)
if options.testDir:
        if options.testDir[-1] == '/':
test_dir_prefix = options.testDir[:-1]
else:
test_dir_prefix = options.testDir
else:
raise SystemExit
    if DEBUG:
        print("DEBUG: file set nums: " + options.dataSetNums + ", each file size: " + str(size) + sizeUnit
              + ", test data directory prefix: " + test_dir_prefix)
    except (ValueError, SystemExit) as e:
        print("Usage: python3 ssm_generate_test_data -h")
        sys.exit(1)
    except IndexError:
        pass
create_test_set(data_set_nums, size, test_dir_prefix, DEBUG)
|
apache-2.0
|
OpenPIV/openpiv-python
|
openpiv/pyprocess.py
|
2
|
29417
|
import numpy.lib.stride_tricks
import numpy as np
from scipy.fft import rfft2, irfft2, fftshift
from numpy import ma
from scipy.signal import convolve2d
from numpy import log
"""This module contains a pure python implementation of the basic
cross-correlation algorithm for PIV image processing."""
__licence_ = """
Copyright (C) 2011 www.openpiv.net
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
def get_coordinates(image_size, search_area_size, overlap):
"""Compute the x, y coordinates of the centers of the interrogation windows.
the origin (0,0) is like in the image, top left corner
positive x is an increasing column index from left to right
positive y is increasing row index, from top to bottom
Parameters
----------
image_size: two elements tuple
a two dimensional tuple for the pixel size of the image
first element is number of rows, second element is
the number of columns.
search_area_size: int
the size of the search area windows, sometimes it's equal to
the interrogation window size in both frames A and B
    overlap: int
        the number of pixels by which two adjacent interrogation
        windows overlap [default: 0, i.e. no overlap].
Returns
-------
x : 2d np.ndarray
a two dimensional array containing the x coordinates of the
interrogation window centers, in pixels.
y : 2d np.ndarray
a two dimensional array containing the y coordinates of the
interrogation window centers, in pixels.
Coordinate system 0,0 is at the top left corner, positive
x to the right, positive y from top downwards, i.e.
image coordinate system
"""
# get shape of the resulting flow field
field_shape = get_field_shape(image_size,
search_area_size,
overlap)
# compute grid coordinates of the search area window centers
# note the field_shape[1] (columns) for x
x = (
np.arange(field_shape[1]) * (search_area_size - overlap)
+ (search_area_size) / 2.0
)
# note the rows in field_shape[0]
y = (
np.arange(field_shape[0]) * (search_area_size - overlap)
+ (search_area_size) / 2.0
)
# moving coordinates further to the center, so that the points at the
# extreme left/right or top/bottom
# have the same distance to the window edges. For simplicity only integer
# movements are allowed.
x += (
image_size[1]
- 1
- ((field_shape[1] - 1) * (search_area_size - overlap) +
(search_area_size - 1))
) // 2
y += (
image_size[0] - 1
- ((field_shape[0] - 1) * (search_area_size - overlap) +
(search_area_size - 1))
) // 2
# the origin 0,0 is at top left
# the units are pixels
return np.meshgrid(x, y)
def get_field_shape(image_size, search_area_size, overlap):
"""Compute the shape of the resulting flow field.
Given the image size, the interrogation window size and
the overlap size, it is possible to calculate the number
of rows and columns of the resulting flow field.
Parameters
----------
image_size: two elements tuple
a two dimensional tuple for the pixel size of the image
first element is number of rows, second element is
the number of columns, easy to obtain using .shape
search_area_size: tuple
the size of the interrogation windows (if equal in frames A,B)
or the search area (in frame B), the largest of the two
    overlap: tuple
        the number of pixels by which two adjacent interrogation
        windows overlap.
Returns
-------
    field_shape : two elements tuple
        the shape of the resulting flow field (number of rows, number of columns)
"""
field_shape = (np.array(image_size) - np.array(search_area_size)) // (
np.array(search_area_size) - np.array(overlap)
) + 1
return field_shape
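# Quick check of the formula above: a 512x512 image with search_area_size=32
# and overlap=16 gives (512 - 32) // (32 - 16) + 1 = 31 rows and 31 columns.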
def moving_window_array(array, window_size, overlap):
"""
This is a nice numpy trick. The concept of numpy strides should be
clear to understand this code.
Basically, we have a 2d array and we want to perform cross-correlation
over the interrogation windows. An approach could be to loop over the array
but loops are expensive in python. So we create from the array a new array
with three dimension, of size (n_windows, window_size, window_size), in
which each slice, (along the first axis) is an interrogation window.
"""
sz = array.itemsize
shape = array.shape
array = np.ascontiguousarray(array)
strides = (
sz * shape[1] * (window_size - overlap),
sz * (window_size - overlap),
sz * shape[1],
sz,
)
shape = (
int((shape[0] - window_size) / (window_size - overlap)) + 1,
int((shape[1] - window_size) / (window_size - overlap)) + 1,
window_size,
window_size,
)
return numpy.lib.stride_tricks.as_strided(
array, strides=strides, shape=shape
).reshape(-1, window_size, window_size)
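# Shape sanity check: a 64x64 array with window_size=32 and overlap=16 yields
# ((64 - 32) // 16 + 1) ** 2 = 9 windows, i.e. an output of shape (9, 32, 32).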
def find_first_peak(corr):
"""
Find row and column indices of the first correlation peak.
Parameters
----------
    corr : np.ndarray
        the correlation map of the strided images (N,K,M) where
        N is the number of windows and KxM is the interrogation window size
Returns
-------
(i,j) : integers, index of the peak position
peak : amplitude of the peak
"""
return np.unravel_index(np.argmax(corr), corr.shape), corr.max()
def find_second_peak(corr, i=None, j=None, width=2):
"""
    Find the value of the second largest peak.
    The second largest peak is the height of the peak in the region
    outside a (2*width+1) x (2*width+1) submatrix around the first
    correlation peak.
Parameters
----------
corr: np.ndarray
the correlation map.
i,j : ints
row and column location of the first peak.
width : int
the half size of the region around the first correlation
peak to ignore for finding the second peak.
Returns
-------
i : int
the row index of the second correlation peak.
j : int
the column index of the second correlation peak.
corr_max2 : int
the value of the second correlation peak.
"""
if i is None or j is None:
(i, j), tmp = find_first_peak(corr)
# create a masked view of the corr
tmp = corr.view(ma.MaskedArray)
# set width x width square submatrix around the first correlation peak as
# masked.
# Before check if we are not too close to the boundaries, otherwise we
# have negative indices
iini = max(0, i - width)
ifin = min(i + width + 1, corr.shape[0])
jini = max(0, j - width)
jfin = min(j + width + 1, corr.shape[1])
tmp[iini:ifin, jini:jfin] = ma.masked
(i, j), corr_max2 = find_first_peak(tmp)
return (i, j), corr_max2
def find_subpixel_peak_position(corr, subpixel_method="gaussian"):
"""
Find subpixel approximation of the correlation peak.
This function returns a subpixels approximation of the correlation
peak by using one of the several methods available. If requested,
the function also returns the signal to noise ratio level evaluated
from the correlation map.
Parameters
----------
corr : np.ndarray
the correlation map.
subpixel_method : string
one of the following methods to estimate subpixel location of the
peak:
'centroid' [replaces default if correlation map is negative],
'gaussian' [default if correlation map is positive],
'parabolic'.
Returns
-------
subp_peak_position : two elements tuple
the fractional row and column indices for the sub-pixel
approximation of the correlation peak.
If the first peak is on the border of the correlation map
or any other problem, the returned result is a tuple of NaNs.
"""
# initialization
# default_peak_position = (np.floor(corr.shape[0] / 2.),
# np.floor(corr.shape[1] / 2.))
# default_peak_position = np.array([0,0])
eps = 1e-7
# subp_peak_position = tuple(np.floor(np.array(corr.shape)/2))
subp_peak_position = (np.nan, np.nan) # any wrong position will mark nan
# check inputs
if subpixel_method not in ("gaussian", "centroid", "parabolic"):
raise ValueError(f"Method not implemented {subpixel_method}")
# the peak locations
(peak1_i, peak1_j), _ = find_first_peak(corr)
# import pdb; pdb.set_trace()
# the peak and its neighbours: left, right, down, up
# but we have to make sure that peak is not at the border
# @ErichZimmer noticed this bug for the small windows
if ((peak1_i == 0) | (peak1_i == corr.shape[0]-1) |
(peak1_j == 0) | (peak1_j == corr.shape[1]-1)):
return subp_peak_position
else:
corr += eps # prevents log(0) = nan if "gaussian" is used (notebook)
c = corr[peak1_i, peak1_j]
cl = corr[peak1_i - 1, peak1_j]
cr = corr[peak1_i + 1, peak1_j]
cd = corr[peak1_i, peak1_j - 1]
cu = corr[peak1_i, peak1_j + 1]
# gaussian fit
if np.logical_and(np.any(np.array([c, cl, cr, cd, cu]) < 0),
subpixel_method == "gaussian"):
subpixel_method = "parabolic"
# try:
if subpixel_method == "centroid":
subp_peak_position = (
((peak1_i - 1) * cl + peak1_i * c + (peak1_i + 1) * cr) /
(cl + c + cr),
((peak1_j - 1) * cd + peak1_j * c + (peak1_j + 1) * cu) /
(cd + c + cu),
)
elif subpixel_method == "gaussian":
nom1 = log(cl) - log(cr)
den1 = 2 * log(cl) - 4 * log(c) + 2 * log(cr)
nom2 = log(cd) - log(cu)
den2 = 2 * log(cd) - 4 * log(c) + 2 * log(cu)
subp_peak_position = (
peak1_i + np.divide(nom1, den1, out=np.zeros(1),
where=(den1 != 0.0))[0],
peak1_j + np.divide(nom2, den2, out=np.zeros(1),
where=(den2 != 0.0))[0],
)
elif subpixel_method == "parabolic":
subp_peak_position = (
peak1_i + (cl - cr) / (2 * cl - 4 * c + 2 * cr),
peak1_j + (cd - cu) / (2 * cd - 4 * c + 2 * cu),
)
return subp_peak_position
def sig2noise_ratio(correlation, sig2noise_method="peak2peak", width=2):
"""
Computes the signal to noise ratio from the correlation map.
The signal to noise ratio is computed from the correlation map with
one of two available method. It is a measure of the quality of the
matching between to interrogation windows.
Parameters
----------
corr : 3d np.ndarray
the correlation maps of the image pair, concatenated along 0th axis
sig2noise_method: string
the method for evaluating the signal to noise ratio value from
the correlation map. Can be `peak2peak`, `peak2mean` or None
if no evaluation should be made.
width : int, optional
the half size of the region around the first
correlation peak to ignore for finding the second
peak. [default: 2]. Only used if ``sig2noise_method==peak2peak``.
Returns
-------
sig2noise : np.array
the signal to noise ratios from the correlation maps.
"""
sig2noise = np.zeros(correlation.shape[0])
corr_max1 = np.zeros(correlation.shape[0])
corr_max2 = np.zeros(correlation.shape[0])
if sig2noise_method == "peak2peak":
for i, corr in enumerate(correlation):
# compute first peak position
(peak1_i, peak1_j), corr_max1[i] = find_first_peak(corr)
            condition = (
                corr_max1[i] < 1e-3
                or peak1_i == 0
                or peak1_i == corr.shape[0] - 1
                or peak1_j == 0
                or peak1_j == corr.shape[1] - 1
            )
if condition:
# return zero, since we have no signal.
# no point to get the second peak, save time
sig2noise[i] = 0.0
else:
# find second peak height
(peak2_i, peak2_j), corr_max2 = find_second_peak(
corr, peak1_i, peak1_j, width=width
)
                condition = (
                    corr_max2 == 0
                    or peak2_i == 0
                    or peak2_i == corr.shape[0] - 1
                    or peak2_j == 0
                    or peak2_j == corr.shape[1] - 1
                )
if condition: # mark failed peak2
corr_max2 = np.nan
sig2noise[i] = corr_max1[i] / corr_max2
elif sig2noise_method == "peak2mean": # only one loop
for i, corr in enumerate(correlation):
# compute first peak position
(peak1_i, peak1_j), corr_max1[i] = find_first_peak(corr)
            condition = (
                corr_max1[i] < 1e-3
                or peak1_i == 0
                or peak1_i == corr.shape[0] - 1
                or peak1_j == 0
                or peak1_j == corr.shape[1] - 1
            )
if condition:
# return zero, since we have no signal.
# no point to get the second peak, save time
sig2noise[i] = 0.0
# find means of all the correlation maps
corr_max2 = np.abs(correlation.mean(axis=(-2, -1)))
corr_max2[corr_max2 == 0] = np.nan # mark failed ones
sig2noise = corr_max1 / corr_max2
else:
raise ValueError("wrong sig2noise_method")
# sig2noise is zero for all failed ones
sig2noise[np.isnan(sig2noise)] = 0.0
return sig2noise
def fft_correlate_images(image_a, image_b,
correlation_method="circular",
normalized_correlation=True):
""" FFT based cross correlation
of two images with multiple views of np.stride_tricks()
The 2D FFT should be applied to the last two axes (-2,-1) and the
zero axis is the number of the interrogation window
This should also work out of the box for rectangular windows.
Parameters
----------
image_a : 3d np.ndarray, first dimension is the number of windows,
and two last dimensions are interrogation windows of the first image
image_b : similar
correlation_method : string
one of the three methods implemented: 'circular' or 'linear'
[default: 'circular].
normalized_correlation : string
decides wetehr normalized correlation is done or not: True or False
[default: True].
"""
if normalized_correlation:
# remove the effect of stronger laser or
# longer exposure for frame B
# image_a = match_histograms(image_a, image_b)
# remove mean background, normalize to 0..1 range
image_a = normalize_intensity(image_a)
image_b = normalize_intensity(image_b)
s1 = np.array(image_a.shape[-2:])
s2 = np.array(image_b.shape[-2:])
if correlation_method == "linear":
# have to be normalized, mainly because of zero padding
size = s1 + s2 - 1
fsize = 2 ** np.ceil(np.log2(size)).astype(int)
fslice = (slice(0, image_a.shape[0]),
slice((fsize[0]-s1[0])//2, (fsize[0]+s1[0])//2),
slice((fsize[1]-s1[1])//2, (fsize[1]+s1[1])//2))
f2a = rfft2(image_a, fsize, axes=(-2, -1)).conj()
f2b = rfft2(image_b, fsize, axes=(-2, -1))
corr = fftshift(irfft2(f2a * f2b).real, axes=(-2, -1))[fslice]
elif correlation_method == "circular":
corr = fftshift(irfft2(rfft2(image_a).conj() *
rfft2(image_b)).real, axes=(-2, -1))
    else:
        raise ValueError(f"correlation method not implemented: {correlation_method}")
if normalized_correlation:
corr = corr/(s2[0]*s2[1]) # for extended search area
corr = np.clip(corr, 0, 1)
return corr
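# A minimal usage sketch (window/overlap values illustrative):
#   aa = moving_window_array(frame_a, 32, 16)   # shape (n_windows, 32, 32)
#   bb = moving_window_array(frame_b, 32, 16)
#   corr = fft_correlate_images(aa, bb)         # one correlation map per window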
def normalize_intensity(window):
"""Normalize interrogation window or strided image of many windows,
by removing the mean intensity value per window and clipping the
negative values to zero
Parameters
----------
window : 2d np.ndarray
the interrogation window array
Returns
-------
window : 2d np.ndarray
the interrogation window array, with mean value equal to zero and
intensity normalized to -1 +1 and clipped if some pixels are
extra low/high
"""
window = window.astype(np.float32)
window -= window.mean(axis=(-2, -1),
keepdims=True, dtype=np.float32)
tmp = window.std(axis=(-2, -1), keepdims=True)
window = np.divide(window, tmp, out=np.zeros_like(window),
where=(tmp != 0))
return np.clip(window, 0, window.max())
def correlate_windows(window_a, window_b, correlation_method="fft"):
"""Compute correlation function between two interrogation windows.
The correlation function can be computed by using the correlation
theorem to speed up the computation.
Parameters
----------
window_a : 2d np.ndarray
a two dimensions array for the first interrogation window,
window_b : 2d np.ndarray
a two dimensions array for the second interrogation window.
correlation_method : string, methods currently implemented:
'circular' - FFT based without zero-padding
'linear' - FFT based with zero-padding
'direct' - linear convolution based
Default is 'fft', which is much faster.
Returns
-------
corr : 2d np.ndarray
a two dimensions array for the correlation function.
        Note that due to the wish to use 2^N windows for faster FFT
        we use a slightly different convention for the size of the
        correlation map. The theory says it is M+N-1, and the 'direct'
        method returns that size, while the FFT-based methods return
        M+N, where M is the window_size and N is the search_area_size.
        This leads to an inconsistency in the output size.
    """
# first we remove the mean to normalize contrast and intensity
# the background level which is take as a mean of the image
# is subtracted
# import pdb; pdb.set_trace()
window_a = normalize_intensity(window_a)
window_b = normalize_intensity(window_b)
    # this is not really a circular one, as we pad a bit to get a fast 2D FFT,
    # see fft_correlate_windows for the implementation
if correlation_method in ("circular", "fft"):
corr = fft_correlate_windows(window_a, window_b)
elif correlation_method == "linear":
# save the original size:
s1 = np.array(window_a.shape)
s2 = np.array(window_b.shape)
size = s1 + s2 - 1
fslice = tuple([slice(0, int(sz)) for sz in size])
# and slice only the relevant part
corr = fft_correlate_windows(window_a, window_b)[fslice]
elif correlation_method == "direct":
corr = convolve2d(window_a, window_b[::-1, ::-1], "full")
else:
raise ValueError("method is not implemented")
return corr
def fft_correlate_windows(window_a, window_b):
""" FFT based cross correlation
it is a so-called linear convolution based,
since we increase the size of the FFT to
reduce the edge effects.
This should also work out of the box for rectangular windows.
Parameters
----------
window_a : 2d np.ndarray
a two dimensions array for the first interrogation window,
window_b : 2d np.ndarray
a two dimensions array for the second interrogation window.
    # from Stackoverflow:
    from scipy import signal
    import numpy as np
    # works for rectangular windows as well
    x = [[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 3, 0],
         [0, 0, 0, 1], [0, 0, 0, 1]]
    x = np.array(x, dtype=float)
    y = [[4, 5], [3, 4]]
    y = np.array(y)
    print("conv:", signal.convolve2d(x, y, 'full'))
    s1 = np.array(x.shape)
    s2 = np.array(y.shape)
    size = s1 + s2 - 1
    fsize = 2 ** np.ceil(np.log2(size)).astype(int)
    fslice = tuple([slice(0, int(sz)) for sz in size])
    new_x = np.fft.fft2(x, fsize)
    new_y = np.fft.fft2(y, fsize)
    result = np.fft.ifft2(new_x * new_y)[fslice].copy()
    print("fft for my method:", np.array(result.real, np.int32))
"""
s1 = np.array(window_a.shape)
s2 = np.array(window_b.shape)
size = s1 + s2 - 1
fsize = 2 ** np.ceil(np.log2(size)).astype(int)
fslice = tuple([slice(0, int(sz)) for sz in size])
f2a = rfft2(window_a, fsize)
f2b = rfft2(window_b[::-1, ::-1], fsize)
corr = irfft2(f2a * f2b).real[fslice]
return corr
def extended_search_area_piv(
frame_a,
frame_b,
window_size,
overlap=0,
dt=1.0,
search_area_size=None,
correlation_method="circular",
subpixel_method="gaussian",
sig2noise_method='peak2mean',
width=2,
normalized_correlation=False
):
"""Standard PIV cross-correlation algorithm, with an option for
extended area search that increased dynamic range. The search region
in the second frame is larger than the interrogation window size in the
first frame. For Cython implementation see
openpiv.process.extended_search_area_piv
This is a pure python implementation of the standard PIV cross-correlation
algorithm. It is a zero order displacement predictor, and no iterative
process is performed.
Parameters
----------
frame_a : 2d np.ndarray
an two dimensions array of integers containing grey levels of
the first frame.
frame_b : 2d np.ndarray
an two dimensions array of integers containing grey levels of
the second frame.
window_size : int
the size of the (square) interrogation window, [default: 32 pix].
    overlap : int
        the number of pixels by which two adjacent windows overlap
        [default: 0].
dt : float
the time delay separating the two frames [default: 1.0].
correlation_method : string
one of the two methods implemented: 'circular' or 'linear',
default: 'circular', it's faster, without zero-padding
'linear' requires also normalized_correlation = True (see below)
subpixel_method : string
one of the following methods to estimate subpixel location of the
peak:
'centroid' [replaces default if correlation map is negative],
'gaussian' [default if correlation map is positive],
'parabolic'.
sig2noise_method : string
defines the method of signal-to-noise-ratio measure,
('peak2peak' or 'peak2mean'. If None, no measure is performed.)
width : int
the half size of the region around the first
correlation peak to ignore for finding the second
peak. [default: 2]. Only used if ``sig2noise_method==peak2peak``.
search_area_size : int
the size of the interrogation window in the second frame,
default is the same interrogation window size and it is a
fallback to the simplest FFT based PIV
normalized_correlation: bool
if True, then the image intensity will be modified by removing
the mean, dividing by the standard deviation and
the correlation map will be normalized. It's slower but could be
more robust
Returns
-------
u : 2d np.ndarray
a two dimensional array containing the u velocity component,
in pixels/seconds.
v : 2d np.ndarray
a two dimensional array containing the v velocity component,
in pixels/seconds.
sig2noise : 2d np.ndarray, ( optional: only if sig2noise_method != None )
a two dimensional array the signal to noise ratio for each
window pair.
    The implementation of the one-step direct correlation with different
    sizes of the interrogation window and the search area. The increased
    size of the search area copes with the problem of loss of pairs due
    to in-plane motion, allowing for a smaller interrogation window size,
    without increasing the number of outlier vectors.
See:
Particle-Imaging Techniques for Experimental Fluid Mechanics
Annual Review of Fluid Mechanics
Vol. 23: 261-304 (Volume publication date January 1991)
DOI: 10.1146/annurev.fl.23.010191.001401
originally implemented in process.pyx in Cython and converted to
a NumPy vectorized solution in pyprocess.py
"""
# check the inputs for validity
if search_area_size is None:
search_area_size = window_size
if overlap >= window_size:
raise ValueError("Overlap has to be smaller than the window_size")
if search_area_size < window_size:
raise ValueError("Search size cannot be smaller than the window_size")
if (window_size > frame_a.shape[0]) or (window_size > frame_a.shape[1]):
raise ValueError("window size cannot be larger than the image")
# get field shape
n_rows, n_cols = get_field_shape(frame_a.shape, search_area_size, overlap)
# We implement the new vectorized code
aa = moving_window_array(frame_a, search_area_size, overlap)
bb = moving_window_array(frame_b, search_area_size, overlap)
    # for the case of extended search, the window size is smaller than
    # the search_area_size. In order to keep it all vectorized the
    # approach is to use interrogation windows of the same size as
    # search_area_size in both frames, but mask out the region around
    # the interrogation window in frame A
if search_area_size > window_size:
# before masking with zeros we need to remove
# edges
aa = normalize_intensity(aa)
bb = normalize_intensity(bb)
mask = np.zeros((search_area_size, search_area_size)).astype(aa.dtype)
pad = int((search_area_size - window_size) / 2)
mask[slice(pad, search_area_size - pad),
slice(pad, search_area_size - pad)] = 1
mask = np.broadcast_to(mask, aa.shape)
aa *= mask
corr = fft_correlate_images(aa, bb,
correlation_method=correlation_method,
normalized_correlation=normalized_correlation)
u, v = correlation_to_displacement(corr, n_rows, n_cols,
subpixel_method=subpixel_method)
# return output depending if user wanted sig2noise information
if sig2noise_method is not None:
sig2noise = sig2noise_ratio(
corr, sig2noise_method=sig2noise_method, width=width
)
else:
sig2noise = np.zeros_like(u)*np.nan
sig2noise = sig2noise.reshape(n_rows, n_cols)
return u/dt, v/dt, sig2noise
def correlation_to_displacement(corr, n_rows, n_cols,
subpixel_method="gaussian"):
"""
Correlation maps are converted to displacement for each interrogation
window using the convention that the size of the correlation map
is 2N -1 where N is the size of the largest interrogation window
(in frame B) that is called search_area_size
Inputs:
corr : 3D nd.array
contains output of the fft_correlate_images
n_rows, n_cols : number of interrogation windows, output of the
get_field_shape
"""
    # iterate through interrogation windows and search areas
u = np.zeros((n_rows, n_cols))
v = np.zeros((n_rows, n_cols))
# center point of the correlation map
default_peak_position = np.floor(np.array(corr[0, :, :].shape)/2)
for k in range(n_rows):
for m in range(n_cols):
            # look at studying_correlations.ipynb
            # find_subpixel_peak_position returns the peak location in
            # (row, column) order; subtracting the center of the map
            # converts it to a displacement
            peak = np.array(find_subpixel_peak_position(corr[k*n_cols+m, :, :],
                            subpixel_method=subpixel_method)) -\
                default_peak_position
            # the horizontal shift from left to right is u;
            # the vertical shift from top to bottom (increasing row) is v
            u[k, m], v[k, m] = peak[1], peak[0]
return (u, v)
def nextpower2(i):
    """ Find the smallest power of two that is greater than or equal to `i`. """
n = 1
while n < i:
n *= 2
return n
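# e.g. nextpower2(5) == 8, nextpower2(8) == 8, nextpower2(9) == 16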
|
gpl-3.0
|
EvanK/ansible
|
lib/ansible/modules/network/nxos/nxos_snmp_location.py
|
106
|
4000
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_snmp_location
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages SNMP location information.
description:
- Manages SNMP location configuration.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
options:
location:
description:
- Location information.
required: true
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure snmp location is configured
- nxos_snmp_location:
location: Test
state: present
# ensure snmp location is not configured
- nxos_snmp_location:
location: Test
state: absent
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["snmp-server location New_Test"]
'''
import re
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def execute_show_command(command, module):
command = {
'command': command,
'output': 'text',
}
return run_commands(module, command)
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_snmp_location(module):
location = {}
location_regex = r'^\s*snmp-server\s+location\s+(?P<location>.+)$'
body = execute_show_command('show run snmp', module)[0]
match_location = re.search(location_regex, body, re.M)
if match_location:
location['location'] = match_location.group("location")
return location
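# Illustration: given a running config containing the line
# 'snmp-server location Building-A', the regex above captures
# {'location': 'Building-A'}.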
def main():
argument_spec = dict(
location=dict(required=True, type='str'),
state=dict(choices=['absent', 'present'], default='present'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
results = {'changed': False, 'commands': [], 'warnings': warnings}
location = module.params['location']
state = module.params['state']
existing = get_snmp_location(module)
commands = []
if state == 'absent':
if existing and existing['location'] == location:
commands.append('no snmp-server location')
elif state == 'present':
if not existing or existing['location'] != location:
commands.append('snmp-server location {0}'.format(location))
cmds = flatten_list(commands)
if cmds:
results['changed'] = True
if not module.check_mode:
load_config(module, cmds)
if 'configure' in cmds:
cmds.pop(0)
results['commands'] = cmds
module.exit_json(**results)
if __name__ == '__main__':
main()
|
gpl-3.0
|
brandond/ansible
|
lib/ansible/modules/network/f5/bigip_gtm_datacenter.py
|
14
|
14902
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_gtm_datacenter
short_description: Manage Datacenter configuration in BIG-IP
description:
- Manage BIG-IP data center configuration. A data center defines the location
where the physical network components reside, such as the server and link
objects that share the same subnet on the network. This module is able to
manipulate the data center definitions in a BIG-IP.
version_added: 2.2
options:
contact:
description:
- The name of the contact for the data center.
description:
description:
- The description of the data center.
location:
description:
- The location of the data center.
name:
description:
- The name of the data center.
required: True
  state:
    description:
      - The state of the data center. If C(absent), an attempt to delete the
        data center will be made. This will only succeed if the data center
        is not in use by any other objects. C(present) creates
        the data center and enables it. If C(enabled), enable the data center
        if it exists. If C(disabled), create the data center if
        needed, and set its state to C(disabled).
default: present
choices:
- present
- absent
- enabled
- disabled
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create data center "New York"
bigip_gtm_datacenter:
name: New York
location: 222 West 23rd
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
'''
RETURN = r'''
contact:
description: The contact that was set on the datacenter.
returned: changed
type: str
sample: [email protected]
description:
description: The description that was set for the datacenter.
returned: changed
type: str
sample: Datacenter in NYC
enabled:
  description: Whether the datacenter is enabled or not.
returned: changed
type: bool
sample: true
disabled:
description: Whether the datacenter is disabled or not.
returned: changed
type: bool
sample: true
state:
description: State of the datacenter.
returned: changed
type: str
sample: disabled
location:
description: The location that is set for the datacenter.
returned: changed
type: str
sample: 222 West 23rd
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.icontrol import module_provisioned
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.icontrol import module_provisioned
class Parameters(AnsibleF5Parameters):
api_map = {}
updatables = [
'location', 'description', 'contact', 'state',
]
returnables = [
'location', 'description', 'contact', 'state', 'enabled', 'disabled',
]
api_attributes = [
'enabled', 'location', 'description', 'contact', 'disabled',
]
class ApiParameters(Parameters):
@property
def disabled(self):
if self._values['disabled'] is True:
return True
return None
@property
def enabled(self):
if self._values['enabled'] is True:
return True
return None
class ModuleParameters(Parameters):
@property
def disabled(self):
if self._values['state'] == 'disabled':
return True
return None
@property
def enabled(self):
if self._values['state'] in ['enabled', 'present']:
return True
return None
@property
def state(self):
if self.enabled and self._values['state'] != 'present':
return 'enabled'
elif self.disabled and self._values['state'] != 'present':
return 'disabled'
else:
return self._values['state']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def disabled(self):
if self._values['state'] == 'disabled':
return True
@property
def enabled(self):
if self._values['state'] in ['enabled', 'present']:
return True
class ReportableChanges(Changes):
@property
def disabled(self):
if self._values['state'] == 'disabled':
return True
elif self._values['state'] in ['enabled', 'present']:
return False
return None
@property
def enabled(self):
if self._values['state'] in ['enabled', 'present']:
return True
elif self._values['state'] == 'disabled':
return False
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def state(self):
if self.want.enabled != self.have.enabled:
return dict(
state=self.want.state,
enabled=self.want.enabled
)
if self.want.disabled != self.have.disabled:
return dict(
state=self.want.state,
disabled=self.want.disabled
)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.pop('module', None)
self.client = kwargs.pop('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def exec_module(self):
if not module_provisioned(self.client, 'gtm'):
raise F5ModuleError(
"GTM must be provisioned to use this module."
)
changed = False
result = dict()
state = self.want.state
if state in ['present', 'enabled', 'disabled']:
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
changed = False
if self.exists():
changed = self.remove()
return changed
def create(self):
self.have = ApiParameters()
self.should_update()
if self.module.check_mode:
return True
self.create_on_device()
if self.exists():
return True
else:
raise F5ModuleError("Failed to create the datacenter")
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the datacenter")
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/datacenter/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
        if resp.status == 404 or ('code' in response and response['code'] == 404):
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/gtm/datacenter/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/gtm/datacenter/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/datacenter/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/datacenter/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
contact=dict(),
description=dict(),
location=dict(),
name=dict(required=True),
state=dict(
default='present',
choices=['present', 'absent', 'disabled', 'enabled']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
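# Example playbook task (illustrative only): the module name is assumed from
# context, and the connection details supplied through the shared F5 provider
# options are omitted. The parameter names match ArgumentSpec above.
#
#   - name: Create a GTM datacenter
#     bigip_gtm_datacenter:
#       name: New York
#       location: 222 West 23rd
#       contact: ops@example.com
#       state: present
#       partition: Common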
|
gpl-3.0
|
luogangyi/bcec-nova
|
nova/virt/hyperv/migrationops.py
|
3
|
12857
|
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for migration / resize operations.
"""
import os
from nova import exception
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import units
from nova.virt import configdrive
from nova.virt.hyperv import imagecache
from nova.virt.hyperv import utilsfactory
from nova.virt.hyperv import vmops
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import volumeops
LOG = logging.getLogger(__name__)
class MigrationOps(object):
def __init__(self):
self._hostutils = utilsfactory.get_hostutils()
self._vmutils = utilsfactory.get_vmutils()
self._vhdutils = utilsfactory.get_vhdutils()
self._pathutils = utilsfactory.get_pathutils()
self._volumeops = volumeops.VolumeOps()
self._vmops = vmops.VMOps()
self._imagecache = imagecache.ImageCache()
def _migrate_disk_files(self, instance_name, disk_files, dest):
# TODO(mikal): it would be nice if this method took a full instance,
# because it could then be passed to the log messages below.
same_host = False
if dest in self._hostutils.get_local_ips():
same_host = True
LOG.debug(_("Migration target is the source host"))
else:
LOG.debug(_("Migration target host: %s") % dest)
instance_path = self._pathutils.get_instance_dir(instance_name)
revert_path = self._pathutils.get_instance_migr_revert_dir(
instance_name, remove_dir=True)
dest_path = None
try:
if same_host:
# Since source and target are the same, we copy the files to
# a temporary location before moving them into place
dest_path = '%s_tmp' % instance_path
if self._pathutils.exists(dest_path):
self._pathutils.rmtree(dest_path)
self._pathutils.makedirs(dest_path)
else:
dest_path = self._pathutils.get_instance_dir(
instance_name, dest, remove_dir=True)
for disk_file in disk_files:
# Skip the config drive as the instance is already configured
if os.path.basename(disk_file).lower() != 'configdrive.vhd':
LOG.debug(_('Copying disk "%(disk_file)s" to '
'"%(dest_path)s"'),
{'disk_file': disk_file, 'dest_path': dest_path})
self._pathutils.copy(disk_file, dest_path)
self._pathutils.rename(instance_path, revert_path)
if same_host:
self._pathutils.rename(dest_path, instance_path)
except Exception:
with excutils.save_and_reraise_exception():
self._cleanup_failed_disk_migration(instance_path, revert_path,
dest_path)
def _cleanup_failed_disk_migration(self, instance_path,
revert_path, dest_path):
try:
if dest_path and self._pathutils.exists(dest_path):
self._pathutils.rmtree(dest_path)
if self._pathutils.exists(revert_path):
self._pathutils.rename(revert_path, instance_path)
except Exception as ex:
# Log and ignore this exception
LOG.exception(ex)
LOG.error(_("Cannot cleanup migration files"))
def _check_target_flavor(self, instance, flavor):
new_root_gb = flavor['root_gb']
curr_root_gb = instance['root_gb']
if new_root_gb < curr_root_gb:
raise exception.InstanceFaultRollback(
vmutils.VHDResizeException(
_("Cannot resize the root disk to a smaller size. "
"Current size: %(curr_root_gb)s GB. Requested size: "
"%(new_root_gb)s GB") %
{'curr_root_gb': curr_root_gb,
'new_root_gb': new_root_gb}))
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None):
LOG.debug(_("migrate_disk_and_power_off called"), instance=instance)
self._check_target_flavor(instance, flavor)
self._vmops.power_off(instance)
instance_name = instance["name"]
(disk_files,
volume_drives) = self._vmutils.get_vm_storage_paths(instance_name)
if disk_files:
self._migrate_disk_files(instance_name, disk_files, dest)
self._vmops.destroy(instance, destroy_disks=False)
# disk_info is not used
return ""
def confirm_migration(self, migration, instance, network_info):
LOG.debug(_("confirm_migration called"), instance=instance)
self._pathutils.get_instance_migr_revert_dir(instance['name'],
remove_dir=True)
def _revert_migration_files(self, instance_name):
instance_path = self._pathutils.get_instance_dir(
instance_name, create_dir=False, remove_dir=True)
revert_path = self._pathutils.get_instance_migr_revert_dir(
instance_name)
self._pathutils.rename(revert_path, instance_path)
def _check_and_attach_config_drive(self, instance):
if configdrive.required_by(instance):
configdrive_path = self._pathutils.lookup_configdrive_path(
instance.name)
if configdrive_path:
self._vmops.attach_config_drive(instance, configdrive_path)
else:
raise vmutils.HyperVException(
_("Config drive is required by instance: %s, "
"but it does not exist.") % instance.name)
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
LOG.debug(_("finish_revert_migration called"), instance=instance)
instance_name = instance['name']
self._revert_migration_files(instance_name)
if self._volumeops.ebs_root_in_block_devices(block_device_info):
root_vhd_path = None
else:
root_vhd_path = self._pathutils.lookup_root_vhd_path(instance_name)
eph_vhd_path = self._pathutils.lookup_ephemeral_vhd_path(instance_name)
self._vmops.create_instance(instance, network_info, block_device_info,
root_vhd_path, eph_vhd_path)
self._check_and_attach_config_drive(instance)
if power_on:
self._vmops.power_on(instance)
def _merge_base_vhd(self, diff_vhd_path, base_vhd_path):
base_vhd_copy_path = os.path.join(os.path.dirname(diff_vhd_path),
os.path.basename(base_vhd_path))
try:
LOG.debug(_('Copying base disk %(base_vhd_path)s to '
'%(base_vhd_copy_path)s'),
{'base_vhd_path': base_vhd_path,
'base_vhd_copy_path': base_vhd_copy_path})
self._pathutils.copyfile(base_vhd_path, base_vhd_copy_path)
LOG.debug(_("Reconnecting copied base VHD "
"%(base_vhd_copy_path)s and diff "
"VHD %(diff_vhd_path)s"),
{'base_vhd_copy_path': base_vhd_copy_path,
'diff_vhd_path': diff_vhd_path})
self._vhdutils.reconnect_parent_vhd(diff_vhd_path,
base_vhd_copy_path)
LOG.debug(_("Merging base disk %(base_vhd_copy_path)s and "
"diff disk %(diff_vhd_path)s"),
{'base_vhd_copy_path': base_vhd_copy_path,
'diff_vhd_path': diff_vhd_path})
self._vhdutils.merge_vhd(diff_vhd_path, base_vhd_copy_path)
# Replace the differential VHD with the merged one
self._pathutils.rename(base_vhd_copy_path, diff_vhd_path)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(base_vhd_copy_path):
self._pathutils.remove(base_vhd_copy_path)
def _check_resize_vhd(self, vhd_path, vhd_info, new_size):
curr_size = vhd_info['MaxInternalSize']
if new_size < curr_size:
raise vmutils.VHDResizeException(_("Cannot resize a VHD "
"to a smaller size"))
elif new_size > curr_size:
self._resize_vhd(vhd_path, new_size)
def _resize_vhd(self, vhd_path, new_size):
if vhd_path.split('.')[-1].lower() == "vhd":
LOG.debug(_("Getting parent disk info for disk: %s"), vhd_path)
base_disk_path = self._vhdutils.get_vhd_parent_path(vhd_path)
if base_disk_path:
# A differential VHD cannot be resized. This limitation
# does not apply to the VHDX format.
self._merge_base_vhd(vhd_path, base_disk_path)
LOG.debug(_("Resizing disk \"%(vhd_path)s\" to new max "
"size %(new_size)s"),
{'vhd_path': vhd_path, 'new_size': new_size})
self._vhdutils.resize_vhd(vhd_path, new_size)
def _check_base_disk(self, context, instance, diff_vhd_path,
src_base_disk_path):
base_vhd_path = self._imagecache.get_cached_image(context, instance)
        # If the location of the base disk differs between the source
        # and target hosts, we need to reconnect the differencing VHD
        # to the locally cached base disk
if src_base_disk_path.lower() != base_vhd_path.lower():
LOG.debug(_("Reconnecting copied base VHD "
"%(base_vhd_path)s and diff "
"VHD %(diff_vhd_path)s"),
{'base_vhd_path': base_vhd_path,
'diff_vhd_path': diff_vhd_path})
self._vhdutils.reconnect_parent_vhd(diff_vhd_path,
base_vhd_path)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance=False,
block_device_info=None, power_on=True):
LOG.debug(_("finish_migration called"), instance=instance)
instance_name = instance['name']
if self._volumeops.ebs_root_in_block_devices(block_device_info):
root_vhd_path = None
else:
root_vhd_path = self._pathutils.lookup_root_vhd_path(instance_name)
if not root_vhd_path:
raise vmutils.HyperVException(_("Cannot find boot VHD "
"file for instance: %s") %
instance_name)
root_vhd_info = self._vhdutils.get_vhd_info(root_vhd_path)
src_base_disk_path = root_vhd_info.get("ParentPath")
if src_base_disk_path:
self._check_base_disk(context, instance, root_vhd_path,
src_base_disk_path)
if resize_instance:
new_size = instance['root_gb'] * units.Gi
self._check_resize_vhd(root_vhd_path, root_vhd_info, new_size)
eph_vhd_path = self._pathutils.lookup_ephemeral_vhd_path(instance_name)
if resize_instance:
new_size = instance.get('ephemeral_gb', 0) * units.Gi
if not eph_vhd_path:
if new_size:
eph_vhd_path = self._vmops.create_ephemeral_vhd(instance)
else:
eph_vhd_info = self._vhdutils.get_vhd_info(eph_vhd_path)
self._check_resize_vhd(eph_vhd_path, eph_vhd_info, new_size)
self._vmops.create_instance(instance, network_info, block_device_info,
root_vhd_path, eph_vhd_path)
self._check_and_attach_config_drive(instance)
if power_on:
self._vmops.power_on(instance)
|
apache-2.0
|
petrutlucian94/cinder
|
cinder/tests/unit/test_ibm_flashsystem_iscsi.py
|
18
|
9722
|
# Copyright 2015 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Tests for the IBM FlashSystem iSCSI volume driver.
"""
import mock
import six
import random
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit import test_ibm_flashsystem as fscommon
from cinder import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers.ibm import flashsystem_iscsi
from cinder.volume import volume_types
class FlashSystemManagementSimulator(fscommon.FlashSystemManagementSimulator):
def __init__(self):
# Default protocol is iSCSI
self._protocol = 'iSCSI'
self._volumes_list = {}
self._hosts_list = {}
self._mappings_list = {}
self._next_cmd_error = {
'lsnode': '',
'lssystem': '',
'lsmdiskgrp': ''
}
self._errors = {
            # CMMVC50000 is a fake error code indicating that a command did
            # not return the expected results. It stands in for the various
            # kinds of CLI errors.
'CMMVC50000': ('', 'CMMVC50000 The command can not be executed '
'successfully.')
}
class FlashSystemFakeISCSIDriver(flashsystem_iscsi.FlashSystemISCSIDriver):
def __init__(self, *args, **kwargs):
super(FlashSystemFakeISCSIDriver, self).__init__(*args, **kwargs)
def set_fake_storage(self, fake):
self.fake_storage = fake
def _ssh(self, cmd, check_exit_code=True):
utils.check_ssh_injection(cmd)
ret = self.fake_storage.execute_command(cmd, check_exit_code)
return ret
class FlashSystemISCSIDriverTestCase(test.TestCase):
def _set_flag(self, flag, value):
group = self.driver.configuration.config_group
self.driver.configuration.set_override(flag, value, group)
def _reset_flags(self):
self.driver.configuration.local_conf.reset()
for k, v in self._def_flags.items():
self._set_flag(k, v)
def _generate_vol_info(self,
vol_name,
vol_size=10,
vol_status='available'):
rand_id = six.text_type(random.randint(10000, 99999))
if not vol_name:
vol_name = 'test_volume%s' % rand_id
return {'name': vol_name,
'size': vol_size,
'id': '%s' % rand_id,
'volume_type_id': None,
'status': vol_status,
'mdisk_grp_name': 'mdiskgrp0'}
def _generate_snap_info(self,
vol_name,
vol_id,
vol_size,
vol_status,
snap_status='available'):
rand_id = six.text_type(random.randint(10000, 99999))
return {'name': 'test_snap_%s' % rand_id,
'id': rand_id,
'volume': {'name': vol_name,
'id': vol_id,
'size': vol_size,
'status': vol_status},
'volume_size': vol_size,
'status': snap_status,
'mdisk_grp_name': 'mdiskgrp0'}
def setUp(self):
super(FlashSystemISCSIDriverTestCase, self).setUp()
self._def_flags = {'san_ip': 'hostname',
'san_login': 'username',
'san_password': 'password',
'flashsystem_connection_protocol': 'iSCSI',
'flashsystem_multipath_enabled': False,
'flashsystem_multihostmap_enabled': True,
'iscsi_ip_address': '192.168.1.10',
'flashsystem_iscsi_portid': 1}
self.connector = {
'host': 'flashsystem',
'wwnns': ['0123456789abcdef', '0123456789abcdeg'],
'wwpns': ['abcd000000000001', 'abcd000000000002'],
'initiator': 'iqn.123456'}
self.sim = FlashSystemManagementSimulator()
self.driver = FlashSystemFakeISCSIDriver(
configuration=conf.Configuration(None))
self.driver.set_fake_storage(self.sim)
self._reset_flags()
self.ctxt = context.get_admin_context()
self.driver.do_setup(None)
self.driver.check_for_setup_error()
self.sleeppatch = mock.patch('eventlet.greenthread.sleep')
self.sleeppatch.start()
def tearDown(self):
self.sleeppatch.stop()
super(FlashSystemISCSIDriverTestCase, self).tearDown()
def test_flashsystem_do_setup(self):
# case 1: set as iSCSI
self.sim.set_protocol('iSCSI')
self._set_flag('flashsystem_connection_protocol', 'iSCSI')
self.driver.do_setup(None)
self.assertEqual('iSCSI', self.driver._protocol)
# clear environment
self.sim.set_protocol('iSCSI')
self._reset_flags()
def test_flashsystem_validate_connector(self):
conn_neither = {'host': 'host'}
conn_iscsi = {'host': 'host', 'initiator': 'foo'}
conn_both = {'host': 'host', 'initiator': 'foo', 'wwpns': 'bar'}
protocol = self.driver._protocol
# case 1: when protocol is iSCSI
self.driver._protocol = 'iSCSI'
self.driver.validate_connector(conn_iscsi)
self.driver.validate_connector(conn_both)
self.assertRaises(exception.InvalidConnectorException,
self.driver.validate_connector, conn_neither)
# clear environment
self.driver._protocol = protocol
def test_flashsystem_connection(self):
# case 1: initialize_connection/terminate_connection with iSCSI
self.sim.set_protocol('iSCSI')
self._set_flag('flashsystem_connection_protocol', 'iSCSI')
self.driver.do_setup(None)
vol1 = self._generate_vol_info(None)
self.driver.create_volume(vol1)
self.driver.initialize_connection(vol1, self.connector)
self.driver.terminate_connection(vol1, self.connector)
# clear environment
self.driver.delete_volume(vol1)
self.sim.set_protocol('iSCSI')
self._reset_flags()
def test_flashsystem_create_host(self):
# case 1: create host with iqn
self.sim.set_protocol('iSCSI')
self._set_flag('flashsystem_connection_protocol', 'iSCSI')
self.driver.do_setup(None)
conn = {
'host': 'flashsystem',
'wwnns': ['0123456789abcdef', '0123456789abcdeg'],
'wwpns': ['abcd000000000001', 'abcd000000000002'],
'initiator': 'iqn.123456'}
host = self.driver._create_host(conn)
# case 2: delete host
self.driver._delete_host(host)
# clear environment
self.sim.set_protocol('iSCSI')
self._reset_flags()
def test_flashsystem_get_vdisk_params(self):
# case 1: use default params
self.driver._get_vdisk_params(None)
# case 2: use extra params from type
opts1 = {'storage_protocol': 'iSCSI'}
opts2 = {'capabilities:storage_protocol': 'iSCSI'}
opts3 = {'storage_protocol': 'FC'}
type1 = volume_types.create(self.ctxt, 'opts1', opts1)
type2 = volume_types.create(self.ctxt, 'opts2', opts2)
type3 = volume_types.create(self.ctxt, 'opts3', opts3)
self.assertEqual(
'iSCSI',
self.driver._get_vdisk_params(type1['id'])['protocol'])
self.assertEqual(
'iSCSI',
self.driver._get_vdisk_params(type2['id'])['protocol'])
self.assertRaises(exception.InvalidInput,
self.driver._get_vdisk_params,
type3['id'])
# clear environment
volume_types.destroy(self.ctxt, type1['id'])
volume_types.destroy(self.ctxt, type2['id'])
volume_types.destroy(self.ctxt, type3['id'])
def test_flashsystem_map_vdisk_to_host(self):
# case 1: no host found
vol1 = self._generate_vol_info(None)
self.driver.create_volume(vol1)
self.assertEqual(
            # LUN id should begin with 1
1,
self.driver._map_vdisk_to_host(vol1['name'], self.connector))
# case 2: host already exists
vol2 = self._generate_vol_info(None)
self.driver.create_volume(vol2)
self.assertEqual(
            # LUN id should be sequential
2,
self.driver._map_vdisk_to_host(vol2['name'], self.connector))
# case 3: test if already mapped
self.assertEqual(
1,
self.driver._map_vdisk_to_host(vol1['name'], self.connector))
# clean environment
self.driver._unmap_vdisk_from_host(vol1['name'], self.connector)
self.driver._unmap_vdisk_from_host(vol2['name'], self.connector)
self.driver.delete_volume(vol1)
self.driver.delete_volume(vol2)
# case 4: If there is no vdisk mapped to host, host should be removed
self.assertEqual(
None,
self.driver._get_host_from_connector(self.connector))
|
apache-2.0
|
geodynamics/snac
|
Snac/pyre/Inlet.py
|
5
|
3206
|
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
class Inlet(object):
def __init__(self):
self._handle = None
return
def impose(self):
import Snac.pyre.Exchanger as Exchanger
Exchanger.Inlet_impose(self._handle)
return
def recv(self):
import Snac.pyre.Exchanger as Exchanger
Exchanger.Inlet_recv(self._handle)
return
def storeTimestep(self, fge_t, cge_t):
import Snac.pyre.Exchanger as Exchanger
Exchanger.Inlet_storeTimestep(self._handle, fge_t, cge_t)
return
"""
class SVTInlet(Inlet):
def __init__(self, mesh, sink, all_variables):
import CitcomS.Exchanger as Exchanger
self._handle = Exchanger.SVTInlet_create(mesh,
sink,
all_variables)
return
"""
class VInlet(Inlet):
def __init__(self, mesh, sink, all_variables):
import Snac.pyre.Exchanger as Exchanger
self._handle = Exchanger.VInlet_create(mesh,
sink,
all_variables)
return
def storeVold(self):
import Snac.pyre.Exchanger as Exchanger
Exchanger.VInlet_storeVold(self._handle)
return
def readVold(self):
import Snac.pyre.Exchanger as Exchanger
Exchanger.VInlet_readVold(self._handle)
return
"""
class VTInlet(Inlet):
def __init__(self, mesh, sink, all_variables):
import Snac.pyre.Exchanger as Exchanger
self._handle = Exchanger.VTInlet_create(mesh,
sink,
all_variables)
return
class BoundaryVTInlet(Inlet):
'''Available modes -- see above
'''
def __init__(self, communicator, boundary, sink, all_variables, mode="VT"):
import CitcomS.Exchanger as Exchanger
self._handle = Exchanger.BoundaryVTInlet_create(communicator.handle(),
boundary,
sink,
all_variables,
mode)
return
class TractionInlet(Inlet):
'''Inlet that impose velocity and/or traction on the boundary
Available modes --
"F": traction only
"V": velocity only
"FV": normal velocity and tangent traction
'''
def __init__(self, boundary, sink, all_variables, mode='F'):
import CitcomS.Exchanger as Exchanger
self._handle = Exchanger.TractionInlet_create(boundary,
sink,
all_variables,
mode)
return
"""
# version
__id__ = "$Id: Inlet.py,v 1.6 2004/05/11 07:59:31 tan2 Exp $"
# End of file
|
gpl-2.0
|
Yong-Lee/decode-Django
|
Django-1.5.1/django/contrib/gis/tests/geoapp/models.py
|
112
|
1877
|
from django.contrib.gis.db import models
from django.contrib.gis.tests.utils import mysql, spatialite
from django.utils.encoding import python_2_unicode_compatible
# MySQL spatial indices can't handle NULL geometries.
null_flag = not mysql
@python_2_unicode_compatible
class Country(models.Model):
name = models.CharField(max_length=30)
mpoly = models.MultiPolygonField() # SRID, by default, is 4326
objects = models.GeoManager()
def __str__(self): return self.name
@python_2_unicode_compatible
class City(models.Model):
name = models.CharField(max_length=30)
point = models.PointField()
objects = models.GeoManager()
def __str__(self): return self.name
# This is an inherited model from City
class PennsylvaniaCity(City):
county = models.CharField(max_length=30)
founded = models.DateTimeField(null=True)
objects = models.GeoManager() # TODO: This should be implicitly inherited.
@python_2_unicode_compatible
class State(models.Model):
name = models.CharField(max_length=30)
poly = models.PolygonField(null=null_flag) # Allowing NULL geometries here.
objects = models.GeoManager()
def __str__(self): return self.name
@python_2_unicode_compatible
class Track(models.Model):
name = models.CharField(max_length=30)
line = models.LineStringField()
objects = models.GeoManager()
def __str__(self): return self.name
class Truth(models.Model):
val = models.BooleanField()
objects = models.GeoManager()
if not spatialite:
@python_2_unicode_compatible
class Feature(models.Model):
name = models.CharField(max_length=20)
geom = models.GeometryField()
objects = models.GeoManager()
def __str__(self): return self.name
class MinusOneSRID(models.Model):
geom = models.PointField(srid=-1) # Minus one SRID.
objects = models.GeoManager()
|
gpl-2.0
|
kagayakidan/scikit-learn
|
sklearn/metrics/setup.py
|
299
|
1024
|
import os
import os.path
import numpy
from numpy.distutils.misc_util import Configuration
from sklearn._build_utils import get_blas_info
def configuration(parent_package="", top_path=None):
config = Configuration("metrics", parent_package, top_path)
cblas_libs, blas_info = get_blas_info()
if os.name == 'posix':
cblas_libs.append('m')
config.add_extension("pairwise_fast",
sources=["pairwise_fast.c"],
include_dirs=[os.path.join('..', 'src', 'cblas'),
numpy.get_include(),
blas_info.pop('include_dirs', [])],
libraries=cblas_libs,
extra_compile_args=blas_info.pop('extra_compile_args',
[]),
**blas_info)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(**configuration().todict())
|
bsd-3-clause
|
lferr/charm
|
charm/schemes/ibenc/ibenc_lsw08.py
|
3
|
4047
|
'''
Allison Lewko, Amit Sahai and Brent Waters (Pairing-based)
| From: "Revocation Systems with Very Small Private Keys"
| Published in: IEEE S&P 2010
| Available from: http://eprint.iacr.org/2008/309.pdf
| Notes: fully secure IBE Construction with revocable keys.
* type: identity-based encryption (public key)
* setting: Pairing
:Authors: J Ayo Akinyele
:Date: 1/2012
'''
from charm.toolbox.pairinggroup import ZR,G1,pair
from charm.toolbox.IBEnc import *
debug = False
class IBE_Revoke(IBEnc):
"""
>>> from charm.toolbox.pairinggroup import PairingGroup, GT, G2
>>> group = PairingGroup('SS512')
>>> num_users = 5 # total # of users
>>> ibe = IBE_Revoke(group)
>>> ID = "[email protected]"
>>> S = ["[email protected]", "[email protected]", "[email protected]"]
>>> (master_public_key, master_secret_key) = ibe.setup(num_users)
>>> secret_key = ibe.keygen(master_public_key, master_secret_key, ID)
>>> msg = group.random(GT)
>>> cipher_text = ibe.encrypt(master_public_key, msg, S)
>>> decrypted_msg = ibe.decrypt(S, cipher_text, secret_key)
>>> decrypted_msg == msg
True
"""
def __init__(self, groupObj):
IBEnc.__init__(self)
global group, util
group = groupObj
def setup(self, n):
g, w, h, v, v1, v2 = group.random(G1, 6)
a1, a2, b, alpha = group.random(ZR, 4)
tau1 = v * (v1 ** a1)
tau2 = v * (v2 ** a2)
pk = {'n':n, 'g':g, 'g^b':g ** b, 'g^a1':g ** a1, 'g^a2':g ** a2,
'g^ba1':g ** (b * a1), 'g^ba2':g ** (b * a2), 'tau1':tau1, 'tau2':tau2,
'tau1^b':tau1 ** b, 'tau2^b':tau2 ** b, 'w':w, 'h':h,
'egg_alpha': pair(g, g) ** (alpha * a1 * b)}
sk = {'g^alph':g ** alpha, 'g^alph_a1':g ** (alpha * a1),
'g^b':g ** b,'v':v, 'v1':v1, 'v2':v2, 'alpha':alpha }
return (pk, sk)
def keygen(self, mpk, msk, ID):
d1, d2, z1, z2 = group.random(ZR, 4)
d = d1 + d2
_ID = group.hash(ID.upper())
D = {}
D[1] = msk['g^alph_a1'] * (msk['v'] ** d)
D[2] = (mpk['g'] ** -msk['alpha']) * (msk['v1'] ** d) * (mpk['g'] ** z1)
D[3] = mpk['g^b'] ** -z1
D[4] = (msk['v2'] ** d) * (mpk['g'] ** z2)
D[5] = mpk['g^b'] ** -z2
D[6] = mpk['g^b'] ** d2
D[7] = mpk['g'] ** d1
K = ((mpk['w'] ** _ID) * mpk['h']) ** d1
sk = { 'ID':_ID, 'D':D, 'K':K }
return sk
def encrypt(self, mpk, M, S):
s1, s2 = group.random(ZR, 2)
s = s1 + s2
# number of revoked users
r = len(S); t_r = group.random(ZR, r)
t = 0
for i in t_r: t += i
C = {}
C[0] = M * (mpk['egg_alpha'] ** s2)
C[1] = mpk['g^b'] ** s
C[2] = mpk['g^ba1'] ** s1
C[3] = mpk['g^a1'] ** s1
C[4] = mpk['g^ba2'] ** s2
C[5] = mpk['g^a2'] ** s2
C[6] = (mpk['tau1'] ** s1) * (mpk['tau2'] ** s2)
C[7] = (mpk['tau1^b'] ** s1) * (mpk['tau2^b'] ** s2) * (mpk['w'] ** -t)
c1 = [i for i in range(r)]; c2 = [i for i in range(r)]
for i in range(len(t_r)):
c1[i] = mpk['g'] ** t_r[i]
S_hash = group.hash(S[i].upper())
c2[i] = ((mpk['w'] ** S_hash) * mpk['h']) ** t_r[i]
C['i1'] = c1
C['i2'] = c2
return C
def decrypt(self, S, ct, sk):
C, D, K = ct, sk['D'], sk['K']
_ID = sk['ID']
# hash IDs
S_id = [group.hash(i.upper()) for i in S]
if debug: print("hashed IDs: ", S_id)
if _ID in S_id: print("Your ID:", _ID, "is in revoked list!"); return
A1 = pair(C[1], D[1]) * pair(C[2], D[2]) * pair(C[3], D[3]) * pair(C[4], D[4]) * pair(C[5], D[5])
A2 = pair(C[6], D[6]) * pair(C[7], D[7])
A3 = A1 / A2
A4 = 1
for i in range(len(S_id)):
A4 *= (pair(C['i1'][i], K) / pair(C['i2'][i], D[7])) ** (1 / (_ID - S_id[i]))
return C[0] / (A3 / A4)
|
lgpl-3.0
|
Ictp/indico
|
ez_setup.py
|
11
|
10278
|
#!python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
DEFAULT_VERSION = "0.6c11"
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
md5_data = {
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090',
'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4',
'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7',
'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5',
'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de',
'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b',
'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2',
'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086',
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
}
import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
if egg_name in md5_data:
digest = md5(data).hexdigest()
if digest != md5_data[egg_name]:
print >>sys.stderr, (
"md5 validation of %s failed! (Possible download problem?)"
% egg_name
)
sys.exit(2)
return data
def use_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
download_delay=15
):
"""Automatically find/download setuptools and make it available on sys.path
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end with
a '/'). `to_dir` is the directory where setuptools will be downloaded, if
it is not already available. If `download_delay` is specified, it should
be the number of seconds that will be paused before initiating a download,
should one be required. If an older version of setuptools is installed,
this routine will print a message to ``sys.stderr`` and raise SystemExit in
an attempt to abort the calling script.
"""
was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
def do_download():
egg = download_setuptools(version, download_base, to_dir, download_delay)
sys.path.insert(0, egg)
import setuptools; setuptools.bootstrap_install_from = egg
try:
import pkg_resources
except ImportError:
return do_download()
try:
pkg_resources.require("setuptools>="+version); return
except pkg_resources.VersionConflict, e:
if was_imported:
print >>sys.stderr, (
"The required version of setuptools (>=%s) is not available, and\n"
"can't be installed while this script is running. Please install\n"
" a more recent version first, using 'easy_install -U setuptools'."
"\n\n(Currently using %r)"
) % (version, e.args[0])
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return do_download()
except pkg_resources.DistributionNotFound:
return do_download()
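# Usage sketch (illustrative; parameter values are hypothetical, the
# parameters themselves are documented in the docstring above):
#
#     from ez_setup import use_setuptools
#     use_setuptools(version="0.6c11", to_dir=os.curdir, download_delay=0)
#
# This either verifies that setuptools >= 0.6c11 is already importable or
# downloads the matching egg into `to_dir` and prepends it to sys.path.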
def download_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
delay = 15
):
"""Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download attempt.
"""
import urllib2, shutil
egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
url = download_base + egg_name
saveto = os.path.join(to_dir, egg_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
from distutils import log
if delay:
log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
version, download_base, delay, url
); from time import sleep; sleep(delay)
log.warn("Downloading %s", url)
src = urllib2.urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = _validate_md5(egg_name, src.read())
dst = open(saveto,"wb"); dst.write(data)
finally:
if src: src.close()
if dst: dst.close()
return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
try:
import setuptools
except ImportError:
egg = None
try:
egg = download_setuptools(version, delay=0)
sys.path.insert(0,egg)
from setuptools.command.easy_install import main
return main(list(argv)+[egg]) # we're done here
finally:
if egg and os.path.exists(egg):
os.unlink(egg)
else:
if setuptools.__version__ == '0.0.1':
print >>sys.stderr, (
"You have an obsolete version of setuptools installed. Please\n"
"remove it from your system entirely before rerunning this script."
)
sys.exit(2)
req = "setuptools>="+version
import pkg_resources
try:
pkg_resources.require(req)
except pkg_resources.VersionConflict:
try:
from setuptools.command.easy_install import main
except ImportError:
from easy_install import main
main(list(argv)+[download_setuptools(delay=0)])
sys.exit(0) # try to force an exit
else:
if argv:
from setuptools.command.easy_install import main
main(argv)
else:
print "Setuptools version",version,"or greater has been installed."
print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
"""Update our built-in md5 registry"""
import re
for name in filenames:
base = os.path.basename(name)
f = open(name,'rb')
md5_data[base] = md5(f.read()).hexdigest()
f.close()
data = [" %r: %r,\n" % it for it in md5_data.items()]
data.sort()
repl = "".join(data)
import inspect
srcfile = inspect.getsourcefile(sys.modules[__name__])
f = open(srcfile, 'rb'); src = f.read(); f.close()
match = re.search("\nmd5_data = {\n([^}]+)}", src)
if not match:
print >>sys.stderr, "Internal error!"
sys.exit(2)
src = src[:match.start(1)] + repl + src[match.end(1):]
f = open(srcfile,'w')
f.write(src)
f.close()
if __name__=='__main__':
if len(sys.argv)>2 and sys.argv[1]=='--md5update':
update_md5(sys.argv[2:])
else:
main(sys.argv[1:])
|
gpl-3.0
|
ibusybox/pkcs
|
src/main/python/pkcs/opensslconf.py
|
1
|
3420
|
#!/usr/bin/env python
# coding=utf8
caOpensslConf = '''
#http://www.phildev.net/ssl/opensslconf.html
[ ca ]
default_ca = CA_default
[CA_default]
caroot = %(caroot)s
certs = $caroot/certsdb
new_certs_dir = $certs
database = $caroot/index.txt
certificate = $caroot/%(cn)s.cer
private_key = $caroot/%(cn)s-key.pem
serial = $caroot/serial
#crldir = $caroot/crl
#crlnumber = $caroot/crlnumber
#crl = $crldir/crl.pem
RANDFILE = $caroot/private/.rand
x509_extensions = usr_cert
#copy_extensions = copy
name_opt = ca_default
cert_opt = ca_default
default_days = 365
#default_crl_days= 30
default_md = sha256
preserve = no
policy = policy_match
[ policy_match ]
countryName = match
stateOrProvinceName = match
localityName = supplied
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ policy_anything ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ req ]
default_bits = 4096
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
attributes = req_attributes
x509_extensions = v3_ca
req_extensions = v3_req
string_mask = nombstr
[ req_distinguished_name ]
C = %(c)s
ST = %(st)s
L = %(l)s
O = %(o)s
OU = %(ou)s
CN = %(cn)s
#emailAddress = $ENV:REQ_EMAIL
[ req_attributes ]
[ usr_cert ]
basicConstraints = CA:false
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
subjectAltName = $ENV::SUBJECT_ALT_NAME
[ v3_req ]
#subjectAltName = %(subjectAltName)s
[ v3_ca ]
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer:always
basicConstraints = CA:true
'''
certOpensslConf = '''
#http://www.phildev.net/ssl/opensslconf.html
x509_extensions = usr_cert
#copy_extensions = copy
default_days = 365
#default_crl_days= 30
default_md = sha256
preserve = no
policy = policy_match
[ policy_match ]
countryName = match
stateOrProvinceName = match
localityName = supplied
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ policy_anything ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ req ]
default_bits = 4096
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
attributes = req_attributes
x509_extensions = v3_req
req_extensions = v3_req
string_mask = nombstr
[ req_distinguished_name ]
C = %(c)s
ST = %(st)s
L = %(l)s
O = %(o)s
OU = %(ou)s
CN = %(cn)s
#emailAddress = $ENV:REQ_EMAIL
[ req_attributes ]
[ usr_cert ]
basicConstraints = CA:false
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
[ v3_req ]
subjectAltName = %(subjectAltName)s
'''
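# Minimal usage sketch (illustrative, not part of the original module): both
# templates above are plain '%'-style format strings, so they can be rendered
# with a dict of subject fields. All field values below are hypothetical.
if __name__ == '__main__':
    rendered = certOpensslConf % {
        'c': 'US',
        'st': 'California',
        'l': 'San Francisco',
        'o': 'Example Org',
        'ou': 'IT',
        'cn': 'example.com',
        'subjectAltName': 'DNS:example.com,DNS:www.example.com',
    }
    print(rendered)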
|
apache-2.0
|
saisiddhant12/time_management_system
|
db.py
|
1
|
1248
|
import time
import datetime
import sqlite3
conn = sqlite3.connect('diary.db')
c = conn.cursor()
c.execute('''CREATE TABLE executive (unid INTEGER PRIMARY KEY,name text,designation text, abs text)''')
no_of_exec = input() #enter the total number of executives
while(no_of_exec):
no_of_exec = no_of_exec-1
eid = input("Enter the eid of executives") #enter the assigned UID to the executive Enter 0 to directly view the table
nam = raw_input("Enter the name of the executive") #enter the name of the executive
des = raw_input("Enter the mail ID")
c.execute("INSERT INTO executive (unid,name,designation) VALUES (?,?,?)",(eid,nam,des)) #Inserting credentials into database
a = input("Enter 1 for taking a leave")
if a == 1:
yyyy = input("Enter the year\t")
mm = input("Enter the month\t")
dd = input("Enter the days\t")
leave = str(datetime.datetime(yyyy, mm, dd)) #enter the leave date and duration
c.execute("UPDATE executive SET abs=('?') WHERE unid = eid",(leave))
conn.commit()
#c.execute("DROP TABLE diary.db.executive")
for row in c.execute('SELECT * FROM executive '): #Displays the database
print(row)
conn.close()
|
gpl-2.0
|
lra/boto
|
tests/unit/vpc/test_subnet.py
|
113
|
5485
|
from tests.compat import OrderedDict
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection, Subnet
class TestDescribeSubnets(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DescribeSubnetsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<subnetSet>
<item>
<subnetId>subnet-9d4a7b6c</subnetId>
<state>available</state>
<vpcId>vpc-1a2b3c4d</vpcId>
<cidrBlock>10.0.1.0/24</cidrBlock>
<availableIpAddressCount>251</availableIpAddressCount>
<availabilityZone>us-east-1a</availabilityZone>
<defaultForAz>false</defaultForAz>
<mapPublicIpOnLaunch>false</mapPublicIpOnLaunch>
<tagSet/>
</item>
<item>
<subnetId>subnet-6e7f829e</subnetId>
<state>available</state>
<vpcId>vpc-1a2b3c4d</vpcId>
<cidrBlock>10.0.0.0/24</cidrBlock>
<availableIpAddressCount>251</availableIpAddressCount>
<availabilityZone>us-east-1a</availabilityZone>
<defaultForAz>false</defaultForAz>
<mapPublicIpOnLaunch>false</mapPublicIpOnLaunch>
<tagSet/>
</item>
</subnetSet>
</DescribeSubnetsResponse>
"""
def test_get_all_subnets(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.get_all_subnets(
['subnet-9d4a7b6c', 'subnet-6e7f829e'],
filters=OrderedDict([('state', 'available'),
('vpc-id', ['subnet-9d4a7b6c', 'subnet-6e7f829e'])]))
self.assert_request_parameters({
'Action': 'DescribeSubnets',
'SubnetId.1': 'subnet-9d4a7b6c',
'SubnetId.2': 'subnet-6e7f829e',
'Filter.1.Name': 'state',
'Filter.1.Value.1': 'available',
'Filter.2.Name': 'vpc-id',
'Filter.2.Value.1': 'subnet-9d4a7b6c',
'Filter.2.Value.2': 'subnet-6e7f829e'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(len(api_response), 2)
self.assertIsInstance(api_response[0], Subnet)
self.assertEqual(api_response[0].id, 'subnet-9d4a7b6c')
self.assertEqual(api_response[1].id, 'subnet-6e7f829e')
class TestCreateSubnet(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<CreateSubnetResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<subnet>
<subnetId>subnet-9d4a7b6c</subnetId>
<state>pending</state>
<vpcId>vpc-1a2b3c4d</vpcId>
<cidrBlock>10.0.1.0/24</cidrBlock>
<availableIpAddressCount>251</availableIpAddressCount>
<availabilityZone>us-east-1a</availabilityZone>
<tagSet/>
</subnet>
</CreateSubnetResponse>
"""
def test_create_subnet(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_subnet(
'vpc-1a2b3c4d', '10.0.1.0/24', 'us-east-1a')
self.assert_request_parameters({
'Action': 'CreateSubnet',
'VpcId': 'vpc-1a2b3c4d',
'CidrBlock': '10.0.1.0/24',
'AvailabilityZone': 'us-east-1a'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertIsInstance(api_response, Subnet)
self.assertEquals(api_response.id, 'subnet-9d4a7b6c')
self.assertEquals(api_response.state, 'pending')
self.assertEquals(api_response.vpc_id, 'vpc-1a2b3c4d')
self.assertEquals(api_response.cidr_block, '10.0.1.0/24')
self.assertEquals(api_response.available_ip_address_count, 251)
self.assertEquals(api_response.availability_zone, 'us-east-1a')
class TestDeleteSubnet(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DeleteSubnetResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<return>true</return>
</DeleteSubnetResponse>
"""
def test_delete_subnet(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.delete_subnet('subnet-9d4a7b6c')
self.assert_request_parameters({
'Action': 'DeleteSubnet',
'SubnetId': 'subnet-9d4a7b6c'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
if __name__ == '__main__':
unittest.main()
|
mit
|
pravsripad/jumeg
|
jumeg/jumeg_test.py
|
3
|
1803
|
#!/usr/bin/env python
import jumeg
import os.path
raw_fname = "109925_CAU01A_100715_0842_2_c,rfDC-raw.fif"
if not os.path.isfile(raw_fname):
print("Please find the test file at the below location on the meg_store2 network drive - \
cp /data/meg_store2/fif_data/jumeg_test_data/109925_CAU01A_100715_0842_2_c,rfDC-raw.fif .")
# Function to check and explain the file naming standards
#jumeg.jumeg_utils.check_jumeg_standards(raw_fname)
# Function to apply noise reducer
jumeg.jumeg_noise_reducer.noise_reducer(raw_fname, verbose=True)
# Filter functions
#jumeg.jumeg_preprocessing.apply_filter(raw_fname)
fclean = raw_fname[:raw_fname.rfind('-raw.fif')] + ',bp1-45Hz-raw.fif'
# Evoked functions
#jumeg.jumeg_preprocessing.apply_average(fclean)
# ICA functions
#jumeg.jumeg_preprocessing.apply_ica(fclean)
fica_name = fclean[:fclean.rfind('-raw.fif')] + '-ica.fif'
# Perform ECG/EOG rejection using ICA
#jumeg.jumeg_preprocessing.apply_ica_cleaning(fica_name)
#jumeg.jumeg_preprocessing.apply_ica_cleaning(fica_name, unfiltered=True)
# OCARTA cleaning
from jumeg.decompose import ocarta
ocarta_obj = ocarta.JuMEG_ocarta()
ocarta_obj.fit(fclean, unfiltered=False, verbose=True)
# CTPS functions
#jumeg.jumeg_preprocessing.apply_ctps(fica_name)
fctps_name = '109925_CAU01A_100715_0842_2_c,rfDC,bp1-45Hz,ctps-trigger.npy'
#jumeg.jumeg_preprocessing.apply_ctps_select_ic(fctps_name)
# Function recompose brain response components only
fname_ctps_ics = '109925_CAU01A_100715_0842_2_c,rfDC,bp1-45Hz,ctps-trigger-ic_selection.txt'
#jumeg.jumeg_preprocessing.apply_ica_select_brain_response(fname_ctps_ics)
# Function to process empty file
empty_fname = '109925_CAU01A_100715_0844_2_c,rfDC-empty.fif'
#jumeg.jumeg_preprocessing.apply_create_noise_covariance(empty_fname, verbose=True)
|
bsd-3-clause
|
WoLpH/EventGhost
|
_build/builder/Logging.py
|
1
|
1901
|
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2016 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import logging
import sys
class StdHandler(object):
indent = 0
def __init__(self, oldStream, logger):
self.oldStream = oldStream
self.encoding = oldStream.encoding
self.buf = ""
self.logger = logger
# the following is a workaround for colorama (0.3.6),
# which is called by sphinx (build CHM docs).
self.closed = False
def flush(self):
pass
def isatty(self):
return True
def write(self, data):
try:
self.buf += data
except UnicodeError:
self.buf += data.decode('mbcs')
lines = self.buf.split("\n")
        for line in lines[:-1]:
line = (self.indent * 4 * " ") + line.rstrip()
self.logger(line)
self.oldStream.write(line + "\n")
self.buf = lines[-1]
def LogToFile(file):
logging.basicConfig(filename=file, level=logging.DEBUG,)
logging.getLogger().setLevel(20)
sys.stdout = StdHandler(sys.stdout, logging.info)
sys.stderr = StdHandler(sys.stderr, logging.error)
def SetIndent(level):
StdHandler.indent = level
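if __name__ == "__main__":
    # Minimal usage sketch (an assumption, not part of the original module):
    # mirror stdout/stderr into a hypothetical "build.log" while still
    # echoing every line to the console.
    LogToFile("build.log")
    print("this line goes to build.log and to the console")
    SetIndent(1)
    print("this line is prefixed with four spaces of indent")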
|
gpl-2.0
|
jgraham/servo
|
tests/wpt/web-platform-tests/tools/six/six.py
|
426
|
27961
|
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2014 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.8.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return iter(d.iterkeys(**kw))
def itervalues(d, **kw):
return iter(d.itervalues(**kw))
def iteritems(d, **kw):
return iter(d.iteritems(**kw))
def iterlists(d, **kw):
return iter(d.iterlists(**kw))
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
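# Illustrative usage sketch (not part of six itself): ``with_metaclass`` lets
# one class statement work under both Python 2 and 3, whose metaclass syntax
# differs. Assuming a metaclass ``Meta``:
#
#     class Meta(type):
#         pass
#
#     class Base(with_metaclass(Meta, object)):
#         pass
#
#     assert type(Base) is Meta   # holds on Python 2 and 3 alike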
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
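# Illustrative usage sketch (not part of six itself): the decorator form
# achieves the same effect by rebuilding the class under the metaclass:
#
#     @add_metaclass(Meta)
#     class Other(object):
#         pass
#
#     assert type(Other) is Meta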
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
|
mpl-2.0
|
ygenc/onlineLDA
|
onlineldavb_new/build/scipy/scipy/integrate/_ode.py
|
5
|
28343
|
# Authors: Pearu Peterson, Pauli Virtanen, John Travers
"""
First-order ODE integrators.
User-friendly interface to various numerical integrators for solving a
system of first order ODEs with prescribed initial conditions::
d y(t)[i]
--------- = f(t,y(t))[i],
d t
y(t=0)[i] = y0[i],
where::
i = 0, ..., len(y0) - 1
class ode
---------
A generic interface class to numeric integrators. It has the following
methods::
integrator = ode(f,jac=None)
integrator = integrator.set_integrator(name,**params)
integrator = integrator.set_initial_value(y0,t0=0.0)
integrator = integrator.set_f_params(*args)
integrator = integrator.set_jac_params(*args)
y1 = integrator.integrate(t1,step=0,relax=0)
flag = integrator.successful()
class complex_ode
-----------------
This class has the same generic interface as ode, except it can handle complex
f, y and Jacobians by transparently translating them into the equivalent
real-valued system. It supports the real-valued solvers (i.e. not zvode) and is
an alternative to ode with the zvode solver, sometimes performing better.
"""
# XXX: Integrators must have:
# ===========================
# cvode - C version of vode and vodpk with many improvements.
# Get it from http://www.netlib.org/ode/cvode.tar.gz
# To wrap cvode for Python, one must write the extension module by
# hand. Its interface relies on too much 'advanced C' for f2py to
# handle, so wrapping it that way would be too complicated (or impossible).
#
# How to define a new integrator:
# ===============================
#
# class myodeint(IntegratorBase):
#
# runner = <odeint function> or None
#
# def __init__(self,...): # required
# <initialize>
#
# def reset(self,n,has_jac): # optional
# # n - the size of the problem (number of equations)
# # has_jac - whether user has supplied its own routine for Jacobian
# <allocate memory,initialize further>
#
# def run(self,f,jac,y0,t0,t1,f_params,jac_params): # required
# # this method is called to integrate from t=t0 to t=t1
# # with initial condition y0. f and jac are user-supplied functions
# # that define the problem. f_params,jac_params are additional
# # arguments
# # to these functions.
# <calculate y1>
# if <calculation was unsuccessful>:
# self.success = 0
# return y1, t1
#
# # In addition, one can define step() and run_relax() methods (they
# # take the same arguments as run()) if the integrator can support
# # these features (see IntegratorBase doc strings).
#
# if myodeint.runner:
# IntegratorBase.integrator_classes.append(myodeint)
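#
# Concrete sketch (illustrative only, not part of this module): a minimal
# fixed-step forward Euler integrator following the template above. It sets
# `runner` to a truthy placeholder since it needs no compiled backend.
#
#     class euler(IntegratorBase):
#         runner = True
#
#         def __init__(self, dt=1e-3):
#             self.dt = dt
#             self.success = 1
#
#         def run(self, f, jac, y0, t0, t1, f_params, jac_params):
#             y, t = asarray(y0, float), t0
#             while t < t1:
#                 h = min(self.dt, t1 - t)
#                 y = y + h * asarray(f(t, y, *f_params))
#                 t = t + h
#             return y, t
#
#     if euler.runner:
#         IntegratorBase.integrator_classes.append(euler)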
__all__ = ['ode', 'complex_ode']
__version__ = "$Id$"
__docformat__ = "restructuredtext en"
import re
import warnings
from numpy import asarray, array, zeros, int32, isscalar, real, imag
import vode as _vode
import _dop
#------------------------------------------------------------------------------
# User interface
#------------------------------------------------------------------------------
class ode(object):
"""\
A generic interface class to numeric integrators.
Solve an equation system :math:`y'(t) = f(t,y)` with (optional) ``jac = df/dy``.
Parameters
----------
f : callable f(t, y, *f_args)
Rhs of the equation. t is a scalar, y.shape == (n,).
f_args is set by calling set_f_params(*args)
jac : callable jac(t, y, *jac_args)
Jacobian of the rhs, jac[i,j] = d f[i] / d y[j]
        jac_args is set by calling set_jac_params(*args)
Attributes
----------
t : float
Current time
y : ndarray
Current variable values
See also
--------
odeint : an integrator with a simpler interface based on lsoda from ODEPACK
quad : for finding the area under a curve
Notes
-----
Available integrators are listed below. They can be selected using
the `set_integrator` method.
"vode"
Real-valued Variable-coefficient Ordinary Differential Equation
solver, with fixed-leading-coefficient implementation. It provides
implicit Adams method (for non-stiff problems) and a method based on
backward differentiation formulas (BDF) (for stiff problems).
Source: http://www.netlib.org/ode/vode.f
.. warning::
This integrator is not re-entrant. You cannot have two `ode`
instances using the "vode" integrator at the same time.
This integrator accepts the following parameters in `set_integrator`
method of the `ode` class:
- atol : float or sequence
absolute tolerance for solution
- rtol : float or sequence
relative tolerance for solution
- lband : None or int
    - uband : None or int
      Jacobian band width, jac[i,j] != 0 for i-lband <= j <= i+uband.
Setting these requires your jac routine to return the jacobian
in packed format, jac_packed[i-j+lband, j] = jac[i,j].
- method: 'adams' or 'bdf'
Which solver to use, Adams (non-stiff) or BDF (stiff)
- with_jacobian : bool
Whether to use the jacobian
- nsteps : int
Maximum number of (internally defined) steps allowed during one
call to the solver.
- first_step : float
- min_step : float
- max_step : float
Limits for the step sizes used by the integrator.
- order : int
Maximum order used by the integrator,
order <= 12 for Adams, <= 5 for BDF.
"zvode"
Complex-valued Variable-coefficient Ordinary Differential Equation
solver, with fixed-leading-coefficient implementation. It provides
implicit Adams method (for non-stiff problems) and a method based on
backward differentiation formulas (BDF) (for stiff problems).
Source: http://www.netlib.org/ode/zvode.f
.. warning::
This integrator is not re-entrant. You cannot have two `ode`
instances using the "zvode" integrator at the same time.
This integrator accepts the same parameters in `set_integrator`
as the "vode" solver.
.. note::
When using ZVODE for a stiff system, it should only be used for
the case in which the function f is analytic, that is, when each f(i)
is an analytic function of each y(j). Analyticity means that the
partial derivative df(i)/dy(j) is a unique complex number, and this
fact is critical in the way ZVODE solves the dense or banded linear
systems that arise in the stiff case. For a complex stiff ODE system
in which f is not analytic, ZVODE is likely to have convergence
failures, and for this problem one should instead use DVODE on the
equivalent real system (in the real and imaginary parts of y).
"dopri5"
This is an explicit runge-kutta method of order (4)5 due to Dormand &
Prince (with stepsize control and dense output).
Authors:
E. Hairer and G. Wanner
Universite de Geneve, Dept. de Mathematiques
CH-1211 Geneve 24, Switzerland
e-mail: [email protected], [email protected]
This code is described in [HNW93]_.
This integrator accepts the following parameters in set_integrator()
method of the ode class:
- atol : float or sequence
absolute tolerance for solution
- rtol : float or sequence
relative tolerance for solution
- nsteps : int
Maximum number of (internally defined) steps allowed during one
call to the solver.
- first_step : float
- max_step : float
- safety : float
Safety factor on new step selection (default 0.9)
- ifactor : float
- dfactor : float
Maximum factor to increase/decrease step size by in one step
- beta : float
Beta parameter for stabilised step size control.
"dop853"
This is an explicit runge-kutta method of order 8(5,3) due to Dormand
& Prince (with stepsize control and dense output).
Options and references the same as "dopri5".
Examples
--------
A problem to integrate and the corresponding jacobian:
>>> from scipy.integrate import ode
>>>
>>> y0, t0 = [1.0j, 2.0], 0
>>>
    >>> def f(t, y, arg1):
    ...     return [1j*arg1*y[0] + y[1], -arg1*y[1]**2]
    >>> def jac(t, y, arg1):
    ...     return [[1j*arg1, 1], [0, -arg1*2*y[1]]]
The integration:
>>> r = ode(f, jac).set_integrator('zvode', method='bdf', with_jacobian=True)
>>> r.set_initial_value(y0, t0).set_f_params(2.0).set_jac_params(2.0)
>>> t1 = 10
>>> dt = 1
    >>> while r.successful() and r.t < t1:
    ...     r.integrate(r.t+dt)
    ...     print r.t, r.y
References
----------
.. [HNW93] E. Hairer, S.P. Norsett and G. Wanner, Solving Ordinary
Differential Equations i. Nonstiff Problems. 2nd edition.
Springer Series in Computational Mathematics,
Springer-Verlag (1993)
"""
def __init__(self, f, jac=None):
self.stiff = 0
self.f = f
self.jac = jac
self.f_params = ()
self.jac_params = ()
self.y = []
def set_initial_value(self, y, t=0.0):
"""Set initial conditions y(t) = y."""
if isscalar(y):
y = [y]
n_prev = len(self.y)
if not n_prev:
self.set_integrator('') # find first available integrator
self.y = asarray(y, self._integrator.scalar)
self.t = t
self._integrator.reset(len(self.y), self.jac is not None)
return self
def set_integrator(self, name, **integrator_params):
"""
Set integrator by name.
Parameters
----------
name : str
Name of the integrator.
integrator_params :
Additional parameters for the integrator.
"""
integrator = find_integrator(name)
if integrator is None:
            # FIXME: this really should raise an exception. Will that break
            # any code?
            warnings.warn('No integrator found matching %r, or it is not '
                          'available.' % name)
else:
self._integrator = integrator(**integrator_params)
if not len(self.y):
self.t = 0.0
self.y = array([0.0], self._integrator.scalar)
self._integrator.reset(len(self.y), self.jac is not None)
return self
def integrate(self, t, step=0, relax=0):
"""Find y=y(t), set y as an initial condition, and return y."""
if step and self._integrator.supports_step:
mth = self._integrator.step
elif relax and self._integrator.supports_run_relax:
mth = self._integrator.run_relax
else:
mth = self._integrator.run
self.y, self.t = mth(self.f, self.jac or (lambda: None),
self.y, self.t, t,
self.f_params, self.jac_params)
return self.y
def successful(self):
"""Check if integration was successful."""
try:
self._integrator
except AttributeError:
self.set_integrator('')
return self._integrator.success == 1
def set_f_params(self, *args):
"""Set extra parameters for user-supplied function f."""
self.f_params = args
return self
def set_jac_params(self, *args):
"""Set extra parameters for user-supplied function jac."""
self.jac_params = args
return self
class complex_ode(ode):
"""
A wrapper of ode for complex systems.
This functions similarly as `ode`, but re-maps a complex-valued
equation system to a real-valued one before using the integrators.
Parameters
----------
f : callable f(t, y, *f_args)
Rhs of the equation. t is a scalar, y.shape == (n,).
f_args is set by calling set_f_params(*args)
    jac : callable jac(t, y, *jac_args)
        Jacobian of the rhs, jac[i,j] = d f[i] / d y[j]
        jac_args is set by calling set_jac_params(*args)
Attributes
----------
t : float
Current time
y : ndarray
Current variable values
Examples
--------
For usage examples, see `ode`.
"""
def __init__(self, f, jac=None):
self.cf = f
self.cjac = jac
if jac is not None:
ode.__init__(self, self._wrap, self._wrap_jac)
else:
ode.__init__(self, self._wrap, None)
def _wrap(self, t, y, *f_args):
f = self.cf(*((t, y[::2] + 1j * y[1::2]) + f_args))
self.tmp[::2] = real(f)
self.tmp[1::2] = imag(f)
return self.tmp
def _wrap_jac(self, t, y, *jac_args):
jac = self.cjac(*((t, y[::2] + 1j * y[1::2]) + jac_args))
self.jac_tmp[1::2, 1::2] = self.jac_tmp[::2, ::2] = real(jac)
self.jac_tmp[1::2, ::2] = imag(jac)
self.jac_tmp[::2, 1::2] = -self.jac_tmp[1::2, ::2]
return self.jac_tmp
def set_integrator(self, name, **integrator_params):
"""
Set integrator by name.
Parameters
----------
name : str
Name of the integrator
integrator_params :
Additional parameters for the integrator.
"""
if name == 'zvode':
raise ValueError("zvode should be used with ode, not zode")
return ode.set_integrator(self, name, **integrator_params)
def set_initial_value(self, y, t=0.0):
"""Set initial conditions y(t) = y."""
y = asarray(y)
self.tmp = zeros(y.size * 2, 'float')
self.tmp[::2] = real(y)
self.tmp[1::2] = imag(y)
if self.cjac is not None:
self.jac_tmp = zeros((y.size * 2, y.size * 2), 'float')
return ode.set_initial_value(self, self.tmp, t)
def integrate(self, t, step=0, relax=0):
"""Find y=y(t), set y as an initial condition, and return y."""
y = ode.integrate(self, t, step, relax)
return y[::2] + 1j * y[1::2]
#------------------------------------------------------------------------------
# ODE integrators
#------------------------------------------------------------------------------
def find_integrator(name):
for cl in IntegratorBase.integrator_classes:
if re.match(name, cl.__name__, re.I):
return cl
return None
class IntegratorConcurrencyError(RuntimeError):
"""
Failure due to concurrent usage of an integrator that can be used
only for a single problem at a time.
"""
def __init__(self, name):
msg = ("Integrator `%s` can be used to solve only a single problem "
"at a time. If you want to integrate multiple problems, "
"consider using a different integrator "
"(see `ode.set_integrator`)") % name
RuntimeError.__init__(self, msg)
class IntegratorBase(object):
runner = None # runner is None => integrator is not available
success = None # success==1 if integrator was called successfully
supports_run_relax = None
supports_step = None
integrator_classes = []
scalar = float
def acquire_new_handle(self):
# Some of the integrators have internal state (ancient
# Fortran...), and so only one instance can use them at a time.
# We keep track of this, and fail when concurrent usage is tried.
self.__class__.active_global_handle += 1
self.handle = self.__class__.active_global_handle
def check_handle(self):
if self.handle is not self.__class__.active_global_handle:
raise IntegratorConcurrencyError(self.__class__.__name__)
def reset(self, n, has_jac):
"""Prepare integrator for call: allocate memory, set flags, etc.
n - number of equations.
has_jac - if user has supplied function for evaluating Jacobian.
"""
def run(self, f, jac, y0, t0, t1, f_params, jac_params):
"""Integrate from t=t0 to t=t1 using y0 as an initial condition.
        Return a 2-tuple (y1, t1) where y1 is the result and t1 is the
        time at which the integration actually stopped.
"""
raise NotImplementedError('all integrators must define '
                                  'run(f, jac, y0, t0, t1, f_params, jac_params)')
def step(self, f, jac, y0, t0, t1, f_params, jac_params):
"""Make one integration step and return (y1,t1)."""
raise NotImplementedError('%s does not support step() method' %
self.__class__.__name__)
def run_relax(self, f, jac, y0, t0, t1, f_params, jac_params):
"""Integrate from t=t0 to t>=t1 and return (y1,t)."""
raise NotImplementedError('%s does not support run_relax() method' %
self.__class__.__name__)
#XXX: __str__ method for getting visual state of the integrator
class vode(IntegratorBase):
runner = getattr(_vode, 'dvode', None)
messages = {-1: 'Excess work done on this call. (Perhaps wrong MF.)',
-2: 'Excess accuracy requested. (Tolerances too small.)',
-3: 'Illegal input detected. (See printed message.)',
-4: 'Repeated error test failures. (Check all input.)',
-5: 'Repeated convergence failures. (Perhaps bad'
' Jacobian supplied or wrong choice of MF or tolerances.)',
-6: 'Error weight became zero during problem. (Solution'
' component i vanished, and ATOL or ATOL(i) = 0.)'
}
supports_run_relax = 1
supports_step = 1
active_global_handle = 0
def __init__(self,
method='adams',
with_jacobian=0,
rtol=1e-6, atol=1e-12,
lband=None, uband=None,
order=12,
nsteps=500,
max_step=0.0, # corresponds to infinite
min_step=0.0,
first_step=0.0, # determined by solver
):
        if re.match(r'adams', method, re.I):
            self.meth = 1
        elif re.match(r'bdf', method, re.I):
self.meth = 2
else:
raise ValueError('Unknown integration method %s' % method)
self.with_jacobian = with_jacobian
self.rtol = rtol
self.atol = atol
self.mu = uband
self.ml = lband
self.order = order
self.nsteps = nsteps
self.max_step = max_step
self.min_step = min_step
self.first_step = first_step
self.success = 1
self.initialized = False
def reset(self, n, has_jac):
# Calculate parameters for Fortran subroutine dvode.
if has_jac:
if self.mu is None and self.ml is None:
miter = 1
else:
if self.mu is None:
self.mu = 0
if self.ml is None:
self.ml = 0
miter = 4
else:
if self.mu is None and self.ml is None:
if self.with_jacobian:
miter = 2
else:
miter = 0
else:
if self.mu is None:
self.mu = 0
if self.ml is None:
self.ml = 0
if self.ml == self.mu == 0:
miter = 3
else:
miter = 5
mf = 10 * self.meth + miter
if mf == 10:
lrw = 20 + 16 * n
elif mf in [11, 12]:
lrw = 22 + 16 * n + 2 * n * n
elif mf == 13:
lrw = 22 + 17 * n
elif mf in [14, 15]:
lrw = 22 + 18 * n + (3 * self.ml + 2 * self.mu) * n
elif mf == 20:
lrw = 20 + 9 * n
elif mf in [21, 22]:
lrw = 22 + 9 * n + 2 * n * n
elif mf == 23:
lrw = 22 + 10 * n
elif mf in [24, 25]:
lrw = 22 + 11 * n + (3 * self.ml + 2 * self.mu) * n
else:
raise ValueError('Unexpected mf=%s' % mf)
if miter in [0, 3]:
liw = 30
else:
liw = 30 + n
rwork = zeros((lrw,), float)
rwork[4] = self.first_step
rwork[5] = self.max_step
rwork[6] = self.min_step
self.rwork = rwork
iwork = zeros((liw,), int32)
if self.ml is not None:
iwork[0] = self.ml
if self.mu is not None:
iwork[1] = self.mu
iwork[4] = self.order
iwork[5] = self.nsteps
iwork[6] = 2 # mxhnil
self.iwork = iwork
self.call_args = [self.rtol, self.atol, 1, 1,
self.rwork, self.iwork, mf]
self.success = 1
self.initialized = False
def run(self, *args):
if self.initialized:
self.check_handle()
else:
self.initialized = True
self.acquire_new_handle()
y1, t, istate = self.runner(*(args[:5] + tuple(self.call_args) +
args[5:]))
if istate < 0:
warnings.warn('vode: ' +
self.messages.get(istate,
'Unexpected istate=%s' % istate))
self.success = 0
else:
self.call_args[3] = 2 # upgrade istate from 1 to 2
return y1, t
def step(self, *args):
itask = self.call_args[2]
self.call_args[2] = 2
r = self.run(*args)
self.call_args[2] = itask
return r
def run_relax(self, *args):
itask = self.call_args[2]
self.call_args[2] = 3
r = self.run(*args)
self.call_args[2] = itask
return r
if vode.runner is not None:
IntegratorBase.integrator_classes.append(vode)
class zvode(vode):
runner = getattr(_vode, 'zvode', None)
supports_run_relax = 1
supports_step = 1
scalar = complex
active_global_handle = 0
def reset(self, n, has_jac):
        # Calculate parameters for Fortran subroutine zvode.
if has_jac:
if self.mu is None and self.ml is None:
miter = 1
else:
if self.mu is None:
self.mu = 0
if self.ml is None:
self.ml = 0
miter = 4
else:
if self.mu is None and self.ml is None:
if self.with_jacobian:
miter = 2
else:
miter = 0
else:
if self.mu is None:
self.mu = 0
if self.ml is None:
self.ml = 0
if self.ml == self.mu == 0:
miter = 3
else:
miter = 5
mf = 10 * self.meth + miter
if mf in (10,):
lzw = 15 * n
elif mf in (11, 12):
lzw = 15 * n + 2 * n ** 2
elif mf in (-11, -12):
lzw = 15 * n + n ** 2
elif mf in (13,):
lzw = 16 * n
elif mf in (14, 15):
lzw = 17 * n + (3 * self.ml + 2 * self.mu) * n
elif mf in (-14, -15):
lzw = 16 * n + (2 * self.ml + self.mu) * n
elif mf in (20,):
lzw = 8 * n
elif mf in (21, 22):
lzw = 8 * n + 2 * n ** 2
elif mf in (-21, -22):
lzw = 8 * n + n ** 2
elif mf in (23,):
lzw = 9 * n
elif mf in (24, 25):
lzw = 10 * n + (3 * self.ml + 2 * self.mu) * n
        elif mf in (-24, -25):
            lzw = 9 * n + (2 * self.ml + self.mu) * n
        else:
            raise ValueError('Unexpected mf=%s' % mf)
lrw = 20 + n
if miter in (0, 3):
liw = 30
else:
liw = 30 + n
zwork = zeros((lzw,), complex)
self.zwork = zwork
rwork = zeros((lrw,), float)
rwork[4] = self.first_step
rwork[5] = self.max_step
rwork[6] = self.min_step
self.rwork = rwork
iwork = zeros((liw,), int32)
if self.ml is not None:
iwork[0] = self.ml
if self.mu is not None:
iwork[1] = self.mu
iwork[4] = self.order
iwork[5] = self.nsteps
iwork[6] = 2 # mxhnil
self.iwork = iwork
self.call_args = [self.rtol, self.atol, 1, 1,
self.zwork, self.rwork, self.iwork, mf]
self.success = 1
self.initialized = False
def run(self, *args):
if self.initialized:
self.check_handle()
else:
self.initialized = True
self.acquire_new_handle()
y1, t, istate = self.runner(*(args[:5] + tuple(self.call_args) +
args[5:]))
if istate < 0:
warnings.warn('zvode: ' +
self.messages.get(istate, 'Unexpected istate=%s' % istate))
self.success = 0
else:
self.call_args[3] = 2 # upgrade istate from 1 to 2
return y1, t
if zvode.runner is not None:
IntegratorBase.integrator_classes.append(zvode)
class dopri5(IntegratorBase):
runner = getattr(_dop, 'dopri5', None)
name = 'dopri5'
messages = {1: 'computation successful',
2: 'comput. successful (interrupted by solout)',
-1: 'input is not consistent',
-2: 'larger nmax is needed',
-3: 'step size becomes too small',
-4: 'problem is probably stiff (interrupted)',
}
def __init__(self,
rtol=1e-6, atol=1e-12,
nsteps=500,
max_step=0.0,
first_step=0.0, # determined by solver
safety=0.9,
ifactor=10.0,
dfactor=0.2,
beta=0.0,
method=None
):
self.rtol = rtol
self.atol = atol
self.nsteps = nsteps
self.max_step = max_step
self.first_step = first_step
self.safety = safety
self.ifactor = ifactor
self.dfactor = dfactor
self.beta = beta
self.success = 1
def reset(self, n, has_jac):
work = zeros((8 * n + 21,), float)
work[1] = self.safety
work[2] = self.dfactor
work[3] = self.ifactor
work[4] = self.beta
work[5] = self.max_step
work[6] = self.first_step
self.work = work
iwork = zeros((21,), int32)
iwork[0] = self.nsteps
self.iwork = iwork
self.call_args = [self.rtol, self.atol, self._solout,
self.work, self.iwork]
self.success = 1
def run(self, f, jac, y0, t0, t1, f_params, jac_params):
x, y, iwork, idid = self.runner(*((f, t0, y0, t1) +
tuple(self.call_args) + (f_params,)))
if idid < 0:
warnings.warn(self.name + ': ' +
self.messages.get(idid, 'Unexpected idid=%s' % idid))
self.success = 0
return y, x
def _solout(self, *args):
# dummy solout function
pass
if dopri5.runner is not None:
IntegratorBase.integrator_classes.append(dopri5)
class dop853(dopri5):
runner = getattr(_dop, 'dop853', None)
name = 'dop853'
def __init__(self,
rtol=1e-6, atol=1e-12,
nsteps=500,
max_step=0.0,
first_step=0.0, # determined by solver
safety=0.9,
ifactor=6.0,
dfactor=0.3,
beta=0.0,
method=None
):
self.rtol = rtol
self.atol = atol
self.nsteps = nsteps
self.max_step = max_step
self.first_step = first_step
self.safety = safety
self.ifactor = ifactor
self.dfactor = dfactor
self.beta = beta
self.success = 1
def reset(self, n, has_jac):
work = zeros((11 * n + 21,), float)
work[1] = self.safety
work[2] = self.dfactor
work[3] = self.ifactor
work[4] = self.beta
work[5] = self.max_step
work[6] = self.first_step
self.work = work
iwork = zeros((21,), int32)
iwork[0] = self.nsteps
self.iwork = iwork
self.call_args = [self.rtol, self.atol, self._solout,
self.work, self.iwork]
self.success = 1
if dop853.runner is not None:
IntegratorBase.integrator_classes.append(dop853)
|
gpl-3.0
|
brianmhunt/SIWorldMap
|
werkzeug/script.py
|
89
|
11151
|
# -*- coding: utf-8 -*-
r'''
werkzeug.script
~~~~~~~~~~~~~~~
.. admonition:: Deprecated Functionality
``werkzeug.script`` is deprecated without replacement functionality.
Python's command line support improved greatly with :mod:`argparse`
and a bunch of alternative modules.
Most of the time you have recurring tasks while writing an application
such as starting up an interactive python interpreter with some prefilled
imports, starting the development server, initializing the database or
something similar.
For that purpose werkzeug provides the `werkzeug.script` module which
helps you writing such scripts.
Basic Usage
-----------
The following snippet is roughly the same in every werkzeug script::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from werkzeug import script
# actions go here
if __name__ == '__main__':
script.run()
Starting this script now does nothing because no actions are defined.
An action is a function in the same module starting with ``"action_"``
which takes a number of arguments where every argument has a default. The
type of the default value specifies the type of the argument.
Arguments can then be passed by position or using ``--name=value`` from
the shell.
Because a runserver and shell command is pretty common there are two
factory functions that create such commands::
def make_app():
from yourapplication import YourApplication
return YourApplication(...)
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(lambda: {'app': make_app()})
Using The Scripts
-----------------
The script from above can be used like this from the shell now:
.. sourcecode:: text
$ ./manage.py --help
$ ./manage.py runserver localhost 8080 --debugger --no-reloader
$ ./manage.py runserver -p 4000
$ ./manage.py shell
As you can see it's possible to pass parameters as positional arguments
or as named parameters, pretty much like Python function calls.
:copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
'''
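# Illustrative sketch (not part of werkzeug): a complete action following the
# conventions described in the docstring above. Argument defaults determine
# the option types, and a ``(shortcut, default)`` tuple adds a short option:
#
#     def action_greet(name='world', times=('t', 1)):
#         """Greet somebody a number of times."""
#         for _ in range(times):
#             print 'Hello, %s!' % name
#
# which could then be invoked as ``./manage.py greet --name=Bob -t 3``.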
import sys
import inspect
import getopt
from os.path import basename
argument_types = {
bool: 'boolean',
str: 'string',
int: 'integer',
float: 'float'
}
converters = {
'boolean': lambda x: x.lower() in ('1', 'true', 'yes', 'on'),
'string': str,
'integer': int,
'float': float
}
def run(namespace=None, action_prefix='action_', args=None):
"""Run the script. Participating actions are looked up in the caller's
namespace if no namespace is given, otherwise in the dict provided.
Only items that start with action_prefix are processed as actions. If
you want to use all items in the namespace provided as actions set
action_prefix to an empty string.
:param namespace: An optional dict where the functions are looked up in.
By default the local namespace of the caller is used.
:param action_prefix: The prefix for the functions. Everything else
is ignored.
:param args: the arguments for the function. If not specified
:data:`sys.argv` without the first argument is used.
"""
if namespace is None:
namespace = sys._getframe(1).f_locals
actions = find_actions(namespace, action_prefix)
if args is None:
args = sys.argv[1:]
if not args or args[0] in ('-h', '--help'):
return print_usage(actions)
elif args[0] not in actions:
fail('Unknown action \'%s\'' % args[0])
arguments = {}
types = {}
key_to_arg = {}
long_options = []
formatstring = ''
func, doc, arg_def = actions[args.pop(0)]
for idx, (arg, shortcut, default, option_type) in enumerate(arg_def):
real_arg = arg.replace('-', '_')
if shortcut:
formatstring += shortcut
if not isinstance(default, bool):
formatstring += ':'
key_to_arg['-' + shortcut] = real_arg
long_options.append(isinstance(default, bool) and arg or arg + '=')
key_to_arg['--' + arg] = real_arg
key_to_arg[idx] = real_arg
types[real_arg] = option_type
arguments[real_arg] = default
try:
optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options)
except getopt.GetoptError, e:
fail(str(e))
specified_arguments = set()
for key, value in enumerate(posargs):
try:
arg = key_to_arg[key]
        except KeyError:
fail('Too many parameters')
specified_arguments.add(arg)
try:
arguments[arg] = converters[types[arg]](value)
except ValueError:
fail('Invalid value for argument %s (%s): %s' % (key, arg, value))
for key, value in optlist:
arg = key_to_arg[key]
if arg in specified_arguments:
fail('Argument \'%s\' is specified twice' % arg)
if types[arg] == 'boolean':
if arg.startswith('no_'):
value = 'no'
else:
value = 'yes'
try:
arguments[arg] = converters[types[arg]](value)
except ValueError:
fail('Invalid value for \'%s\': %s' % (key, value))
newargs = {}
for k, v in arguments.iteritems():
newargs[k.startswith('no_') and k[3:] or k] = v
arguments = newargs
return func(**arguments)
def fail(message, code=-1):
"""Fail with an error."""
print >> sys.stderr, 'Error:', message
sys.exit(code)
def find_actions(namespace, action_prefix):
"""Find all the actions in the namespace."""
actions = {}
for key, value in namespace.iteritems():
if key.startswith(action_prefix):
actions[key[len(action_prefix):]] = analyse_action(value)
return actions
def print_usage(actions):
"""Print the usage information. (Help screen)"""
actions = actions.items()
actions.sort()
print 'usage: %s <action> [<options>]' % basename(sys.argv[0])
print ' %s --help' % basename(sys.argv[0])
print
print 'actions:'
for name, (func, doc, arguments) in actions:
print ' %s:' % name
for line in doc.splitlines():
print ' %s' % line
if arguments:
print
for arg, shortcut, default, argtype in arguments:
if isinstance(default, bool):
print ' %s' % (
(shortcut and '-%s, ' % shortcut or '') + '--' + arg
)
else:
print ' %-30s%-10s%s' % (
(shortcut and '-%s, ' % shortcut or '') + '--' + arg,
argtype, default
)
print
def analyse_action(func):
"""Analyse a function."""
description = inspect.getdoc(func) or 'undocumented action'
arguments = []
args, varargs, kwargs, defaults = inspect.getargspec(func)
if varargs or kwargs:
raise TypeError('variable length arguments for action not allowed.')
if len(args) != len(defaults or ()):
raise TypeError('not all arguments have proper definitions')
for idx, (arg, definition) in enumerate(zip(args, defaults or ())):
if arg.startswith('_'):
raise TypeError('arguments may not start with an underscore')
if not isinstance(definition, tuple):
shortcut = None
default = definition
else:
shortcut, default = definition
argument_type = argument_types[type(default)]
if isinstance(default, bool) and default is True:
arg = 'no-' + arg
arguments.append((arg.replace('_', '-'), shortcut,
default, argument_type))
return func, description, arguments
def make_shell(init_func=None, banner=None, use_ipython=True):
"""Returns an action callback that spawns a new interactive
python shell.
:param init_func: an optional initialization function that is
called before the shell is started. The return
value of this function is the initial namespace.
:param banner: the banner that is displayed before the shell. If
not specified a generic banner is used instead.
:param use_ipython: if set to `True` ipython is used if available.
"""
if banner is None:
banner = 'Interactive Werkzeug Shell'
if init_func is None:
init_func = dict
def action(ipython=use_ipython):
"""Start a new interactive python session."""
namespace = init_func()
if ipython:
try:
try:
from IPython.frontend.terminal.embed import InteractiveShellEmbed
sh = InteractiveShellEmbed(banner1=banner)
except ImportError:
from IPython.Shell import IPShellEmbed
sh = IPShellEmbed(banner=banner)
except ImportError:
pass
else:
sh(global_ns={}, local_ns=namespace)
return
from code import interact
interact(banner, local=namespace)
return action
def make_runserver(app_factory, hostname='localhost', port=5000,
use_reloader=False, use_debugger=False, use_evalex=True,
threaded=False, processes=1, static_files=None,
extra_files=None, ssl_context=None):
"""Returns an action callback that spawns a new development server.
.. versionadded:: 0.5
       `static_files` and `extra_files` were added.
    .. versionadded:: 0.6.1
       `ssl_context` was added.
:param app_factory: a function that returns a new WSGI application.
:param hostname: the default hostname the server should listen on.
:param port: the default port of the server.
:param use_reloader: the default setting for the reloader.
:param use_evalex: the default setting for the evalex flag of the debugger.
:param threaded: the default threading setting.
:param processes: the default number of processes to start.
:param static_files: optional dict of static files.
:param extra_files: optional list of extra files to track for reloading.
:param ssl_context: optional SSL context for running server in HTTPS mode.
"""
def action(hostname=('h', hostname), port=('p', port),
reloader=use_reloader, debugger=use_debugger,
evalex=use_evalex, threaded=threaded, processes=processes):
"""Start a new development server."""
from werkzeug.serving import run_simple
app = app_factory()
run_simple(hostname, port, app, reloader, debugger, evalex,
extra_files, 1, threaded, processes,
static_files=static_files, ssl_context=ssl_context)
return action
|
mit
|
ner0x652/RElief
|
elfie.py
|
1
|
6773
|
#!/usr/bin/env python3
import lief
import sys
import termcolor as tc
def get_typeval_as_str(lief_type):
return str(lief_type).split('.')[1]
def show_name(binary):
print(tc.colored("[::] Name", "blue"))
print(binary.name)
def enum_header(header):
def get_ident_props():
identity = "\n{0:18} {1}".format("\t\tClass:", get_typeval_as_str(header.identity_class))
identity += "\n{0:18} {1}".format("\t\tData:", get_typeval_as_str(header.identity_data))
identity += "\n{0:18} {1}".format("\t\tOS ABI:", get_typeval_as_str(header.identity_os_abi))
identity += "\n{0:18} {1}".format("\t\tVersion:", get_typeval_as_str(header.identity_version))
identity += "\n{0:18} {1}".format("\t\tMachine:", get_typeval_as_str(header.machine_type))
return identity
print(tc.colored("[::] Header", "blue"))
print(tc.colored("{0:25} {1}".format("\tEntrypoint:", hex(header.entrypoint)), "green"))
print(tc.colored("{0:25} {1}".format("\tFile type:", get_typeval_as_str(header.file_type)), "green"))
print(tc.colored("{0:25} {1}".format("\tHeader size:", hex(header.header_size)), "green"))
print(tc.colored("{0:25} {1}".format("\tIdentity:", get_ident_props()), "cyan"))
print(tc.colored("{0:25} {1}".format("\tNumber of sections:", header.numberof_sections), "green"))
print(tc.colored("{0:25} {1}".format("\tNumber of segments:", header.numberof_segments), "green"))
print(tc.colored("{0:25} {1}".format("\tObject file version:", get_typeval_as_str(header.object_file_version)), "green"))
print(tc.colored("{0:25} {1}".format("\tProcessor flag:", header.processor_flag), "green"))
print(tc.colored("{0:25} {1}".format("\tProgram header offset:", hex(header.program_header_offset)), "green"))
print(tc.colored("{0:25} {1}".format("\tProgram header size:", hex(header.program_header_size)), "green"))
print(tc.colored("{0:25} {1}".format("\tSection header offset:", hex(header.section_header_offset)), "green"))
print(tc.colored("{0:25} {1}".format("\tSection name table idx:", hex(header.section_name_table_idx)), "green"))
print(tc.colored("{0:25} {1}".format("\tSection header size:", hex(header.section_header_size)), "green"))
def show_interpreter(binary):
print(tc.colored("[::] Interpreter/loader", "blue"))
if binary.has_interpreter:
print(binary.interpreter)
else:
print(tc.colored("No interpreter/loader", "yellow"))
def show_notes(binary):
print(tc.colored("[::] Notes section", "blue"))
if binary.has_notes:
for n in binary.notes:
print(n)
else:
print(tc.colored("No notes section", "yellow"))
def enum_dyn_entries(binary):
print(tc.colored("[::] Dynamic entries", "blue"))
for e in binary.dynamic_entries:
print(e)
def enum_dyn_relocs(binary):
print(tc.colored("[::] Dynamic relocations", "blue"))
for r in binary.dynamic_relocations:
print(r)
def enum_exp_funcs(binary):
print(tc.colored("[::] Exported functions", "blue"))
for f in binary.exported_functions:
print(f)
def enum_exp_symbols(binary):
print(tc.colored("[::] Exported symbols", "blue"))
for s in binary.exported_symbols:
print(s)
def enum_imp_functions(binary):
print(tc.colored("[::] Imported functions", "blue"))
for f in binary.imported_functions:
print(f)
def enum_imp_symbols(binary):
print(tc.colored("[::] Imported symbols", "blue"))
for s in binary.imported_symbols:
print(s)
def enum_libraries(binary):
print(tc.colored("[::] Libraries", "blue"))
for l in binary.libraries:
print(l)
def enum_sections(binary):
print(tc.colored("[::] Sections", "blue"))
for s in binary.sections:
print(s)
# Properties
print(tc.colored("\t{0:15} {1}".format("Alignment", hex(s.alignment)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Entropy", s.entropy), "cyan"))
print(tc.colored("\t{0:15} {1}".format("File offset", hex(s.file_offset)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Flags", s.flags), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Information", s.information), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Link", s.link), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Name index", s.name_idx), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Offset", hex(s.offset)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Original size", hex(s.original_size)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Size", hex(s.size)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Type", get_typeval_as_str(s.type)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Virtual addr", hex(s.virtual_address)), "cyan"))
def enum_segments(binary):
print(tc.colored("[::] Segments", "blue"))
for s in binary.segments:
print(s)
# Properties
print(tc.colored("\t{0:15} {1}".format("Alignment", hex(s.alignment)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("File offset", hex(s.file_offset)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Flags", s.flags), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Type", get_typeval_as_str(s.type)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Virtual addr", hex(s.virtual_address)), "cyan"))
print(tc.colored("\t{0:15} {1}".format("Virtual size", hex(s.virtual_size)), "cyan"))
def run():
if len(sys.argv) < 2:
print("[USAGE]: {0} <executable>".format(sys.argv[0]))
sys.exit(1)
try:
binary = lief.ELF.parse(sys.argv[1])
except lief.bad_file as err:
print("Error: {0}".format(err))
sys.exit(1)
show_name(binary)
enum_header(binary.header)
enum_dyn_entries(binary)
enum_dyn_relocs(binary)
enum_exp_funcs(binary)
enum_exp_symbols(binary)
enum_imp_functions(binary)
enum_imp_symbols(binary)
enum_libraries(binary)
show_notes(binary)
show_interpreter(binary)
enum_sections(binary)
enum_segments(binary)
if __name__ == "__main__":
run()
|
mit
|
angstwad/ansible
|
lib/ansible/galaxy/token.py
|
68
|
2167
|
#!/usr/bin/env python
########################################################################
#
# (C) 2015, Chris Houseknecht <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import yaml
from stat import *
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyToken(object):
    ''' Class for storing and retrieving a token in ~/.ansible_galaxy '''
def __init__(self):
self.file = os.path.expanduser("~") + '/.ansible_galaxy'
self.config = yaml.safe_load(self.__open_config_for_read())
if not self.config:
self.config = {}
def __open_config_for_read(self):
if os.path.isfile(self.file):
display.vvv('Opened %s' % self.file)
return open(self.file, 'r')
        # token file not found: create it and chmod u+rw
f = open(self.file,'w')
f.close()
os.chmod(self.file,S_IRUSR|S_IWUSR) # owner has +rw
display.vvv('Created %s' % self.file)
return open(self.file, 'r')
def set(self, token):
self.config['token'] = token
self.save()
def get(self):
return self.config.get('token', None)
def save(self):
with open(self.file,'w') as f:
yaml.safe_dump(self.config,f,default_flow_style=False)
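# Usage sketch (illustrative, not part of the Ansible sources):
#
#     token = GalaxyToken()
#     token.set('abc123')            # persists to ~/.ansible_galaxy
#     assert token.get() == 'abc123'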
|
gpl-3.0
|
robbiet480/home-assistant
|
tests/components/folder_watcher/test_init.py
|
3
|
1834
|
"""The tests for the folder_watcher component."""
import os
from homeassistant.components import folder_watcher
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def test_invalid_path_setup(hass):
"""Test that an invalid path is not set up."""
assert not await async_setup_component(
hass,
folder_watcher.DOMAIN,
{folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: "invalid_path"}},
)
async def test_valid_path_setup(hass):
"""Test that a valid path is setup."""
cwd = os.path.join(os.path.dirname(__file__))
hass.config.whitelist_external_dirs = {cwd}
with patch.object(folder_watcher, "Watcher"):
assert await async_setup_component(
hass,
folder_watcher.DOMAIN,
{folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: cwd}},
)
def test_event():
"""Check that Home Assistant events are fired correctly on watchdog event."""
class MockPatternMatchingEventHandler:
"""Mock base class for the pattern matcher event handler."""
def __init__(self, patterns):
pass
with patch(
"homeassistant.components.folder_watcher.PatternMatchingEventHandler",
MockPatternMatchingEventHandler,
):
hass = Mock()
handler = folder_watcher.create_event_handler(["*"], hass)
handler.on_created(
Mock(is_directory=False, src_path="/hello/world.txt", event_type="created")
)
assert hass.bus.fire.called
assert hass.bus.fire.mock_calls[0][1][0] == folder_watcher.DOMAIN
assert hass.bus.fire.mock_calls[0][1][1] == {
"event_type": "created",
"path": "/hello/world.txt",
"file": "world.txt",
"folder": "/hello",
}
|
apache-2.0
|
bruinfish/cs118-proj2-pox
|
pox/messenger/__init__.py
|
25
|
19645
|
# Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
The POX Messenger system.
The Messenger system is a way to build services in POX that can be
consumed by external clients.
Sometimes a controller might need to interact with the outside world.
Sometimes you need to integrate with an existing piece of software and
maybe you don't get to choose how you communicate with it. Other times,
you have the opportunity and burden of rolling your own. The Messenger
system is meant to help you with the latter case.
In short, channels are a system for communicating between POX and
external programs by exchanging messages encoded in JSON. It is intended
to be quite general, both in the communication models it supports and in
the transports it supports (as of this writing, it supports a
straightforward TCP socket transport and an HTTP transport). Any
service written to use the Messenger should theoretically be usable via
any transport.
*Connections* are somehow established when a client connects via some
*Transport*. The server can individually send messages to a specific client.
A client can send messages to a *Channel* on the server. A client can also
become a member of a channel, after which it will receive any messages
the server sends to that channel. There is always a default channel with
no name.
Channels can either be permanent or temporary. Temporary channels are
automatically destroyed when they no longer contain any members.
"""
from pox.lib.revent.revent import *
from pox.core import core as core
import json
import time
import random
import hashlib
from base64 import b32encode
log = core.getLogger()
# JSON decoder used by default
defaultDecoder = json.JSONDecoder()
class ChannelJoin (Event):
""" Fired on a channel when a client joins. """
def __init__ (self, connection, channel, msg = {}):
Event.__init__(self)
self.con = connection
self.channel = channel
self.msg = msg
class ConnectionClosed (Event):
""" Fired on a connection when it closes. """
def __init__ (self, connection):
Event.__init__(self)
self.con = connection
class ChannelLeave (Event):
""" Fired on a channel when a client leaves. """
def __init__ (self, connection, channel):
Event.__init__(self)
self.con = connection
self.channel = channel
class ChannelCreate (Event):
""" Fired on a Nexus when a channel is created. """
def __init__ (self, channel):
Event.__init__(self)
self.channel = channel
class ChannelDestroy (Event):
"""
Fired on the channel and its Nexus right before a channel is destroyed.
Set .keep = True to keep the channel after all.
"""
def __init__ (self, channel):
Event.__init__(self)
self.channel = channel
self.keep = False
class ChannelDestroyed (Event):
"""
Fired on the channel and its Nexus right after a channel is destroyed.
"""
def __init__ (self, channel):
Event.__init__(self)
self.channel = channel
class MissingChannel (Event):
"""
  Fired on a Nexus when a message has been received for a non-existent channel.
You can create the channel in response to this.
"""
def __init__ (self, connection, channel_name, msg):
Event.__init__(self)
self.con = connection
self.channel_name = channel_name
self.msg = msg
class MessageReceived (Event):
"""
  Fired by a channel when a message has been received.
Always fired on the Connection itself. Also fired on the corresponding
Channel object as specified by the CHANNEL key.
The listener looks like:
def _handle_MessageReceived (event, msg):
"""
def __init__ (self, connection, channel, msg):
Event.__init__(self)
self.con = connection
self.msg = msg
self.channel = channel
def is_to_channel (self, channel):
"""
Returns True if this message is to the given channel
"""
if isinstance(channel, Channel):
channel = channel.name
if channel == self.channel: return True
if channel in self.channel: return True
return False
def _invoke (self, handler, *args, **kw):
# Special handling -- pass the message
return handler(self, self.msg, *args, **kw)
def _get_nexus (nexus):
if nexus is None: nexus = "MessengerNexus"
if isinstance(nexus, str):
if not core.hasComponent(nexus):
#TODO: Wait for channel Nexus
s = "MessengerNexus %s is not available" % (nexus,)
log.error(s)
raise RuntimeError(s)
return getattr(core, nexus)
assert isinstance(nexus, MessengerNexus)
return nexus
class Transport (object):
def __init__ (self, nexus):
self._nexus = _get_nexus(nexus)
def _forget (self, connection):
""" Forget about a connection """
raise RuntimeError("Not implemented")
class Connection (EventMixin):
"""
Superclass for Connections.
This could actually be a bit thinner, if someone wants to clean it up.
Maintains the state and handles message parsing and dispatch for a
single connection.
"""
_eventMixin_events = set([
MessageReceived,
ConnectionClosed,
])
def __init__ (self, transport):
"""
    transport is the source of the connection (e.g., TCPTransport).
"""
EventMixin.__init__(self)
self._is_connected = True
self._transport = transport
self._newlines = False
# Transports that don't do their own encapsulation can use _recv_raw(),
# which uses this. (Such should probably be broken into a subclass.)
self._buf = bytes()
key,num = self._transport._nexus.generate_session()
self._session_id,self._session_num = key,num
def _send_welcome (self):
"""
Send a message to a client so they know they're connected
"""
self.send({"CHANNEL":"","cmd":"welcome","session_id":self._session_id})
def _close (self):
"""
Called internally to shut the connection down.
"""
if self._is_connected is False: return
self._transport._forget(self)
self._is_connected = False
for name,chan in self._transport._nexus._channels.items():
chan._remove_member(self)
self.raiseEventNoErrors(ConnectionClosed, self)
#self._transport._nexus.raiseEventNoErrors(ConnectionClosed, self)
def send (self, whatever):
"""
Send data over the connection.
It will first be encoded into JSON, and optionally followed with
a newline. Ultimately, it will be passed to send_raw() to actually
be sent.
"""
if self._is_connected is False: return False
s = json.dumps(whatever, default=str)
if self._newlines: s += "\n"
self.send_raw(s)
return True
def send_raw (self, data):
"""
This method should actually send data out over the connection.
Subclasses need to implement this.
"""
raise RuntimeError("Not implemented")
@property
def is_connected (self):
"""
True if this Connection is still connected.
"""
return self._is_connected
def _rx_message (self, msg):
"""
Raises events when a complete message is available.
Subclasses may want to call this when they have a new message
available. See _recv_raw().
"""
e = self.raiseEventNoErrors(MessageReceived,self,msg.get('CHANNEL'),msg)
self._transport._nexus._rx_message(self, msg)
def _rx_raw (self, data):
"""
If your subclass receives a stream instead of discrete messages, this
method can parse out individual messages and call _recv_msg() when
it has full messages.
"""
if len(data) == 0: return
if len(self._buf) == 0:
if data[0].isspace():
self._buf = data.lstrip()
else:
self._buf = data
else:
self._buf += data
while len(self._buf) > 0:
try:
msg, l = defaultDecoder.raw_decode(self._buf)
except:
# Need more data before it's a valid message
# (.. or the stream is corrupt and things will never be okay
# ever again)
return
self._buf = self._buf[l:]
if len(self._buf) != 0 and self._buf[0].isspace():
self._buf = self._buf.lstrip()
self._rx_message(msg)
def __str__ (self):
"""
Subclasses should implement better versions of this.
"""
return "<%s/%s/%i>" % (self.__class__.__name__, self._session_id,
self._session_num)
def close (self):
"""
Close the connection.
"""
self._close()
class Channel (EventMixin):
"""
Allows one to easily listen to only messages that have a CHANNEL key
with a specific name.
Generally you will not create these classes directly, but by calling
getChannel() on the ChannelNexus.
"""
_eventMixin_events = set([
MessageReceived,
    ChannelJoin, # Immediately when a connection goes up
ChannelLeave, # When a connection goes down
ChannelDestroy,
ChannelDestroyed,
])
def __init__ (self, name, nexus = None, temporary = False):
"""
name is the name for the channel (i.e., the value for the messages'
CHANNEL key).
nexus is the specific MessengerNexus with which this channel is to be
associated (defaults to core.MessengerNexus).
"""
EventMixin.__init__(self)
assert isinstance(name, basestring)
self._name = name
self._nexus = _get_nexus(nexus)
self._nexus._channels[name] = self
self.temporary = temporary
self._members = set() # Member Connections
@property
def name (self):
return self._name
def _destroy (self):
""" Remove channel """
e = self.raiseEvent(ChannelDestroy, self)
if e:
if e.keep: return False
self._nexus.raiseEvent(e)
if e.keep: return False
del self._nexus._channels[self._name]
    # We can't just do the following because then listeners
# can't tell if the channel is now empty...
#for sub in set(self._members):
# sub.raiseEvent(ChannelLeave, sub, self)
#
#self._members.clear()
# .. so do the following really straightforward...
for sub in set(self._members):
self._remove_member(sub, allow_destroy = False)
e = ChannelDestroyed(self)
self.raiseEvent(e)
self._nexus.raiseEvent(e)
def _add_member (self, con, msg = {}):
if con in self._members: return
self._members.add(con)
self.raiseEvent(ChannelJoin, con, self, msg)
def _remove_member (self, con, allow_destroy = True):
if con not in self._members: return
self._members.remove(con)
self.raiseEvent(ChannelLeave, con, self)
if not allow_destroy: return
if self.temporary is True:
if len(self._members) == 0:
self._destroy()
def send (self, msg):
d = dict(msg)
d['CHANNEL'] = self._name
for r in self._members:
if not r.is_connected: continue
r.send(d)
def __str__ (self):
return "<Channel " + self.name + ">"
def reply (_msg, **kw):
if not isinstance(_msg, dict):
# We'll also take an event...
_msg = _msg.msg
kw['CHANNEL'] = _msg.get('CHANNEL')
if 'XID' in _msg: kw['XID'] = _msg.get('XID')
return kw
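# For example (a sketch), reply() echoes CHANNEL and XID back to the sender:
#
#     reply({'CHANNEL': 'chat', 'XID': 7}, cmd='ack')
#     # -> {'cmd': 'ack', 'CHANNEL': 'chat', 'XID': 7}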
class ChannelBot (object):
"""
A very simple framework for writing "bots" that respond to messages
on a channel.
"""
def __str__ (self):
return "<%s@%s>" % (self.__class__.__name__, self.channel)
def __init__ (self, channel, nexus = None, weak = False, extra = {}):
self._startup(channel, nexus, weak, extra)
def _startup (self, channel, nexus = None, weak = False, extra = {}):
self._nexus = _get_nexus(nexus)
if isinstance(channel, Channel):
self.channel = channel
else:
self.channel = self._nexus.get_channel(channel, create=True)
self.listeners = self.channel.addListeners(self, weak = weak)
self.prefixes = None
self._init(extra)
if self.prefixes is None:
self.prefixes = []
for n in dir(self):
if n.startswith("_exec_"):
n = n.split("_")[2]
self.prefixes.append(n)
def _handle_ChannelDestroyed (self, event):
self.channel.removeListeners(self.listeners)
self._destroyed()
def _handle_ChannelJoin (self, event):
self._join(event, event.con, event.msg)
def _handle_ChannelLeave (self, event):
self._leave(event.con, len(self.channel._members) == 0)
def _handle_MessageReceived (self, event, msg):
for prefix in self.prefixes:
if prefix in event.msg:
cmd = "_exec_%s_%s" % (prefix, str(event.msg[prefix]))
if hasattr(self, cmd):
getattr(self, cmd)(event)
return #TODO: Return val?
for prefix in self.prefixes:
if prefix in event.msg:
cmd = "_exec_" + prefix
if hasattr(self, cmd):
getattr(self, cmd)(event, msg[prefix])
return #TODO: Return val?
self._unhandled(event)
def _unhandled (self, event):
""" Called when no command found """
pass
def _join (self, event, connection, msg):
""" Called when a connection joins """
pass
def _leave (self, connection, empty):
"""
Called when a connection leaves
If channel now has no members, empty is True
"""
pass
def _destroyed (self):
""" Called when channel is destroyed """
pass
def _init (self, extra):
"""
Called during initialization
'extra' is any additional information passed in when initializing
the bot. In particular, this may be the message that goes along
with its invitation into a channel.
"""
pass
def reply (__self, __event, **kw):
"""
Unicast reply to a specific message.
"""
__event.con.send(reply(__event, **kw))
def send (__self, __msg={}, **kw):
"""
Send a message to all members of this channel.
"""
m = {}
m.update(__msg)
m.update(kw)
__self.channel.send(m)
class DefaultChannelBot (ChannelBot):
def _init (self, extra):
self._bots = {}
def add_bot (self, bot, name = None):
"""
Registers a bot (an instance of ChannelBot) so that it can be
invited to other channels.
"""
assert issubclass(bot, ChannelBot)
if name is None:
name = bot.__name__
self._bots[name] = bot
def _exec_newlines_False (self, event):
event.con._newlines = False
def _exec_newlines_True (self, event):
event.con._newlines = True
def _exec_cmd_invite (self, event):
"""
Invites a bot that has been registered with add_bot() to a channel.
Note that you can invite a bot to an empty (new) temporary channel.
It will stay until the first member leaves.
"""
botname = event.msg.get('bot')
botclass = self._bots.get(botname)
channel = event.msg.get('channel')
new_channel = False
if channel is None:
new_channel = True
channel = self._gen_channel_name(event.msg.get("prefix", "temp"))
chan = self._nexus.get_channel(channel, create=True, temporary=True)
if chan is None:
#TODO: send an error
log.warning("A bot was invited to a nonexistent channel (%s)"
% (channel,))
return
if botclass is None:
#TODO: send an error
log.warning("A nonexistent bot (%s) was invited to a channel"
% (botname,))
return
bot = botclass(channel, self._nexus)
if new_channel:
self.reply(event, new_channel = new_channel)
def _unhandled (self, event):
log.warn("Default channel got unknown command: "
+ str(event.msg.get('cmd')))
def _gen_channel_name (self, prefix = "temp"):
""" Makes up a channel name """
prefix += "_"
import random
while True:
# Sloppy
r = random.randint(1, 100000)
n = prefix + str(r)
      if n not in self._nexus._channels:
break
return n
def _exec_cmd_new_channel (self, event):
""" Generates a new channel with random name """
prefix = event.msg.get('prefix', 'temp')
n = self._gen_channel_name(prefix)
ch = self._nexus.get_channel(n, create=True, temporary=True)
ch._add_member(event.con, event.msg)
self.reply(event, new_channel = n)
def _exec_cmd_join_channel (self, event):
""" Joins/creates a channel """
temp = event.msg.get('temporary', True) # Default temporary!
ch = self._nexus.get_channel(event.msg['channel'], temporary=temp)
if ch is None: return
ch._add_member(event.con, event.msg)
def _exec_cmd_leave_channel (self, event):
ch = self._nexus.get_channel(event.msg['channel'])
if ch is None: return
ch._remove_member(event.con)
def _exec_test (self, event, value):
log.info("Default channel got: " + str(value))
self.reply(event, test = value.upper())
class MessengerNexus (EventMixin):
"""
Transports, Channels, etc. are all associated with a MessengerNexus.
Typically, there is only one, and it is registered as
pox.core.MessengerNexus
"""
_eventMixin_events = set([
MissingChannel, # When a msg arrives to nonexistent channel
ChannelDestroy,
ChannelDestroyed,
ChannelCreate,
])
def __init__ (self):
EventMixin.__init__(self)
self._channels = {} # name -> Channel
self.default_bot = DefaultChannelBot("", self)
self._next_ses = 1
self._session_salt = str(time.time())
def generate_session (self):
"""
Return a new session ID tuple (key, num)
The key is a unique and not-trivial-to-guess alphanumeric value
associated with the session.
The num is a unique numerical value associated with the session.
"""
r = self._next_ses
self._next_ses += 1
key = str(random.random()) + str(time.time()) + str(r)
key += str(id(key)) + self._session_salt
key = b32encode(hashlib.md5(key).digest()).upper().replace('=','')
def alphahex (r):
""" base 16 on digits 'a' through 'p' """
r=hex(r)[2:].lower()
return ''.join(chr((10 if ord(x) >= 97 else 49) + ord(x)) for x in r)
key = alphahex(r) + key
return key,r
def get_channel (self, name, create = True, temporary = False):
if name is None: name = ""
if name in self._channels:
return self._channels[name]
elif create:
c = Channel(name, self, temporary = temporary)
self.raiseEvent(ChannelCreate, c)
return c
else:
return None
def _rx_message (self, con, msg):
"""
Dispatches messages to listeners of this nexus and to its Channels.
Called by Connections.
"""
ret = False
assert isinstance(msg, dict)
if isinstance(msg, dict):
channels = msg.get('CHANNEL')
if channels is None:
channels = [""]
if not isinstance(channels, list):
channels = [channels]
for cname in channels:
channel = self.get_channel(cname, create=False)
if channel is None:
e = self.raiseEvent(MissingChannel, con, cname, msg)
if e is not None: cname = e.channel_name
channel = self.get_channel(cname, create=False)
if channel is not None:
#print "raise on", channel
channel.raiseEvent(MessageReceived, con, channel, msg)
ret = True
return ret
def launch ():
core.registerNew(MessengerNexus)
|
gpl-3.0
|
richpsharp/forest_carbon_edge_effects
|
average_human_use_layers.py
|
1
|
8405
|
import os
import time
import numpy
import functools
import sys
import codecs
import types
import gdal
import osr
from invest_natcap import raster_utils
GLOBAL_UPPER_LEFT_ROW = 2602195.7925872812047601
GLOBAL_UPPER_LEFT_COL = -11429693.3490753173828125
def average_layers():
base_table_uri = "C:/Users/rich/Desktop/all_grid_results_100km_clean_v2.csv"
base_table_file = open(base_table_uri, 'rU')
table_header = base_table_file.readline()
#need to mask the average layers to the biomass regions
giant_layer_uri = "C:/Users/rich/Desktop/average_layers_projected/giant_layer.tif"
af_uri = "C:/Users/rich/Desktop/af_biov2ct1.tif"
am_uri = "C:/Users/rich/Desktop/am_biov2ct1.tif"
as_uri = "C:/Users/rich/Desktop/as_biov2ct1.tif"
cell_size = raster_utils.get_cell_size_from_uri(am_uri)
#raster_utils.vectorize_datasets(
# [af_uri, am_uri, as_uri], lambda x,y,z: x+y+z, giant_layer_uri, gdal.GDT_Float32,
# -1, cell_size, 'union', vectorize_op=False)
table_uri = base_table_uri
table_file = open(table_uri, 'rU')
table_header = table_file.readline().rstrip()
lookup_table = raster_utils.get_lookup_from_csv(table_uri, 'ID100km')
out_table_uri = "C:/Users/rich/Desktop/all_grid_results_100km_human_elevation.csv"
out_table_file = codecs.open(out_table_uri, 'w', 'utf-8')
average_raster_list = [
("C:/Users/rich/Desktop/average_layers_projected/lighted_area_luminosity.tif", 'Lighted area density'),
("C:/Users/rich/Desktop/average_layers_projected/fi_average.tif", 'Fire densities'),
("C:/Users/rich/Desktop/average_layers_projected/glbctd1t0503m.tif", 'FAO_Cattle'),
("C:/Users/rich/Desktop/average_layers_projected/glbgtd1t0503m.tif", 'FAO_Goat'),
("C:/Users/rich/Desktop/average_layers_projected/glbpgd1t0503m.tif", 'FAO_Pig'),
("C:/Users/rich/Desktop/average_layers_projected/glbshd1t0503m.tif", 'FAO_Sheep'),
("C:/Users/rich/Desktop/average_layers_projected/glds00ag.tif", 'Human population density AG'),
("C:/Users/rich/Desktop/average_layers_projected/glds00g.tif", 'Human population density G'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_11.tif', '"11: Urban, Dense settlement"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_12.tif', '"12: Dense settlements, Dense settlements"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_22.tif', '"22: Irrigated villages, Villages"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_23.tif', '"23: Cropped & pastoral villages, Villages"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_24.tif', '"24: Pastoral villages, Villages"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_25.tif', '"25: Rainfed villages, Villages"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_26.tif', '"26: Rainfed mosaic villages, Villages"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_31.tif', '"31: Residential irrigated cropland, Croplands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_32.tif', '"32: Residential rainfed mosaic, Croplands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_33.tif', '"33: Populated irrigated cropland, Croplands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_34.tif', '"34: Populated rainfed cropland, Croplands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_35.tif', '"35: Remote croplands, Croplands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_41.tif', '"41: Residential rangelands, Rangelands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_42.tif', '"42: Populated rangelands, Rangelands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_43.tif', '"43: Remote rangelands, Rangelands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_51.tif', '"51: Populated forests, Forested"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_52.tif', '"52: Remote forests, Forested"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_61.tif', '"61: Wild forests, Wildlands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_62.tif', '"62: Sparse trees, Wildlands"'),
('C:/Users/rich/Desktop/average_layers_projected/anthrome_63.tif', '"63: Barren, Wildlands"'),
("C:/Users/rich/Desktop/average_layers_projected/5km_global_pantropic_dem.tif", '"Average Elevation"'),
]
clipped_raster_list = []
for average_raster_uri, header in average_raster_list:
print 'clipping ' + average_raster_uri
clipped_raster_uri = os.path.join(os.path.dirname(average_raster_uri), 'temp', os.path.basename(average_raster_uri))
cell_size = raster_utils.get_cell_size_from_uri(average_raster_uri)
raster_utils.vectorize_datasets(
[average_raster_uri, giant_layer_uri], lambda x,y: x, clipped_raster_uri, gdal.GDT_Float32,
-1, cell_size, 'intersection', vectorize_op=False)
clipped_raster_list.append((clipped_raster_uri, header))
dataset_list = [gdal.Open(uri) for uri, label in clipped_raster_list]
band_list = [ds.GetRasterBand(1) for ds in dataset_list]
nodata_list = [band.GetNoDataValue() for band in band_list]
extended_table_headers = ','.join([header for _, header in average_raster_list])
def write_to_file(value):
try:
out_table_file.write(value)
except UnicodeDecodeError as e:
out_table_file.write(value.decode('latin-1'))
write_to_file(table_header + ',' + extended_table_headers + '\n')
#print table_header + ',' + extended_table_headers
for line in table_file:
split_line = line.rstrip().split(',')
grid_id = split_line[2]
#for grid_id in lookup_table:
try:
split_grid_id = grid_id.split('-')
grid_row_index, grid_col_index = map(int, split_grid_id)
except ValueError as e:
month_to_number = {
'Jan': 1,
'Feb': 2,
'Mar': 3,
'Apr': 4,
'May': 5,
'Jun': 6,
'Jul': 7,
'Aug': 8,
'Sep': 9,
'Oct': 10,
'Nov': 11,
'Dec': 12,
}
grid_row_index, grid_col_index = month_to_number[split_grid_id[0]], int(split_grid_id[1])
print 'processing grid id ' + grid_id
ds = dataset_list[0]
base_srs = osr.SpatialReference(ds.GetProjection())
lat_lng_srs = base_srs.CloneGeogCS()
coord_transform = osr.CoordinateTransformation(
base_srs, lat_lng_srs)
gt = ds.GetGeoTransform()
grid_resolution = 100 #100km
row_coord = grid_row_index * grid_resolution * 1000 + GLOBAL_UPPER_LEFT_ROW
col_coord = grid_col_index * grid_resolution * 1000 + GLOBAL_UPPER_LEFT_COL
lng_coord, lat_coord, _ = coord_transform.TransformPoint(
col_coord, row_coord)
write_to_file(','.join(split_line[0:2]) + ',%d-%d,' % (grid_row_index, grid_col_index) + ','.join(split_line[3:11]) +',%f,%f,' % (lat_coord, lng_coord)+','.join(split_line[13:]))
for (_, header), band, ds, nodata in zip(clipped_raster_list, band_list, dataset_list, nodata_list):
gt = ds.GetGeoTransform()
n_rows = ds.RasterYSize
n_cols = ds.RasterXSize
xoff = int(grid_col_index * (grid_resolution * 1000.0) / (gt[1]))
yoff = int(grid_row_index * (grid_resolution * 1000.0) / (-gt[5]))
win_xsize = int((grid_resolution * 1000.0) / (gt[1]))
            win_ysize = int((grid_resolution * 1000.0) / (-gt[5]))
if xoff + win_xsize > n_cols:
win_xsize = n_cols - xoff
if yoff + win_ysize > n_rows:
win_ysize = n_rows - yoff
block = band.ReadAsArray(
xoff=xoff, yoff=yoff, win_xsize=win_xsize, win_ysize=win_ysize)
block_average = numpy.average(block[block != nodata])
write_to_file(',%f' % block_average)
write_to_file('\n')
if __name__ == '__main__':
average_layers()
|
apache-2.0
|
1a1a11a/mimircache
|
PyMimircache/cache/slru.py
|
1
|
3337
|
# coding=utf-8
from PyMimircache.cache.lru import LRU
from PyMimircache.cache.abstractCache import Cache
class SLRU(Cache):
def __init__(self, cache_size=1000, ratio=1, **kwargs):
"""
:param cache_size: size of cache
        :param ratio: the ratio of protected/probationary
:return:
"""
super().__init__(cache_size, **kwargs)
self.ratio = ratio
        # Maybe using two linked lists and a dict would be more efficient?
self.protected = LRU(
int(self.cache_size * self.ratio / (self.ratio + 1)))
self.probationary = LRU(int(self.cache_size * 1 / (self.ratio + 1)))
def has(self, req_id, **kwargs):
"""
:param **kwargs:
:param req_id:
:return: whether the given element is in the cache
"""
if req_id in self.protected or req_id in self.probationary:
return True
else:
return False
def _update(self, req_item, **kwargs):
""" the given element is in the cache, now update it to new location
:param **kwargs:
:param req_item:
:return: None
"""
if req_item in self.protected:
self.protected._update(req_item, )
else:
# req_item is in probationary, remove from probationary, insert to end of protected,
# evict from protected to probationary if needed
# get the node and remove from probationary
node = self.probationary.cache_dict[req_item]
self.probationary.cache_linked_list.remove_node(node)
del self.probationary.cache_dict[req_item]
# insert into protected
evicted_key = self.protected._insert(node.content, )
# if there are req_item evicted from protected area, add to probationary area
if evicted_key:
self.probationary._insert(evicted_key, )
def _insert(self, req_item, **kwargs):
"""
the given element is not in the cache, now insert it into cache
:param **kwargs:
:param req_item:
:return: evicted element
"""
return self.probationary._insert(req_item, )
def _print_cache_line(self):
print("protected: ")
self.protected._print_cache_line()
print("probationary: ")
self.probationary._print_cache_line()
def evict(self, **kwargs):
"""
evict one element from the cache line
:param **kwargs:
:return: True on success, False on failure
"""
pass
def access(self, req_item, **kwargs):
"""
:param **kwargs:
:param req_item: a cache request, it can be in the cache, or not
:return: None
"""
if self.has(req_item, ):
self._update(req_item, )
return True
else:
self._insert(req_item, )
return False
def __repr__(self):
return "SLRU, given size: {}, given protected part size: {}, given probationary part size: {}, \
current protected part size: {}, current probationary size: {}". \
format(self.cache_size, self.protected.cache_size, self.probationary.cache_size,
self.protected.cache_linked_list.size, self.probationary.cache_linked_list.size)
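# A minimal usage sketch (a demo under stated assumptions, not part of the
# library API; relies only on the methods defined above):
if __name__ == "__main__":
    slru = SLRU(cache_size=4, ratio=1)  # 2 protected + 2 probationary slots
    slru.access("a")        # miss: inserted into the probationary segment
    slru.access("a")        # hit: promoted to the protected segment
    assert slru.has("a")
    print(slru)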
|
gpl-3.0
|
UltronAI/Deep-Learning
|
Pattern-Recognition/hw2-Feature-Selection/skfeature/function/wrapper/svm_backward.py
|
1
|
1775
|
import numpy as np
from sklearn.svm import SVC
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
def svm_backward(X, y, n_selected_features):
"""
This function implements the backward feature selection algorithm based on SVM
Input
-----
X: {numpy array}, shape (n_samples, n_features)
input data
y: {numpy array}, shape (n_samples,)
input class labels
n_selected_features: {int}
number of selected features
Output
------
F: {numpy array}, shape (n_features, )
index of selected features
"""
n_samples, n_features = X.shape
# using 10 fold cross validation
    cv = KFold(n_splits=10, shuffle=True)
# choose SVM as the classifier
clf = SVC()
# selected feature set, initialized to contain all features
    F = list(range(n_features))
count = n_features
while count > n_selected_features:
max_acc = 0
for i in range(n_features):
if i in F:
F.remove(i)
X_tmp = X[:, F]
acc = 0
                for train, test in cv.split(X_tmp):
clf.fit(X_tmp[train], y[train])
y_predict = clf.predict(X_tmp[test])
acc_tmp = accuracy_score(y[test], y_predict)
acc += acc_tmp
acc = float(acc)/10
F.append(i)
# record the feature which results in the largest accuracy
if acc > max_acc:
max_acc = acc
idx = i
# delete the feature which results in the largest accuracy
F.remove(idx)
count -= 1
return np.array(F)
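# A minimal usage sketch on synthetic data (assumes scikit-learn and numpy
# are installed; labels are random, so the selected features are arbitrary):
if __name__ == '__main__':
    np.random.seed(0)
    X_demo = np.random.rand(50, 8)           # 50 samples, 8 features
    y_demo = np.random.randint(0, 2, 50)     # binary labels
    print(svm_backward(X_demo, y_demo, n_selected_features=4))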
|
mit
|
jalexanderqed/rocksdb
|
build_tools/precommit_checker.py
|
2
|
5638
|
#!/usr/local/fbcode/gcc-4.8.1-glibc-2.17-fb/bin/python2.7
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import commands
import subprocess
import sys
import re
import os
import time
#
# Simple logger
#
class Log:
def __init__(self, filename):
self.filename = filename
self.f = open(self.filename, 'w+', 0)
def caption(self, str):
line = "\n##### %s #####\n" % str
if self.f:
self.f.write("%s \n" % line)
else:
print(line)
def error(self, str):
data = "\n\n##### ERROR ##### %s" % str
if self.f:
self.f.write("%s \n" % data)
else:
print(data)
def log(self, str):
if self.f:
self.f.write("%s \n" % str)
else:
print(str)
#
# Shell Environment
#
class Env(object):
def __init__(self, logfile, tests):
self.tests = tests
self.log = Log(logfile)
def shell(self, cmd, path=os.getcwd()):
if path:
os.chdir(path)
self.log.log("==== shell session ===========================")
self.log.log("%s> %s" % (path, cmd))
status = subprocess.call("cd %s; %s" % (path, cmd), shell=True,
stdout=self.log.f, stderr=self.log.f)
self.log.log("status = %s" % status)
self.log.log("============================================== \n\n")
return status
def GetOutput(self, cmd, path=os.getcwd()):
if path:
os.chdir(path)
self.log.log("==== shell session ===========================")
self.log.log("%s> %s" % (path, cmd))
status, out = commands.getstatusoutput(cmd)
self.log.log("status = %s" % status)
self.log.log("out = %s" % out)
self.log.log("============================================== \n\n")
return status, out
#
# Pre-commit checker
#
class PreCommitChecker(Env):
def __init__(self, args):
Env.__init__(self, args.logfile, args.tests)
self.ignore_failure = args.ignore_failure
#
# Get commands for a given job from the determinator file
#
def get_commands(self, test):
status, out = self.GetOutput(
"build_tools/rocksdb-lego-determinator %s" % test, ".")
return status, out
#
# Run a specific CI job
#
def run_test(self, test):
self.log.caption("Running test %s locally" % test)
# get commands for the CI job determinator
status, cmds = self.get_commands(test)
if status != 0:
self.log.error("Error getting commands for test %s" % test)
return False
# Parse the JSON to extract the commands to run
cmds = re.findall("'shell':'([^\']*)'", cmds)
if len(cmds) == 0:
self.log.log("No commands found")
return False
# Run commands
for cmd in cmds:
# Replace J=<..> with the local environment variable
if "J" in os.environ:
cmd = cmd.replace("J=1", "J=%s" % os.environ["J"])
cmd = cmd.replace("make ", "make -j%s " % os.environ["J"])
# Run the command
status = self.shell(cmd, ".")
if status != 0:
self.log.error("Error running command %s for test %s"
% (cmd, test))
return False
return True
#
# Run specified CI jobs
#
def run_tests(self):
if not self.tests:
self.log.error("Invalid args. Please provide tests")
return False
self.print_separator()
self.print_row("TEST", "RESULT")
self.print_separator()
result = True
for test in self.tests:
start_time = time.time()
self.print_test(test)
result = self.run_test(test)
elapsed_min = (time.time() - start_time) / 60
if not result:
self.log.error("Error running test %s" % test)
self.print_result("FAIL (%dm)" % elapsed_min)
if not self.ignore_failure:
return False
result = False
else:
self.print_result("PASS (%dm)" % elapsed_min)
self.print_separator()
return result
#
# Print a line
#
def print_separator(self):
print("".ljust(60, "-"))
#
    # Print two columns
#
def print_row(self, c0, c1):
print("%s%s" % (c0.ljust(40), c1.ljust(20)))
def print_test(self, test):
print(test.ljust(40), end="")
sys.stdout.flush()
def print_result(self, result):
print(result.ljust(20))
#
# Main
#
parser = argparse.ArgumentParser(description='RocksDB pre-commit checker.')
# --log <logfile>
parser.add_argument('--logfile', default='/tmp/precommit-check.log',
help='Log file. Default is /tmp/precommit-check.log')
# --ignore_failure
parser.add_argument('--ignore_failure', action='store_true', default=False,
                    help='Continue running the remaining tests even when one fails')
# <test ....>
parser.add_argument('tests', nargs='+',
help='CI test(s) to run. e.g: unit punit asan tsan ubsan')
args = parser.parse_args()
checker = PreCommitChecker(args)
print("Please follow log %s" % checker.log.filename)
if not checker.run_tests():
print("Error running tests. Please check log file %s"
% checker.log.filename)
sys.exit(1)
sys.exit(0)
|
bsd-3-clause
|
kastriothaliti/techstitution
|
venv/lib/python3.5/site-packages/wheel/util.py
|
345
|
4890
|
"""Utility functions."""
import sys
import os
import base64
import json
import hashlib
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict
__all__ = ['urlsafe_b64encode', 'urlsafe_b64decode', 'utf8',
'to_json', 'from_json', 'matches_requirement']
def urlsafe_b64encode(data):
"""urlsafe_b64encode without padding"""
return base64.urlsafe_b64encode(data).rstrip(binary('='))
def urlsafe_b64decode(data):
"""urlsafe_b64decode without padding"""
pad = b'=' * (4 - (len(data) & 3))
return base64.urlsafe_b64decode(data + pad)
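# For example (a sketch): padding is stripped on encode and restored on decode.
#
#     urlsafe_b64encode(b'hi')    # -> b'aGk'   (no trailing '=')
#     urlsafe_b64decode(b'aGk')   # -> b'hi'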
def to_json(o):
'''Convert given data to JSON.'''
return json.dumps(o, sort_keys=True)
def from_json(j):
'''Decode a JSON payload.'''
return json.loads(j)
def open_for_csv(name, mode):
if sys.version_info[0] < 3:
nl = {}
bin = 'b'
else:
nl = { 'newline': '' }
bin = ''
return open(name, mode + bin, **nl)
try:
unicode
def utf8(data):
'''Utf-8 encode data.'''
if isinstance(data, unicode):
return data.encode('utf-8')
return data
except NameError:
def utf8(data):
'''Utf-8 encode data.'''
if isinstance(data, str):
return data.encode('utf-8')
return data
try:
# For encoding ascii back and forth between bytestrings, as is repeatedly
# necessary in JSON-based crypto under Python 3
unicode
def native(s):
return s
def binary(s):
if isinstance(s, unicode):
return s.encode('ascii')
return s
except NameError:
def native(s):
if isinstance(s, bytes):
return s.decode('ascii')
return s
    def binary(s):
        if isinstance(s, str):
            return s.encode('ascii')
        return s
class HashingFile(object):
def __init__(self, fd, hashtype='sha256'):
self.fd = fd
self.hashtype = hashtype
self.hash = hashlib.new(hashtype)
self.length = 0
def write(self, data):
self.hash.update(data)
self.length += len(data)
self.fd.write(data)
def close(self):
self.fd.close()
def digest(self):
if self.hashtype == 'md5':
return self.hash.hexdigest()
digest = self.hash.digest()
return self.hashtype + '=' + native(urlsafe_b64encode(digest))
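# A minimal usage sketch (wraps any writable file object and hashes the data
# as it passes through; io.BytesIO is used here purely for illustration):
#
#     import io
#     hf = HashingFile(io.BytesIO())
#     hf.write(b'payload')
#     hf.digest()   # -> 'sha256=...' (urlsafe base64 digest, no padding)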
class OrderedDefaultDict(OrderedDict):
def __init__(self, *args, **kwargs):
if not args:
self.default_factory = None
else:
if not (args[0] is None or callable(args[0])):
raise TypeError('first argument must be callable or None')
self.default_factory = args[0]
args = args[1:]
super(OrderedDefaultDict, self).__init__(*args, **kwargs)
def __missing__ (self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = default = self.default_factory()
return default
if sys.platform == 'win32':
import ctypes.wintypes
# CSIDL_APPDATA for reference - not used here for compatibility with
# dirspec, which uses LOCAL_APPDATA and COMMON_APPDATA in that order
csidl = dict(CSIDL_APPDATA=26, CSIDL_LOCAL_APPDATA=28,
CSIDL_COMMON_APPDATA=35)
def get_path(name):
SHGFP_TYPE_CURRENT = 0
buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
ctypes.windll.shell32.SHGetFolderPathW(0, csidl[name], 0, SHGFP_TYPE_CURRENT, buf)
return buf.value
def save_config_path(*resource):
appdata = get_path("CSIDL_LOCAL_APPDATA")
path = os.path.join(appdata, *resource)
if not os.path.isdir(path):
os.makedirs(path)
return path
def load_config_paths(*resource):
ids = ["CSIDL_LOCAL_APPDATA", "CSIDL_COMMON_APPDATA"]
for id in ids:
base = get_path(id)
path = os.path.join(base, *resource)
if os.path.exists(path):
yield path
else:
def save_config_path(*resource):
import xdg.BaseDirectory
return xdg.BaseDirectory.save_config_path(*resource)
def load_config_paths(*resource):
import xdg.BaseDirectory
return xdg.BaseDirectory.load_config_paths(*resource)
def matches_requirement(req, wheels):
"""List of wheels matching a requirement.
:param req: The requirement to satisfy
:param wheels: List of wheels to search.
"""
try:
from pkg_resources import Distribution, Requirement
except ImportError:
raise RuntimeError("Cannot use requirements without pkg_resources")
req = Requirement.parse(req)
selected = []
for wf in wheels:
f = wf.parsed_filename
dist = Distribution(project_name=f.group("name"), version=f.group("ver"))
if dist in req:
selected.append(wf)
return selected
|
gpl-3.0
|
kirienko/gourmet
|
src/gourmet/threadManager.py
|
1
|
15304
|
# This module is designed to handle all multi-threading processes in
# Gourmet. Separate threads are limited to doing the following things
# with respect to the GUI:
#
# 1. Start a notification dialog with a progress bar
# 2. Update the progress bar
# 3. Finish successfully
# 4. Stop with an error.
#
# If you need to get user input in the middle of your threaded process,
# you need to redesign so that it works as follows:
#
# 1. Run the first half of your process as a thread.
# 2. Upon completion of your thread, run your dialog to get your user
# input
# 3. Run the second half of your process as a thread.
#
# In this module, we define the following base classes...
#
# A singleton ThreadingManager that tracks how many threads we have
# running, and allows a maximum number of threads to be run at any
# single time.
#
# A SuspendableThread base class for creating and running threaded
# processes.
import threading
import time
import traceback
import webbrowser
from gettext import ngettext
from typing import Any
from gi.repository import GLib, GObject, Gtk, Pango
from gourmet.gtk_extras.dialog_extras import show_message
from gourmet.i18n import _
# _IdleObject etc. based on example John Stowers
# <[email protected]>
class _IdleObject(GObject.GObject):
"""
Override GObject.GObject to always emit signals in the main thread
by emitting on an idle handler
"""
def __init__(self):
GObject.GObject.__init__(self)
def emit(self, *args):
if args[0] != 'progress':
print('emit', *args)
GLib.idle_add(GObject.GObject.emit, self, *args)
class Terminated (Exception):
def __init__ (self, value):
self.value=value
def __str__(self):
return repr(self.value)
class SuspendableThread (threading.Thread, _IdleObject):
"""A class for long-running processes that shouldn't interrupt the
GUI.
"""
__gsignals__ = {
'completed' : (GObject.SignalFlags.RUN_LAST, None, []),
'progress' : (GObject.SignalFlags.RUN_LAST, None,
[GObject.TYPE_FLOAT, GObject.TYPE_STRING]), #percent complete, progress bar text
'error' : (GObject.SignalFlags.RUN_LAST, None, [GObject.TYPE_INT, # error number
GObject.TYPE_STRING, # error name
GObject.TYPE_STRING # stack trace
]),
'stopped': (GObject.SignalFlags.RUN_LAST, None, []), # emitted when we are stopped
'pause': (GObject.SignalFlags.RUN_LAST, None, []), # emitted when we pause
'resume': (GObject.SignalFlags.RUN_LAST, None, []), # emitted when we resume
'done': (GObject.SignalFlags.RUN_LAST, None, []), # emitted when/however we finish
}
def __init__(self, name=None):
self.initialized = False
#self.name = name
self.suspended = False
self.terminated = False
self.done = False
_IdleObject.__init__(self)
threading.Thread.__init__(self, name=name)
def initialize_thread (self):
self.initialized = True
self.start()
def connect_subthread (self, subthread):
'''For subthread subthread, connect to error and pause signals and
and emit as if they were our own.'''
subthread.connect('error',lambda st,enum,ename,strace: self.emit('error',enum,ename,strace))
subthread.connect('stopped',lambda st: self.emit('stopped'))
subthread.connect('pause',lambda st: self.emit('pause'))
subthread.connect('resume',lambda st: self.emit('resume'))
def run (self):
try:
self.do_run()
except Terminated:
self.emit('stopped')
except:
self.emit('error', 1, f'Error during {self.name}',
traceback.format_exc())
else:
self.emit('completed')
self.done = True
self.emit('done')
def do_run (self):
# Note that sub-classes need to call check_for_sleep
# periodically, otherwise pausing & cancelling won't work
raise NotImplementedError
def suspend (self):
self.suspended = True
def resume (self):
self.suspended = False
def terminate (self):
self.terminated = True
self.emit('stopped')
def check_for_sleep (self):
"""Check whether we have been suspended or terminated.
"""
emit_resume = False
if self.terminated:
raise Terminated('%s terminated'%self.name)
if self.suspended:
self.emit('pause')
emit_resume = True
while self.suspended:
if self.terminated:
raise Terminated('%s terminated'%self.name)
time.sleep(1)
if emit_resume:
self.emit('resume')
def __repr__ (self):
try:
return threading.Thread.__repr__(self)
except AssertionError:
return '<SuspendableThread %s - uninitialized>'%self.name
class NotThreadSafe:
"""Subclasses of this do things that are not thread safe. An error
will be raised if an object that is an instance of this class is
added to a thread manager.
"""
pass
class ThreadManager:
__single = None
@classmethod
def instance(cls):
if ThreadManager.__single is None:
ThreadManager.__single = cls()
return ThreadManager.__single
def __init__ (self, max_concurrent_threads = 2):
self.max_concurrent_threads = max_concurrent_threads
self.thread_queue = []
self.count = 0
self.active_count = 0
self.threads = []
def add_thread (self, thread):
try:
assert(isinstance(thread,SuspendableThread))
except AssertionError:
print('Class',thread,type(thread),'is not a SuspendableThread')
raise
if isinstance(thread,NotThreadSafe):
raise TypeError("Thread %s is NotThreadSafe"%thread)
self.threads.append(thread)
thread.connect('pause',self.register_thread_paused)
thread.connect('resume',self.register_thread_resume)
thread.connect('done',self.register_thread_done)
if self.active_count < self.max_concurrent_threads:
self.active_count += 1
thread.initialize_thread()
else:
self.thread_queue.append(thread)
def register_thread_done (self, thread):
if thread in self.threads:
self.threads.remove(thread)
self.active_count -= 1
self.start_queued_threads()
def register_thread_paused (self, thread):
self.active_count -= 1
self.start_queued_threads()
def register_thread_resume (self, thread):
self.active_count += 1
def resume_thread (self, thread):
if self.active_count < self.max_concurrent_threads:
thread.resume()
self.active_count += 1
else:
self.thread_queue.append(thread)
def start_queued_threads (self):
while self.active_count < self.max_concurrent_threads and self.thread_queue:
thread_to_add = self.thread_queue.pop()
self.active_count += 1
if thread_to_add.initialized:
thread_to_add.resume()
else:
thread_to_add.initialize_thread()
def get_thread_manager ():
return ThreadManager.instance()
class ThreadManagerGui:
__single = None
paused_text = ' (' + _('Paused') + ')'
PAUSE = 10
@classmethod
def instance(cls):
if ThreadManagerGui.__single is None:
ThreadManagerGui.__single = cls()
return ThreadManagerGui.__single
def __init__ (self, messagebox=None):
self.tm = get_thread_manager()
self.threads = {}
if messagebox is None:
# import done here to avoid cycling imports
from gourmet.GourmetRecipeManager import RecGui
self.messagebox = RecGui.instance().messagebox
else:
self.messagebox = messagebox
self.to_remove = [] # a list of widgets to remove when we close...
def response (self, dialog, response):
if response==Gtk.ResponseType.CLOSE:
self.close()
def importer_thread_done(self, thread):
# The following logic allows different messages to be displayed
        # depending on whether a recipe was actually imported or the user
# cancelled the request.
if (len(thread.added_recs) > 0):
done_message = ngettext("Recipe successfully imported",
"Recipes successfully imported",
len(thread.added_recs))
elif (len(thread.added_recs) == 0):
done_message = _("Import Unsuccessful")
self.notification_thread_done(thread, done_message)
def notification_thread_done(self, thread, message):
infobox = Gtk.InfoBar()
infobox.set_message_type(Gtk.MessageType.INFO)
infobox.add_button(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
infobox.connect('response', lambda ib, response_id: ib.hide())
infobox.show_all()
self.messagebox.pack_start(infobox, True, True, 0)
label = Gtk.Label()
label.set_markup(message)
label.connect('activate-link', lambda lbl, uri: webbrowser.open(uri))
label.show()
infobox.get_content_area().add(label)
self.messagebox.show()
def register_thread_with_dialog (self, description, thread):
threadbox = Gtk.InfoBar()
threadbox.set_message_type(Gtk.MessageType.INFO)
pb = Gtk.ProgressBar()
pb.set_ellipsize(Pango.EllipsizeMode.MIDDLE)
pause_button = Gtk.ToggleButton(label=_('Pause'))
threadbox.add_action_widget(pause_button, self.PAUSE)
dlab = Gtk.Label(label=description)
dlab.set_ellipsize(Pango.EllipsizeMode.MIDDLE)
cancel_button = threadbox.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
vbox = Gtk.VBox()
vbox.pack_start(dlab, expand=True, fill=True, padding=0)
vbox.pack_start(pb, expand=True, fill=True, padding=0)
threadbox.get_content_area().add(vbox)
threadbox.show_all()
self.messagebox.pack_start(threadbox, True, True, 0)
self.messagebox.show()
#for b in threadbox.buttons: b.show()
thread.connect('completed',self.thread_done,threadbox)
thread.connect('error',self.thread_error,threadbox)
thread.connect('stopped',self.thread_stopped,threadbox)
thread.connect('pause',self.thread_pause,threadbox)
thread.connect('resume',self.thread_resume,threadbox)
thread.connect('progress',self.progress_update,pb)
pause_button.connect('clicked',self.pause_cb,thread)
cancel_button.connect('clicked',self.cancel_cb,thread)
def pause_cb (self, b, thread):
if b.get_active():
thread.suspend()
else:
self.tm.resume_thread(thread)
def cancel_cb (self, b, thread):
thread.terminate()
def thread_done (self, thread, threadbox):
for b in threadbox.get_action_area().get_children(): b.hide()
threadbox.add_button(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
threadbox.connect('response', lambda ib, response_id: ib.hide())
self.to_remove.append(threadbox)
pb = threadbox.get_content_area().get_children()[0].get_children()[1]
txt = pb.get_text()
if txt:
pb.set_text(txt + ' ('+_('Done')+')')
else:
pb.set_text('Done')
pb.set_fraction(.01)
for widget in threadbox.get_content_area().get_children()[0]:
widget.hide()
threadbox.hide()
def progress_update (self, thread, perc, txt, pb):
if perc >= 0.0:
pb.set_fraction(perc)
else:
pb.pulse()
pb.set_text(txt)
def thread_error(self, thread: Any, errno: int,
errname: str, trace: str, threadbox: Gtk.InfoBar):
threadbox.get_action_area().get_children()[1].hide() # Pause button
pb = threadbox.get_content_area().get_children()[0].get_children()[1]
pb.set_text(_('Error: %s') % errname)
b = threadbox.add_button(_('Details'), 11)
b.connect('clicked', self.show_traceback, errno, errname, trace)
b.show()
self.to_remove.append(threadbox)
def thread_stopped(self, thread: Any, threadbox: Gtk.InfoBar):
threadbox.hide()
def thread_pause (self, thread: Any, threadbox: Gtk.InfoBar):
pb = threadbox.get_content_area().get_children()[0].get_children()[1]
txt = pb.get_text()
txt += self.paused_text
pb.set_text(txt)
def thread_resume (self, thread: Any, threadbox: Gtk.InfoBar):
pb = threadbox.get_content_area().get_children()[0].get_children()[1]
txt = pb.get_text()
        if txt.endswith(self.paused_text):
txt = txt[:-len(self.paused_text)]
pb.set_text(txt)
def show (self, *args):
self.messagebox.show()
def delete_event_cb (self, *args):
self.messagebox.hide()
return True
def close (self, *args):
while self.to_remove:
box_to_remove = self.to_remove.pop()
for w in box_to_remove.widgets:
w.hide()
self.pbtable.remove(w)
self.messagebox.hide()
def show_traceback (self, button: Gtk.Button, errno: int,
errname: str, traceback: str):
show_message(label=_('Error'),
sublabel=_('Error %s: %s') % (errno,errname),
expander=(_('Traceback'), traceback))
def get_thread_manager_gui ():
return ThreadManagerGui.instance()
if __name__ == '__main__':
from gi.repository import Gtk
class TestThread (SuspendableThread):
def do_run (self):
for n in range(1000):
time.sleep(0.01)
self.emit('progress',n/1000.0,'%s of 1000'%n)
self.check_for_sleep()
class TestError (SuspendableThread):
def do_run (self):
for n in range(1000):
time.sleep(0.01)
if n==100: raise AttributeError("This is a phony error")
self.emit('progress',n/1000.0,'%s of 1000'%n)
self.check_for_sleep()
class TestInterminable (SuspendableThread):
def do_run (self):
while 1:
time.sleep(0.1)
self.emit('progress',-1,'Working interminably')
self.check_for_sleep()
tm = get_thread_manager()
tmg = get_thread_manager_gui()
for desc,thread in [
('Interminable 1',TestInterminable()),
('Linear 1',TestThread()),
('Linear 2',TestThread()),
('Interminable 2',TestInterminable()),
('Error 3',TestError())
]:
tm.add_thread(thread)
tmg.register_thread_with_dialog(desc,thread)
def quit (*args): Gtk.main_quit()
tmg.dialog.connect('delete-event',quit)
tmg.show()
Gtk.main()
|
gpl-2.0
|
ilismal/luhnCompliance
|
luhn.py
|
1
|
1535
|
# Luhn algorithm check
# From https://en.wikipedia.org/wiki/Luhn_algorithm
def luhn_checksum(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd_digits = digits[-1::-2]
even_digits = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
for d in even_digits:
checksum += sum(digits_of(d*2))
return checksum % 10
def is_luhn_valid(card_number):
return luhn_checksum(card_number) == 0
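# Worked example (the test number from the Wikipedia article cited above):
# for 79927398713, doubling every second digit from the right and summing
# the digits of the results with the remaining digits gives a multiple of 10.
#
#     luhn_checksum(79927398713)   # -> 0
#     is_luhn_valid(79927398713)   # -> True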
def readPAN():
# There's no do-while in python, lazy workaround
while True:
# Read the input
# Check that's a number with 16 digits
try:
pan=int(raw_input('PAN: '))
if (len(str(pan)) != 16):
print "PAN must be 16 chars long"
else:
break
except ValueError:
print("Not a number")
return pan
# There's no do-while in python, lazy workaround
print "Please input the first PAN in range"
firstValue = readPAN()
print "Please input the last PAN in range"
lastValue = readPAN()
# Swap variables if the first value is higher than the last
if (firstValue > lastValue):
firstValue,lastValue = lastValue,firstValue
print "Valid card numbers in range {0}/{1}".format(firstValue,lastValue)
totalValid = 0
# Check if the values in the range are luhn compliant
for ccc in range(firstValue,lastValue):
if is_luhn_valid(ccc):
print "\t" + str(ccc)
totalValid += 1
print "Total: {0} valid cards in range".format(totalValid)
|
unlicense
|
abzaloid/maps
|
django-project/lib/python2.7/site-packages/django/contrib/sessions/middleware.py
|
25
|
2648
|
import time
from importlib import import_module
from django.conf import settings
from django.utils.cache import patch_vary_headers
from django.utils.http import cookie_date
class SessionMiddleware(object):
def __init__(self):
engine = import_module(settings.SESSION_ENGINE)
self.SessionStore = engine.SessionStore
def process_request(self, request):
session_key = request.COOKIES.get(settings.SESSION_COOKIE_NAME, None)
request.session = self.SessionStore(session_key)
def process_response(self, request, response):
"""
If request.session was modified, or if the configuration is to save the
session every time, save the changes and set a session cookie or delete
the session cookie if the session has been emptied.
"""
try:
accessed = request.session.accessed
modified = request.session.modified
empty = request.session.is_empty()
except AttributeError:
pass
else:
# First check if we need to delete this cookie.
# The session should be deleted only if the session is entirely empty
if settings.SESSION_COOKIE_NAME in request.COOKIES and empty:
response.delete_cookie(settings.SESSION_COOKIE_NAME,
domain=settings.SESSION_COOKIE_DOMAIN)
else:
if accessed:
patch_vary_headers(response, ('Cookie',))
if modified or settings.SESSION_SAVE_EVERY_REQUEST:
if request.session.get_expire_at_browser_close():
max_age = None
expires = None
else:
max_age = request.session.get_expiry_age()
expires_time = time.time() + max_age
expires = cookie_date(expires_time)
# Save the session data and refresh the client cookie.
# Skip session save for 500 responses, refs #3881.
if response.status_code != 500:
request.session.save()
response.set_cookie(settings.SESSION_COOKIE_NAME,
request.session.session_key, max_age=max_age,
expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
path=settings.SESSION_COOKIE_PATH,
secure=settings.SESSION_COOKIE_SECURE or None,
httponly=settings.SESSION_COOKIE_HTTPONLY or None)
return response
|
mit
|
hjtabisola/final-project
|
appengine_config.py
|
36
|
3078
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Edit the code below to add your own hooks and modify tailbone's behavior
## Base Tailbone overrides and hooks
## Set the global default namespace
# def namespace_manager_default_namespace_for_request():
# return "my_custom_namespace"
## Use JSONP for all apis
# tailbone_JSONP = False
# Use CORS for all apis
tailbone_CORS = True
tailbone_CORS_RESTRICTED_DOMAINS = ["http://localhost"]
## modify the below functions to change how users are identified
# tailbone_is_current_user_admin =
# tailbone_get_current_user =
# tailbone_create_login_url =
# tailbone_create_logout_url =
## Use cloud store instead of blobstore
# tailboneFiles_CLOUDSTORE = False
## Store counts for restful models accessible in HEAD query
# tailboneRestful_METADATA = False
## If specified as a list of tailbone.restful.ScopedModel objects, these will be the only ones allowed.
## This is a next level step of model restriction to your db, this replaces validation.json
# from google.appengine.ext import ndb
# from tailbone.restful import ScopedModel
# class MyModel(ScopedModel):
# stuff = ndb.IntegerProperty()
# tailboneRestful_DEFINED_MODELS = {"mymodel": MyModel}
# tailboneRestful_RESTRICT_TO_DEFINED_MODELS = False
## Protected model names gets overridden by RESTRICTED_MODELS
# tailboneRestful_PROTECTED_MODEL_NAMES = ["(?i)tailbone.*", "custom", "(?i)users"]
## Proxy can only be used for the restricted domains if specified
# tailboneProxy_RESTRICTED_DOMAINS = ["google.com"]
## Cloud store bucket to use default is your application id
# tailboneCloudstore_BUCKET = "mybucketname"
# tailboneTurn_RESTIRCTED_DOMAINS = ["localhost"]
# tailboneTurn_SECRET = "notasecret"
# tailboneMesh_ENABLE_TURN = True
# tailboneMesh_ENABLE_WEBSOCKET = True
## Seconds until room expires
# tailboneMesh_ROOM_EXPIRATION = 86400
## Protected site
# tailboneStaticProtected_PASSWORD = "mypassword"
## the base path for the protected site; can change to "deploy" or something else, defaults to "app"
# tailboneStaticProtected_BASE_PATH = "app"
## Custom load balanced compute engine instance
# tailboneCustomCE_STARTUP_SCRIPT = """
# apt-get install build-essential
# curl -O http://nodejs.org/dist/v0.10.15/node-v0.10.15.tar.gz
# tar xvfz node-v0.10.15.tar.gz
# cd node-v0.10.15
# ./configure
# make
# make install
# cd ..
# rm -rf node-v0.10.15
# rm -f node-v0.10.15.tar.gz
# cat >server.js <<EOL
# %s
# EOL
# npm install ws
# node server.js
# """ % (open("client/mywebsocketserver.js").read(),)
|
apache-2.0
|
Universal-Model-Converter/UMC3.0a
|
data/Python/x86/Lib/encodings/cp1026.py
|
593
|
13369
|
""" Python Character Mapping Codec cp1026 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP1026.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1026',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x9c' # 0x04 -> CONTROL
u'\t' # 0x05 -> HORIZONTAL TABULATION
u'\x86' # 0x06 -> CONTROL
u'\x7f' # 0x07 -> DELETE
u'\x97' # 0x08 -> CONTROL
u'\x8d' # 0x09 -> CONTROL
u'\x8e' # 0x0A -> CONTROL
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x9d' # 0x14 -> CONTROL
u'\x85' # 0x15 -> CONTROL
u'\x08' # 0x16 -> BACKSPACE
u'\x87' # 0x17 -> CONTROL
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x92' # 0x1A -> CONTROL
u'\x8f' # 0x1B -> CONTROL
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u'\x80' # 0x20 -> CONTROL
u'\x81' # 0x21 -> CONTROL
u'\x82' # 0x22 -> CONTROL
u'\x83' # 0x23 -> CONTROL
u'\x84' # 0x24 -> CONTROL
u'\n' # 0x25 -> LINE FEED
u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
u'\x1b' # 0x27 -> ESCAPE
u'\x88' # 0x28 -> CONTROL
u'\x89' # 0x29 -> CONTROL
u'\x8a' # 0x2A -> CONTROL
u'\x8b' # 0x2B -> CONTROL
u'\x8c' # 0x2C -> CONTROL
u'\x05' # 0x2D -> ENQUIRY
u'\x06' # 0x2E -> ACKNOWLEDGE
u'\x07' # 0x2F -> BELL
u'\x90' # 0x30 -> CONTROL
u'\x91' # 0x31 -> CONTROL
u'\x16' # 0x32 -> SYNCHRONOUS IDLE
u'\x93' # 0x33 -> CONTROL
u'\x94' # 0x34 -> CONTROL
u'\x95' # 0x35 -> CONTROL
u'\x96' # 0x36 -> CONTROL
u'\x04' # 0x37 -> END OF TRANSMISSION
u'\x98' # 0x38 -> CONTROL
u'\x99' # 0x39 -> CONTROL
u'\x9a' # 0x3A -> CONTROL
u'\x9b' # 0x3B -> CONTROL
u'\x14' # 0x3C -> DEVICE CONTROL FOUR
u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
u'\x9e' # 0x3E -> CONTROL
u'\x1a' # 0x3F -> SUBSTITUTE
u' ' # 0x40 -> SPACE
u'\xa0' # 0x41 -> NO-BREAK SPACE
u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
u'{' # 0x48 -> LEFT CURLY BRACKET
u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
u'\xc7' # 0x4A -> LATIN CAPITAL LETTER C WITH CEDILLA
u'.' # 0x4B -> FULL STOP
u'<' # 0x4C -> LESS-THAN SIGN
u'(' # 0x4D -> LEFT PARENTHESIS
u'+' # 0x4E -> PLUS SIGN
u'!' # 0x4F -> EXCLAMATION MARK
u'&' # 0x50 -> AMPERSAND
u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
u'\u011e' # 0x5A -> LATIN CAPITAL LETTER G WITH BREVE
u'\u0130' # 0x5B -> LATIN CAPITAL LETTER I WITH DOT ABOVE
u'*' # 0x5C -> ASTERISK
u')' # 0x5D -> RIGHT PARENTHESIS
u';' # 0x5E -> SEMICOLON
u'^' # 0x5F -> CIRCUMFLEX ACCENT
u'-' # 0x60 -> HYPHEN-MINUS
u'/' # 0x61 -> SOLIDUS
u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'[' # 0x68 -> LEFT SQUARE BRACKET
u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
u'\u015f' # 0x6A -> LATIN SMALL LETTER S WITH CEDILLA
u',' # 0x6B -> COMMA
u'%' # 0x6C -> PERCENT SIGN
u'_' # 0x6D -> LOW LINE
u'>' # 0x6E -> GREATER-THAN SIGN
u'?' # 0x6F -> QUESTION MARK
u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
u'\u0131' # 0x79 -> LATIN SMALL LETTER DOTLESS I
u':' # 0x7A -> COLON
u'\xd6' # 0x7B -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\u015e' # 0x7C -> LATIN CAPITAL LETTER S WITH CEDILLA
u"'" # 0x7D -> APOSTROPHE
u'=' # 0x7E -> EQUALS SIGN
u'\xdc' # 0x7F -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
u'a' # 0x81 -> LATIN SMALL LETTER A
u'b' # 0x82 -> LATIN SMALL LETTER B
u'c' # 0x83 -> LATIN SMALL LETTER C
u'd' # 0x84 -> LATIN SMALL LETTER D
u'e' # 0x85 -> LATIN SMALL LETTER E
u'f' # 0x86 -> LATIN SMALL LETTER F
u'g' # 0x87 -> LATIN SMALL LETTER G
u'h' # 0x88 -> LATIN SMALL LETTER H
u'i' # 0x89 -> LATIN SMALL LETTER I
u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'}' # 0x8C -> RIGHT CURLY BRACKET
u'`' # 0x8D -> GRAVE ACCENT
u'\xa6' # 0x8E -> BROKEN BAR
u'\xb1' # 0x8F -> PLUS-MINUS SIGN
u'\xb0' # 0x90 -> DEGREE SIGN
u'j' # 0x91 -> LATIN SMALL LETTER J
u'k' # 0x92 -> LATIN SMALL LETTER K
u'l' # 0x93 -> LATIN SMALL LETTER L
u'm' # 0x94 -> LATIN SMALL LETTER M
u'n' # 0x95 -> LATIN SMALL LETTER N
u'o' # 0x96 -> LATIN SMALL LETTER O
u'p' # 0x97 -> LATIN SMALL LETTER P
u'q' # 0x98 -> LATIN SMALL LETTER Q
u'r' # 0x99 -> LATIN SMALL LETTER R
u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
u'\xb8' # 0x9D -> CEDILLA
u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
u'\xa4' # 0x9F -> CURRENCY SIGN
u'\xb5' # 0xA0 -> MICRO SIGN
u'\xf6' # 0xA1 -> LATIN SMALL LETTER O WITH DIAERESIS
u's' # 0xA2 -> LATIN SMALL LETTER S
u't' # 0xA3 -> LATIN SMALL LETTER T
u'u' # 0xA4 -> LATIN SMALL LETTER U
u'v' # 0xA5 -> LATIN SMALL LETTER V
u'w' # 0xA6 -> LATIN SMALL LETTER W
u'x' # 0xA7 -> LATIN SMALL LETTER X
u'y' # 0xA8 -> LATIN SMALL LETTER Y
u'z' # 0xA9 -> LATIN SMALL LETTER Z
u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
u'\xbf' # 0xAB -> INVERTED QUESTION MARK
u']' # 0xAC -> RIGHT SQUARE BRACKET
u'$' # 0xAD -> DOLLAR SIGN
u'@' # 0xAE -> COMMERCIAL AT
u'\xae' # 0xAF -> REGISTERED SIGN
u'\xa2' # 0xB0 -> CENT SIGN
u'\xa3' # 0xB1 -> POUND SIGN
u'\xa5' # 0xB2 -> YEN SIGN
u'\xb7' # 0xB3 -> MIDDLE DOT
u'\xa9' # 0xB4 -> COPYRIGHT SIGN
u'\xa7' # 0xB5 -> SECTION SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
u'\xac' # 0xBA -> NOT SIGN
u'|' # 0xBB -> VERTICAL LINE
u'\xaf' # 0xBC -> MACRON
u'\xa8' # 0xBD -> DIAERESIS
u'\xb4' # 0xBE -> ACUTE ACCENT
u'\xd7' # 0xBF -> MULTIPLICATION SIGN
u'\xe7' # 0xC0 -> LATIN SMALL LETTER C WITH CEDILLA
u'A' # 0xC1 -> LATIN CAPITAL LETTER A
u'B' # 0xC2 -> LATIN CAPITAL LETTER B
u'C' # 0xC3 -> LATIN CAPITAL LETTER C
u'D' # 0xC4 -> LATIN CAPITAL LETTER D
u'E' # 0xC5 -> LATIN CAPITAL LETTER E
u'F' # 0xC6 -> LATIN CAPITAL LETTER F
u'G' # 0xC7 -> LATIN CAPITAL LETTER G
u'H' # 0xC8 -> LATIN CAPITAL LETTER H
u'I' # 0xC9 -> LATIN CAPITAL LETTER I
u'\xad' # 0xCA -> SOFT HYPHEN
u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'~' # 0xCC -> TILDE
u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
u'\u011f' # 0xD0 -> LATIN SMALL LETTER G WITH BREVE
u'J' # 0xD1 -> LATIN CAPITAL LETTER J
u'K' # 0xD2 -> LATIN CAPITAL LETTER K
u'L' # 0xD3 -> LATIN CAPITAL LETTER L
u'M' # 0xD4 -> LATIN CAPITAL LETTER M
u'N' # 0xD5 -> LATIN CAPITAL LETTER N
u'O' # 0xD6 -> LATIN CAPITAL LETTER O
u'P' # 0xD7 -> LATIN CAPITAL LETTER P
u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
u'R' # 0xD9 -> LATIN CAPITAL LETTER R
u'\xb9' # 0xDA -> SUPERSCRIPT ONE
u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\\' # 0xDC -> REVERSE SOLIDUS
u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\xfc' # 0xE0 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xf7' # 0xE1 -> DIVISION SIGN
u'S' # 0xE2 -> LATIN CAPITAL LETTER S
u'T' # 0xE3 -> LATIN CAPITAL LETTER T
u'U' # 0xE4 -> LATIN CAPITAL LETTER U
u'V' # 0xE5 -> LATIN CAPITAL LETTER V
u'W' # 0xE6 -> LATIN CAPITAL LETTER W
u'X' # 0xE7 -> LATIN CAPITAL LETTER X
u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
u'\xb2' # 0xEA -> SUPERSCRIPT TWO
u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'#' # 0xEC -> NUMBER SIGN
u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
u'0' # 0xF0 -> DIGIT ZERO
u'1' # 0xF1 -> DIGIT ONE
u'2' # 0xF2 -> DIGIT TWO
u'3' # 0xF3 -> DIGIT THREE
u'4' # 0xF4 -> DIGIT FOUR
u'5' # 0xF5 -> DIGIT FIVE
u'6' # 0xF6 -> DIGIT SIX
u'7' # 0xF7 -> DIGIT SEVEN
u'8' # 0xF8 -> DIGIT EIGHT
u'9' # 0xF9 -> DIGIT NINE
u'\xb3' # 0xFA -> SUPERSCRIPT THREE
u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'"' # 0xFC -> QUOTATION MARK
u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
u'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
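### Usage sketch (editor's addition, not part of the generated codec):
### exercises the tables directly with the codecs helpers, without
### registering the codec under its 'cp1026' name.
if __name__ == '__main__':
    sample = '\xc1\xc2\xc3'  # cp1026 (EBCDIC) bytes for 'ABC', per the table above
    text, _ = codecs.charmap_decode(sample, 'strict', decoding_table)
    assert text == u'ABC'
    data, _ = codecs.charmap_encode(text, 'strict', encoding_table)
    assert data == sample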
|
mit
|
GalaxyTab4/twrp_matissevewifi
|
scripts/rt-tester/rt-tester.py
|
11005
|
5307
|
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
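# Example test specification (editor's illustration -- the opcode
# semantics are inferred from cmd_opcodes/test_opcodes above; the kernel
# tree ships real specifications as scripts/rt-tester/*.tst files).
# Each line is cmd:opcode:threadid:data:
#
#   C: schedfifo: 0: 80      command: make thread 0 SCHED_FIFO, data 80
#   C: locknowait: 0: 0      command: thread 0 takes rt-mutex 0
#   W: locked: 0: 0          wait until mutex 0 reports state "locked"
#   C: unlock: 0: 0
#   T: unlocked: 0: 0        test that mutex 0 reports state "unlocked"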
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit: pass
print "Pass"
sys.exit(0)
|
gpl-2.0
|
MaizerGomes/youtube-dl
|
test/test_InfoExtractor.py
|
104
|
2243
|
#!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL
from youtube_dl.extractor.common import InfoExtractor
from youtube_dl.extractor import YoutubeIE, get_info_extractor
class TestIE(InfoExtractor):
pass
class TestInfoExtractor(unittest.TestCase):
def setUp(self):
self.ie = TestIE(FakeYDL())
def test_ie_key(self):
self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)
def test_html_search_regex(self):
html = '<p id="foo">Watch this <a href="http://www.youtube.com/watch?v=BaW_jenozKc">video</a></p>'
search = lambda re, *args: self.ie._html_search_regex(re, html, *args)
self.assertEqual(search(r'<p id="foo">(.+?)</p>', 'foo'), 'Watch this video')
def test_opengraph(self):
ie = self.ie
html = '''
<meta name="og:title" content='Foo'/>
<meta content="Some video's description " name="og:description"/>
<meta property='og:image' content='http://domain.com/pic.jpg?key1=val1&key2=val2'/>
'''
self.assertEqual(ie._og_search_title(html), 'Foo')
self.assertEqual(ie._og_search_description(html), 'Some video\'s description ')
self.assertEqual(ie._og_search_thumbnail(html), 'http://domain.com/pic.jpg?key1=val1&key2=val2')
def test_html_search_meta(self):
ie = self.ie
html = '''
<meta name="a" content="1" />
<meta name='b' content='2'>
<meta name="c" content='3'>
<meta name=d content='4'>
<meta property="e" content='5' >
<meta content="6" name="f">
'''
self.assertEqual(ie._html_search_meta('a', html), '1')
self.assertEqual(ie._html_search_meta('b', html), '2')
self.assertEqual(ie._html_search_meta('c', html), '3')
self.assertEqual(ie._html_search_meta('d', html), '4')
self.assertEqual(ie._html_search_meta('e', html), '5')
self.assertEqual(ie._html_search_meta('f', html), '6')
if __name__ == '__main__':
unittest.main()
|
unlicense
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.7.2/Lib/_LWPCookieJar.py
|
267
|
6553
|
"""Load / save to libwww-perl (LWP) format files.
Actually, the format is slightly extended from that used by LWP's
(libwww-perl's) HTTP::Cookies, to avoid losing some RFC 2965 information
not recorded by LWP.
It uses the version string "2.0", though really there isn't an LWP Cookies
2.0 format. This indicates that there is extra information in here
(domain_dot and port_spec) while still being compatible with
libwww-perl, I hope.
"""
import time, re
from cookielib import (_warn_unhandled_exception, FileCookieJar, LoadError,
Cookie, MISSING_FILENAME_TEXT,
join_header_words, split_header_words,
iso2time, time2isoz)
def lwp_cookie_str(cookie):
"""Return string representation of Cookie in an the LWP cookie file format.
Actually, the format is extended a bit -- see module docstring.
"""
h = [(cookie.name, cookie.value),
("path", cookie.path),
("domain", cookie.domain)]
if cookie.port is not None: h.append(("port", cookie.port))
if cookie.path_specified: h.append(("path_spec", None))
if cookie.port_specified: h.append(("port_spec", None))
if cookie.domain_initial_dot: h.append(("domain_dot", None))
if cookie.secure: h.append(("secure", None))
if cookie.expires: h.append(("expires",
time2isoz(float(cookie.expires))))
if cookie.discard: h.append(("discard", None))
if cookie.comment: h.append(("comment", cookie.comment))
if cookie.comment_url: h.append(("commenturl", cookie.comment_url))
keys = cookie._rest.keys()
keys.sort()
for k in keys:
h.append((k, str(cookie._rest[k])))
h.append(("version", str(cookie.version)))
return join_header_words([h])
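# An illustrative "Set-Cookie3" line as produced above (editor's example,
# values are made up; boolean attributes appear bare, the rest as k=v):
#
#   Set-Cookie3: sid=abc123; path="/"; domain="example.com"; path_spec;
#       discard; version=0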
class LWPCookieJar(FileCookieJar):
"""
The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
"Set-Cookie3" is the format used by the libwww-perl library, not known
to be compatible with any browser, but which is easy to read and
doesn't lose information about RFC 2965 cookies.
Additional methods:
as_lwp_str(ignore_discard=True, ignore_expires=True)
"""
def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
"""Return cookies as a string of "\n"-separated "Set-Cookie3" headers.
ignore_discard and ignore_expires: see docstring for FileCookieJar.save
"""
now = time.time()
r = []
for cookie in self:
if not ignore_discard and cookie.discard:
continue
if not ignore_expires and cookie.is_expired(now):
continue
r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
return "\n".join(r+[""])
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
f = open(filename, "w")
try:
# There really isn't an LWP Cookies 2.0 format, but this indicates
# that there is extra information in here (domain_dot and
# port_spec) while still being compatible with libwww-perl, I hope.
f.write("#LWP-Cookies-2.0\n")
f.write(self.as_lwp_str(ignore_discard, ignore_expires))
finally:
f.close()
def _really_load(self, f, filename, ignore_discard, ignore_expires):
magic = f.readline()
if not re.search(self.magic_re, magic):
msg = ("%r does not look like a Set-Cookie3 (LWP) format "
"file" % filename)
raise LoadError(msg)
now = time.time()
header = "Set-Cookie3:"
boolean_attrs = ("port_spec", "path_spec", "domain_dot",
"secure", "discard")
value_attrs = ("version",
"port", "path", "domain",
"expires",
"comment", "commenturl")
try:
while 1:
line = f.readline()
if line == "": break
if not line.startswith(header):
continue
line = line[len(header):].strip()
for data in split_header_words([line]):
name, value = data[0]
standard = {}
rest = {}
for k in boolean_attrs:
standard[k] = False
for k, v in data[1:]:
if k is not None:
lc = k.lower()
else:
lc = None
# don't lose case distinction for unknown fields
if (lc in value_attrs) or (lc in boolean_attrs):
k = lc
if k in boolean_attrs:
if v is None: v = True
standard[k] = v
elif k in value_attrs:
standard[k] = v
else:
rest[k] = v
h = standard.get
expires = h("expires")
discard = h("discard")
if expires is not None:
expires = iso2time(expires)
if expires is None:
discard = True
domain = h("domain")
domain_specified = domain.startswith(".")
c = Cookie(h("version"), name, value,
h("port"), h("port_spec"),
domain, domain_specified, h("domain_dot"),
h("path"), h("path_spec"),
h("secure"),
expires,
discard,
h("comment"),
h("commenturl"),
rest)
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
continue
self.set_cookie(c)
except IOError:
raise
except Exception:
_warn_unhandled_exception()
raise LoadError("invalid Set-Cookie3 format file %r: %r" %
(filename, line))
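# Round-trip sketch (editor's addition; uses only the FileCookieJar API
# inherited above, and the path is illustrative):
#
#   jar = LWPCookieJar("cookies.lwp")
#   jar.save(ignore_discard=True, ignore_expires=True)   # writes the magic header
#   jar.load(ignore_discard=True, ignore_expires=True)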
|
mit
|
sdklite/gyp
|
test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
|
164
|
1699
|
#!/usr/bin/env python
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure overwriting read-only files works as expected (via win-tool).
"""
import TestGyp
import filecmp
import os
import stat
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['ninja'])
# First, create the source files.
os.makedirs('subdir')
read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
for f in read_only_files:
test.write(f, 'source_contents')
test.chmod(f, stat.S_IREAD)
if os.access(f, os.W_OK):
test.fail_test()
# Second, create the read-only destination files. Note that we are creating
# them where the ninja and win-tool will try to copy them to, in order to test
# that copies overwrite the files.
os.makedirs(test.built_file_path('dest/subdir'))
for f in read_only_files:
f = os.path.join('dest', f)
test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
test.chmod(test.built_file_path(f), stat.S_IREAD)
# Ensure not writable.
if os.access(test.built_file_path(f), os.W_OK):
test.fail_test()
test.run_gyp('copies_readonly_files.gyp')
test.build('copies_readonly_files.gyp')
# Check the destination files were overwritten by ninja.
for f in read_only_files:
f = os.path.join('dest', f)
test.must_contain(test.built_file_path(f), 'source_contents')
# This will fail if the files are not the same mode or contents.
for f in read_only_files:
if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
test.fail_test()
test.pass_test()
|
bsd-3-clause
|
jeandet/meson
|
mesonbuild/modules/gnome.py
|
1
|
77105
|
# Copyright 2015-2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection, gresources and gtk-doc'''
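# Typical meson.build usage of this module (editor's sketch; the positional
# arguments and keyword names follow the permittedKwargs declared on
# compile_resources() below):
#
#   gnome = import('gnome')
#   resources = gnome.compile_resources('app-resources',
#                                       'data/app.gresource.xml',
#                                       source_dir: 'data',
#                                       c_name: 'app')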
import os
import copy
import subprocess
from .. import build
from .. import mlog
from .. import mesonlib
from .. import compilers
from .. import interpreter
from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
from . import get_include_args
from . import ExtensionModule
from . import ModuleReturnValue
from ..mesonlib import MesonException, OrderedSet, Popen_safe, extract_as_list
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
#
# https://github.com/ninja-build/ninja/issues/1184
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'
native_glib_version = None
girwarning_printed = False
gdbuswarning_printed = False
gresource_warning_printed = False
_gir_has_extra_lib_arg = None
def gir_has_extra_lib_arg(intr_obj):
global _gir_has_extra_lib_arg
if _gir_has_extra_lib_arg is not None:
return _gir_has_extra_lib_arg
_gir_has_extra_lib_arg = False
try:
g_ir_scanner = intr_obj.find_program_impl('g-ir-scanner').get_command()
opts = Popen_safe(g_ir_scanner + ['--help'], stderr=subprocess.STDOUT)[1]
_gir_has_extra_lib_arg = '--extra-library' in opts
except (MesonException, FileNotFoundError, subprocess.CalledProcessError):
pass
return _gir_has_extra_lib_arg
class GnomeModule(ExtensionModule):
gir_dep = None
@staticmethod
def _get_native_glib_version(state):
global native_glib_version
if native_glib_version is None:
glib_dep = PkgConfigDependency('glib-2.0', state.environment,
{'native': True, 'required': False})
if glib_dep.found():
native_glib_version = glib_dep.get_version()
else:
mlog.warning('Could not detect glib version, assuming 2.54. '
'You may get build errors if your glib is older.')
native_glib_version = '2.54'
return native_glib_version
def __print_gresources_warning(self, state):
global gresource_warning_printed
if not gresource_warning_printed:
if not mesonlib.version_compare(self._get_native_glib_version(state), gresource_dep_needed_version):
mlog.warning('GLib compiled dependencies do not work reliably with \n'
'the current version of GLib. See the following upstream issue:',
mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
gresource_warning_printed = True
return []
@staticmethod
def _print_gdbus_warning():
global gdbuswarning_printed
if not gdbuswarning_printed:
mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
' include_directories of targets with GLib < 2.51.3:',
mlog.bold('https://github.com/mesonbuild/meson/issues/1387'))
gdbuswarning_printed = True
@FeatureNewKwargs('gnome.compile_resources', '0.37.0', ['gresource_bundle', 'export', 'install_header'])
@permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
'install', 'install_dir', 'extra_args', 'build_by_default'})
def compile_resources(self, state, args, kwargs):
self.__print_gresources_warning(state)
glib_version = self._get_native_glib_version(state)
cmd = ['glib-compile-resources', '@INPUT@']
source_dirs, dependencies = mesonlib.extract_as_list(kwargs, 'source_dir', 'dependencies', pop=True)
if len(args) < 2:
raise MesonException('Not enough arguments; the name of the resource '
'and the path to the XML file are required')
# Validate dependencies
for (ii, dep) in enumerate(dependencies):
if hasattr(dep, 'held_object'):
dependencies[ii] = dep = dep.held_object
if not isinstance(dep, (mesonlib.File, build.CustomTarget, build.CustomTargetIndex)):
m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
'"dependencies" argument.\nPlease pass the return value of ' \
'custom_target() or configure_file()'
raise MesonException(m.format(dep))
if isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
'be used with the current version of glib-compile-resources due to\n' \
'<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
raise MesonException(m)
ifile = args[1]
if isinstance(ifile, mesonlib.File):
# glib-compile-resources will be run inside the source dir,
# so we need either 'src_to_build' or the absolute path.
# Absolute path is the easiest choice.
if ifile.is_built:
ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
else:
ifile = os.path.join(ifile.subdir, ifile.fname)
elif isinstance(ifile, str):
ifile = os.path.join(state.subdir, ifile)
elif isinstance(ifile, (interpreter.CustomTargetHolder,
interpreter.CustomTargetIndexHolder,
interpreter.GeneratedObjectsHolder)):
m = 'Resource xml files generated at build-time cannot be used ' \
'with gnome.compile_resources() because we need to scan ' \
'the xml for dependencies. Use configure_file() instead ' \
'to generate it at configure-time.'
raise MesonException(m)
else:
raise MesonException('Invalid file argument: {!r}'.format(ifile))
depend_files, depends, subdirs = self._get_gresource_dependencies(
state, ifile, source_dirs, dependencies)
# Make source dirs relative to build dir now
source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
# Always include current directory, but after paths set by user
source_dirs.append(os.path.join(state.build_to_src, state.subdir))
# Ensure build directories of generated deps are included
source_dirs += subdirs
for source_dir in OrderedSet(source_dirs):
cmd += ['--sourcedir', source_dir]
if 'c_name' in kwargs:
cmd += ['--c-name', kwargs.pop('c_name')]
export = kwargs.pop('export', False)
if not export:
cmd += ['--internal']
cmd += ['--generate', '--target', '@OUTPUT@']
cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
gresource = kwargs.pop('gresource_bundle', False)
if gresource:
output = args[0] + '.gresource'
name = args[0] + '_gresource'
else:
output = args[0] + '.c'
name = args[0] + '_c'
if kwargs.get('install', False) and not gresource:
raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
install_header = kwargs.pop('install_header', False)
if install_header and gresource:
raise MesonException('The install_header kwarg does not apply to gresource bundles')
if install_header and not export:
raise MesonException('GResource header is installed yet export is not enabled')
kwargs['input'] = args[1]
kwargs['output'] = output
kwargs['depends'] = depends
if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
# This will eventually go out of sync if dependencies are added
kwargs['depend_files'] = depend_files
kwargs['command'] = cmd
else:
depfile = kwargs['output'] + '.d'
kwargs['depfile'] = depfile
kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
if gresource: # Only one target for .gresource files
return ModuleReturnValue(target_c, [target_c])
h_kwargs = {
'command': cmd,
'input': args[1],
'output': args[0] + '.h',
# The header doesn't actually care about the files, yet it errors if they are missing
'depends': depends
}
if 'build_by_default' in kwargs:
h_kwargs['build_by_default'] = kwargs['build_by_default']
if install_header:
h_kwargs['install'] = install_header
h_kwargs['install_dir'] = kwargs.get('install_dir',
state.environment.coredata.get_builtin_option('includedir'))
target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
rv = [target_c, target_h]
return ModuleReturnValue(rv, rv)
def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
cmd = ['glib-compile-resources',
input_file,
'--generate-dependencies']
# Prefer generated files over source files
cmd += ['--sourcedir', state.subdir] # Current build dir
for source_dir in source_dirs:
cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
if pc.returncode != 0:
m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
mlog.warning(m.format(cmd[1], stderr))
raise subprocess.CalledProcessError(pc.returncode, cmd)
dep_files = stdout.split('\n')[:-1]
depends = []
subdirs = []
for resfile in dep_files[:]:
resbasename = os.path.basename(resfile)
for dep in dependencies:
if hasattr(dep, 'held_object'):
dep = dep.held_object
if isinstance(dep, mesonlib.File):
if dep.fname != resbasename:
continue
dep_files.remove(resfile)
dep_files.append(dep)
subdirs.append(dep.subdir)
break
elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
fname = None
outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
for o, baseo in outputs:
if baseo == resbasename:
fname = o
break
if fname is not None:
dep_files.remove(resfile)
depends.append(dep)
subdirs.append(dep.get_subdir())
break
else:
# In generate-dependencies mode, glib-compile-resources doesn't raise
# an error for missing resources but instead prints whatever filename
# was listed in the input file. That's good because it means we can
# handle resource files that get generated as part of the build, as
# follows.
#
# If there are multiple generated resource files with the same basename
# then this code will get confused.
try:
f = mesonlib.File.from_source_file(state.environment.get_source_dir(),
".", resfile)
except MesonException:
raise MesonException(
'Resource "%s" listed in "%s" was not found. If this is a '
'generated file, pass the target that generates it to '
'gnome.compile_resources() using the "dependencies" '
'keyword argument.' % (resfile, input_file))
dep_files.remove(resfile)
dep_files.append(f)
return dep_files, depends, subdirs
def _get_link_args(self, state, lib, depends, include_rpath=False,
use_gir_args=False):
link_command = []
# Construct link args
if isinstance(lib, build.SharedLibrary):
libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
link_command.append('-L' + libdir)
# Needed for the following binutils bug:
# https://github.com/mesonbuild/meson/issues/1911
# However, g-ir-scanner does not understand -Wl,-rpath
# so we need to use -L instead
for d in state.backend.determine_rpath_dirs(lib):
d = os.path.join(state.environment.get_build_dir(), d)
link_command.append('-L' + d)
if include_rpath:
link_command.append('-Wl,-rpath,' + d)
if include_rpath:
link_command.append('-Wl,-rpath,' + libdir)
depends.append(lib)
if gir_has_extra_lib_arg(self.interpreter) and use_gir_args:
link_command.append('--extra-library=' + lib.name)
else:
link_command.append('-l' + lib.name)
return link_command
def _get_dependencies_flags(self, deps, state, depends, include_rpath=False,
use_gir_args=False, separate_nodedup=False):
cflags = OrderedSet()
internal_ldflags = OrderedSet()
external_ldflags = OrderedSet()
# External linker flags that can't be de-duped reliably because they
# require two args in order, such as -framework AVFoundation
external_ldflags_nodedup = []
gi_includes = OrderedSet()
deps = mesonlib.listify(deps, unholder=True)
for dep in deps:
if isinstance(dep, InternalDependency):
cflags.update(get_include_args(dep.include_directories))
for lib in dep.libraries:
if hasattr(lib, 'held_object'):
lib = lib.held_object
internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
use_gir_args, True)
cflags.update(libdepflags[0])
internal_ldflags.update(libdepflags[1])
external_ldflags.update(libdepflags[2])
external_ldflags_nodedup += libdepflags[3]
gi_includes.update(libdepflags[4])
extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath,
use_gir_args, True)
cflags.update(extdepflags[0])
internal_ldflags.update(extdepflags[1])
external_ldflags.update(extdepflags[2])
external_ldflags_nodedup += extdepflags[3]
gi_includes.update(extdepflags[4])
for source in dep.sources:
if hasattr(source, 'held_object'):
source = source.held_object
if isinstance(source, GirTarget):
gi_includes.update([os.path.join(state.environment.get_build_dir(),
source.get_subdir())])
# This should be any dependency other than an internal one.
elif isinstance(dep, Dependency):
cflags.update(dep.get_compile_args())
ldflags = iter(dep.get_link_args(raw=True))
for lib in ldflags:
if (os.path.isabs(lib) and
# For PkgConfigDependency only:
getattr(dep, 'is_libtool', False)):
lib_dir = os.path.dirname(lib)
external_ldflags.update(["-L%s" % lib_dir])
if include_rpath:
external_ldflags.update(['-Wl,-rpath {}'.format(lib_dir)])
libname = os.path.basename(lib)
if libname.startswith("lib"):
libname = libname[3:]
libname = libname.split(".so")[0]
lib = "-l%s" % libname
# FIXME: Hack to avoid passing some compiler options in
if lib.startswith("-W"):
continue
# If it's a framework arg, slurp the framework name too
# to preserve the order of arguments
if lib == '-framework':
external_ldflags_nodedup += [lib, next(ldflags)]
else:
external_ldflags.update([lib])
if isinstance(dep, PkgConfigDependency):
girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
if girdir:
gi_includes.update([girdir])
elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
cflags.update(get_include_args(dep.get_include_dirs()))
depends.append(dep)
else:
mlog.log('dependency {!r} not handled to build gir files'.format(dep))
continue
if gir_has_extra_lib_arg(self.interpreter) and use_gir_args:
def fix_ldflags(ldflags):
fixed_ldflags = OrderedSet()
for ldflag in ldflags:
if ldflag.startswith("-l"):
ldflag = ldflag.replace('-l', '--extra-library=', 1)
fixed_ldflags.add(ldflag)
return fixed_ldflags
internal_ldflags = fix_ldflags(internal_ldflags)
external_ldflags = fix_ldflags(external_ldflags)
if not separate_nodedup:
external_ldflags.update(external_ldflags_nodedup)
return cflags, internal_ldflags, external_ldflags, gi_includes
else:
return cflags, internal_ldflags, external_ldflags, external_ldflags_nodedup, gi_includes
def _unwrap_gir_target(self, girtarget):
while hasattr(girtarget, 'held_object'):
girtarget = girtarget.held_object
if not isinstance(girtarget, (build.Executable, build.SharedLibrary)):
raise MesonException('Gir target must be an executable or shared library')
return girtarget
def _get_gir_dep(self, state):
try:
gir_dep = self.gir_dep or PkgConfigDependency('gobject-introspection-1.0',
state.environment,
{'native': True})
pkgargs = gir_dep.get_compile_args()
except Exception:
raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.')
return gir_dep, pkgargs
def _scan_header(self, kwargs):
ret = []
header = kwargs.pop('header', None)
if header:
if not isinstance(header, str):
raise MesonException('header must be a string')
ret = ['--c-include=' + header]
return ret
def _scan_extra_args(self, kwargs):
return mesonlib.stringlistify(kwargs.pop('extra_args', []))
def _scan_link_withs(self, state, depends, kwargs):
ret = []
if 'link_with' in kwargs:
link_with = mesonlib.extract_as_list(kwargs, 'link_with', pop = True)
for link in link_with:
ret += self._get_link_args(state, link.held_object, depends,
use_gir_args=True)
return ret
# May mutate depends and gir_inc_dirs
def _scan_include(self, state, depends, gir_inc_dirs, kwargs):
ret = []
if 'includes' in kwargs:
includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
for inc in includes:
if hasattr(inc, 'held_object'):
inc = inc.held_object
if isinstance(inc, str):
ret += ['--include=%s' % (inc, )]
elif isinstance(inc, GirTarget):
gir_inc_dirs += [
os.path.join(state.environment.get_build_dir(),
inc.get_subdir()),
]
ret += [
"--include-uninstalled=%s" % (os.path.join(inc.get_subdir(), inc.get_basename()), )
]
depends += [inc]
else:
raise MesonException(
'Gir includes must be str, GirTarget, or list of them')
return ret
def _scan_symbol_prefix(self, kwargs):
ret = []
if 'symbol_prefix' in kwargs:
sym_prefixes = mesonlib.stringlistify(kwargs.pop('symbol_prefix', []))
ret += ['--symbol-prefix=%s' % sym_prefix for sym_prefix in sym_prefixes]
return ret
def _scan_identifier_prefix(self, kwargs):
ret = []
if 'identifier_prefix' in kwargs:
identifier_prefix = kwargs.pop('identifier_prefix')
if not isinstance(identifier_prefix, str):
raise MesonException('Gir identifier prefix must be str')
ret += ['--identifier-prefix=%s' % identifier_prefix]
return ret
def _scan_export_packages(self, kwargs):
ret = []
if 'export_packages' in kwargs:
pkgs = kwargs.pop('export_packages')
if isinstance(pkgs, str):
ret += ['--pkg-export=%s' % pkgs]
elif isinstance(pkgs, list):
ret += ['--pkg-export=%s' % pkg for pkg in pkgs]
else:
raise MesonException('Gir export packages must be str or list')
return ret
def _scan_inc_dirs(self, kwargs):
ret = mesonlib.extract_as_list(kwargs, 'include_directories', pop = True)
for incd in ret:
if not isinstance(incd.held_object, (str, build.IncludeDirs)):
raise MesonException(
'Gir include dirs should be include_directories().')
return ret
def _scan_langs(self, state, langs):
ret = []
for lang in langs:
for link_arg in state.environment.coredata.get_external_link_args(lang):
if link_arg.startswith('-L'):
ret.append(link_arg)
return ret
def _scan_gir_targets(self, state, girtargets):
ret = []
for girtarget in girtargets:
if isinstance(girtarget, build.Executable):
ret += ['--program', girtarget]
elif isinstance(girtarget, build.SharedLibrary):
libname = girtarget.get_basename()
# Needed for the following binutils bug:
# https://github.com/mesonbuild/meson/issues/1911
# However, g-ir-scanner does not understand -Wl,-rpath
# so we need to use -L instead
for d in state.backend.determine_rpath_dirs(girtarget):
d = os.path.join(state.environment.get_build_dir(), d)
ret.append('-L' + d)
ret += ['--library', libname]
# need to put our output directory first as we need to use the
# generated libraries instead of any possibly installed system/prefix
# ones.
ret += ["-L@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id()]
return ret
def _get_girtargets_langs_compilers(self, girtargets):
ret = []
for girtarget in girtargets:
for lang, compiler in girtarget.compilers.items():
# XXX: Can you use g-i with any other language?
if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
ret.append((lang, compiler))
break
return ret
def _get_gir_targets_deps(self, girtargets):
ret = []
for girtarget in girtargets:
ret += girtarget.get_all_link_deps()
ret += girtarget.get_external_deps()
return ret
def _get_gir_targets_inc_dirs(self, girtargets):
ret = []
for girtarget in girtargets:
ret += girtarget.get_include_dirs()
return ret
def _get_langs_compilers_flags(self, state, langs_compilers):
cflags = []
internal_ldflags = []
external_ldflags = []
for lang, compiler in langs_compilers:
if state.global_args.get(lang):
cflags += state.global_args[lang]
if state.project_args.get(lang):
cflags += state.project_args[lang]
if 'b_sanitize' in compiler.base_options:
sanitize = state.environment.coredata.base_options['b_sanitize'].value
cflags += compilers.sanitizer_compile_args(sanitize)
if 'address' in sanitize.split(','):
internal_ldflags += ['-lasan'] # This must be first in ldflags
# FIXME: Linking directly to libasan is not recommended but g-ir-scanner
# does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
# ldflags += compilers.sanitizer_link_args(sanitize)
return cflags, internal_ldflags, external_ldflags
def _make_gir_filelist(self, state, srcdir, ns, nsversion, girtargets, libsources):
gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0])
if not os.path.isdir(gir_filelist_dir):
os.mkdir(gir_filelist_dir)
gir_filelist_filename = os.path.join(gir_filelist_dir, '%s_%s_gir_filelist' % (ns, nsversion))
with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
for s in libsources:
if hasattr(s, 'held_object'):
s = s.held_object
if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
for custom_output in s.get_outputs():
gir_filelist.write(os.path.join(state.environment.get_build_dir(),
state.backend.get_target_dir(s),
custom_output) + '\n')
elif isinstance(s, mesonlib.File):
gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n')
elif isinstance(s, build.GeneratedList):
for gen_src in s.get_outputs():
gir_filelist.write(os.path.join(srcdir, gen_src) + '\n')
else:
gir_filelist.write(os.path.join(srcdir, s) + '\n')
return gir_filelist_filename
def _make_gir_target(self, state, girfile, scan_command, depends, kwargs):
scankwargs = {'output': girfile,
'command': scan_command,
'depends': depends}
if 'install' in kwargs:
scankwargs['install'] = kwargs['install']
scankwargs['install_dir'] = kwargs.get('install_dir_gir',
os.path.join(state.environment.get_datadir(), 'gir-1.0'))
if 'build_by_default' in kwargs:
scankwargs['build_by_default'] = kwargs['build_by_default']
return GirTarget(girfile, state.subdir, state.subproject, scankwargs)
def _make_typelib_target(self, state, typelib_output, typelib_cmd, kwargs):
typelib_kwargs = {
'output': typelib_output,
'command': typelib_cmd,
}
if 'install' in kwargs:
typelib_kwargs['install'] = kwargs['install']
typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
os.path.join(state.environment.get_libdir(), 'girepository-1.0'))
if 'build_by_default' in kwargs:
typelib_kwargs['build_by_default'] = kwargs['build_by_default']
return TypelibTarget(typelib_output, state.subdir, state.subproject, typelib_kwargs)
# May mutate depends
def _gather_typelib_includes_and_update_depends(self, state, deps, depends):
# Need to recursively add deps on GirTarget sources from our
# dependencies and also find the include directories needed for the
# typelib generation custom target below.
typelib_includes = []
for dep in deps:
if hasattr(dep, 'held_object'):
dep = dep.held_object
# Add a dependency on each GirTarget listed in dependencies and add
# the directory where it will be generated to the typelib includes
if isinstance(dep, InternalDependency):
for source in dep.sources:
if hasattr(source, 'held_object'):
source = source.held_object
if isinstance(source, GirTarget) and source not in depends:
depends.append(source)
subdir = os.path.join(state.environment.get_build_dir(),
source.get_subdir())
if subdir not in typelib_includes:
typelib_includes.append(subdir)
# Do the same, but for dependencies of dependencies. These are
# stored in the list of generated sources for each link dep (from
# girtarget.get_all_link_deps() above).
# FIXME: Store this in the original form from declare_dependency()
# so it can be used here directly.
elif isinstance(dep, build.SharedLibrary):
for source in dep.generated:
if isinstance(source, GirTarget):
subdir = os.path.join(state.environment.get_build_dir(),
source.get_subdir())
if subdir not in typelib_includes:
typelib_includes.append(subdir)
elif isinstance(dep, PkgConfigDependency):
girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
if girdir and girdir not in typelib_includes:
typelib_includes.append(girdir)
return typelib_includes
def _get_external_args_for_langs(self, state, langs):
ret = []
for lang in langs:
ret += state.environment.coredata.get_external_args(lang)
return ret
@staticmethod
def _get_scanner_cflags(cflags):
'g-ir-scanner only accepts -I/-D/-U; must ignore all other flags'
for f in cflags:
if f.startswith(('-D', '-U', '-I')):
yield f
@staticmethod
def _get_scanner_ldflags(ldflags):
'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags'
for f in ldflags:
if f.startswith(('-L', '-l', '--extra-library')):
yield f
@FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
@permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
'packages', 'header', 'build_by_default'})
def generate_gir(self, state, args, kwargs):
if not args:
raise MesonException('generate_gir takes at least one argument')
if kwargs.get('install_dir'):
raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
giscanner = self.interpreter.find_program_impl('g-ir-scanner')
gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
girtargets = [self._unwrap_gir_target(arg) for arg in args]
if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]):
raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
self.gir_dep, pkgargs = self._get_gir_dep(state)
ns = kwargs.pop('namespace')
nsversion = kwargs.pop('nsversion')
libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
girfile = '%s-%s.gir' % (ns, nsversion)
srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
depends = [] + girtargets
gir_inc_dirs = []
langs_compilers = self._get_girtargets_langs_compilers(girtargets)
cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
deps = self._get_gir_targets_deps(girtargets)
deps += extract_as_list(kwargs, 'dependencies', pop=True, unholder=True)
typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
# are not used here.
dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes = \
self._get_dependencies_flags(deps, state, depends, use_gir_args=True)
cflags += list(self._get_scanner_cflags(dep_cflags))
cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers])))
internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags))
external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags))
girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets)
inc_dirs = self._scan_inc_dirs(kwargs)
scan_command = [giscanner]
scan_command += pkgargs
scan_command += ['--no-libtool']
scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
scan_command += ['--warn-all']
scan_command += ['--output', '@OUTPUT@']
scan_command += self._scan_header(kwargs)
scan_command += self._scan_extra_args(kwargs)
scan_command += ['-I' + srcdir, '-I' + builddir]
scan_command += get_include_args(girtargets_inc_dirs)
scan_command += ['--filelist=' + self._make_gir_filelist(state, srcdir, ns, nsversion, girtargets, libsources)]
scan_command += self._scan_link_withs(state, depends, kwargs)
scan_command += self._scan_include(state, depends, gir_inc_dirs, kwargs)
scan_command += self._scan_symbol_prefix(kwargs)
scan_command += self._scan_identifier_prefix(kwargs)
scan_command += self._scan_export_packages(kwargs)
scan_command += ['--cflags-begin']
scan_command += cflags
scan_command += ['--cflags-end']
scan_command += get_include_args(inc_dirs)
scan_command += get_include_args(list(gi_includes) + gir_inc_dirs + inc_dirs, prefix='--add-include-path=')
scan_command += list(internal_ldflags)
scan_command += self._scan_gir_targets(state, girtargets)
scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
scan_command += list(external_ldflags)
scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs)
typelib_output = '%s-%s.typelib' % (ns, nsversion)
typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@']
typelib_cmd += get_include_args(gir_inc_dirs, prefix='--includedir=')
for incdir in typelib_includes:
typelib_cmd += ["--includedir=" + incdir]
typelib_target = self._make_typelib_target(state, typelib_output, typelib_cmd, kwargs)
rv = [scan_target, typelib_target]
return ModuleReturnValue(rv, rv)
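# Editor's sketch of a generate_gir() call from meson.build (keyword names
# taken from the permittedKwargs above; 'mylib' and 'lib_sources' are
# hypothetical build targets/variables):
#
#   gnome.generate_gir(mylib,
#                      namespace: 'Foo',
#                      nsversion: '1.0',
#                      sources: lib_sources,
#                      install: true)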
@FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
@permittedKwargs({'build_by_default', 'depend_files'})
def compile_schemas(self, state, args, kwargs):
if args:
raise MesonException('Compile_schemas does not take positional arguments.')
srcdir = os.path.join(state.build_to_src, state.subdir)
outdir = state.subdir
cmd = [self.interpreter.find_program_impl('glib-compile-schemas')]
cmd += ['--targetdir', outdir, srcdir]
kwargs['command'] = cmd
kwargs['input'] = []
kwargs['output'] = 'gschemas.compiled'
if state.subdir == '':
targetname = 'gsettings-compile'
else:
targetname = 'gsettings-compile-' + state.subdir.replace('/', '_')
target_g = build.CustomTarget(targetname, state.subdir, state.subproject, kwargs)
return ModuleReturnValue(target_g, [target_g])
@permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
def yelp(self, state, args, kwargs):
if len(args) < 1:
raise MesonException('Yelp requires a project id')
project_id = args[0]
sources = mesonlib.stringlistify(kwargs.pop('sources', []))
if not sources:
if len(args) > 1:
sources = mesonlib.stringlistify(args[1:])
if not sources:
raise MesonException('Yelp requires a list of sources')
source_str = '@@'.join(sources)
langs = mesonlib.stringlistify(kwargs.pop('languages', []))
if langs:
mlog.deprecation('''The "languages" argument of gnome.yelp() is deprecated.
Use a LINGUAS file in the sources directory instead.
This will become a hard error in the future.''')
media = mesonlib.stringlistify(kwargs.pop('media', []))
symlinks = kwargs.pop('symlink_media', True)
if not isinstance(symlinks, bool):
raise MesonException('symlink_media must be a boolean')
if kwargs:
raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys())))
script = state.environment.get_build_command()
args = ['--internal',
'yelphelper',
'install',
'--subdir=' + state.subdir,
'--id=' + project_id,
'--installdir=' + os.path.join(state.environment.get_datadir(), 'help'),
'--sources=' + source_str]
if symlinks:
args.append('--symlinks=true')
if media:
args.append('--media=' + '@@'.join(media))
if langs:
args.append('--langs=' + '@@'.join(langs))
inscript = build.RunScript(script, args)
potargs = state.environment.get_build_command() + [
'--internal', 'yelphelper', 'pot',
'--subdir=' + state.subdir,
'--id=' + project_id,
'--sources=' + source_str,
]
pottarget = build.RunTarget('help-' + project_id + '-pot', potargs[0],
potargs[1:], [], state.subdir, state.subproject)
poargs = state.environment.get_build_command() + [
'--internal', 'yelphelper', 'update-po',
'--subdir=' + state.subdir,
'--id=' + project_id,
'--sources=' + source_str,
'--langs=' + '@@'.join(langs),
]
potarget = build.RunTarget('help-' + project_id + '-update-po', poargs[0],
poargs[1:], [], state.subdir, state.subproject)
rv = [inscript, pottarget, potarget]
return ModuleReturnValue(None, rv)
@FeatureNewKwargs('gnome.gtkdoc', '0.37.0', ['namespace', 'mode'])
@permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install',
'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile',
'fixxref_args', 'html_args', 'html_assets', 'content_files',
'mkdb_args', 'ignore_headers', 'include_directories',
'namespace', 'mode', 'expand_content_files'})
def gtkdoc(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Gtkdoc must have one positional argument.')
modulename = args[0]
if not isinstance(modulename, str):
raise MesonException('Gtkdoc arg must be string.')
if 'src_dir' not in kwargs:
raise MesonException('Keyword argument src_dir missing.')
main_file = kwargs.get('main_sgml', '')
if not isinstance(main_file, str):
raise MesonException('Main sgml keyword argument must be a string.')
main_xml = kwargs.get('main_xml', '')
if not isinstance(main_xml, str):
raise MesonException('Main xml keyword argument must be a string.')
if main_xml != '':
if main_file != '':
raise MesonException('You can only specify main_xml or main_sgml, not both.')
main_file = main_xml
targetname = modulename + '-doc'
command = state.environment.get_build_command()
namespace = kwargs.get('namespace', '')
mode = kwargs.get('mode', 'auto')
VALID_MODES = ('xml', 'sgml', 'none', 'auto')
if mode not in VALID_MODES:
raise MesonException('gtkdoc: Mode {} is not a valid mode: {}'.format(mode, VALID_MODES))
src_dirs = mesonlib.extract_as_list(kwargs, 'src_dir')
header_dirs = []
for src_dir in src_dirs:
if hasattr(src_dir, 'held_object'):
src_dir = src_dir.held_object
if not isinstance(src_dir, build.IncludeDirs):
raise MesonException('Invalid keyword argument for src_dir.')
for inc_dir in src_dir.get_incdirs():
header_dirs.append(os.path.join(state.environment.get_source_dir(),
src_dir.get_curdir(), inc_dir))
header_dirs.append(os.path.join(state.environment.get_build_dir(),
src_dir.get_curdir(), inc_dir))
else:
header_dirs.append(src_dir)
args = ['--internal', 'gtkdoc',
'--sourcedir=' + state.environment.get_source_dir(),
'--builddir=' + state.environment.get_build_dir(),
'--subdir=' + state.subdir,
'--headerdirs=' + '@@'.join(header_dirs),
'--mainfile=' + main_file,
'--modulename=' + modulename,
'--mode=' + mode]
if namespace:
args.append('--namespace=' + namespace)
args += self._unpack_args('--htmlargs=', 'html_args', kwargs)
args += self._unpack_args('--scanargs=', 'scan_args', kwargs)
args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs)
args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state)
args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs)
args += self._unpack_args('--mkdbargs=', 'mkdb_args', kwargs)
args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state)
depends = []
content_files = []
for s in mesonlib.extract_as_list(kwargs, 'content_files'):
if hasattr(s, 'held_object'):
s = s.held_object
if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
depends.append(s)
for o in s.get_outputs():
content_files.append(os.path.join(state.environment.get_build_dir(),
state.backend.get_target_dir(s),
o))
elif isinstance(s, mesonlib.File):
content_files.append(s.absolute_path(state.environment.get_source_dir(),
state.environment.get_build_dir()))
elif isinstance(s, build.GeneratedList):
depends.append(s)
for gen_src in s.get_outputs():
content_files.append(os.path.join(state.environment.get_source_dir(),
state.subdir,
gen_src))
elif isinstance(s, str):
content_files.append(os.path.join(state.environment.get_source_dir(),
state.subdir,
s))
else:
raise MesonException(
'Invalid object type: {!r}'.format(s.__class__.__name__))
args += ['--content-files=' + '@@'.join(content_files)]
args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state)
args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs)
args += self._unpack_args('--installdir=', 'install_dir', kwargs)
args += self._get_build_args(kwargs, state, depends)
res = [build.RunTarget(targetname, command[0], command[1:] + args, depends, state.subdir, state.subproject)]
if kwargs.get('install', True):
res.append(build.RunScript(command, args))
return ModuleReturnValue(None, res)
def _get_build_args(self, kwargs, state, depends):
args = []
deps = extract_as_list(kwargs, 'dependencies', unholder=True)
cflags, internal_ldflags, external_ldflags, gi_includes = \
self._get_dependencies_flags(deps, state, depends, include_rpath=True)
inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories')
for incd in inc_dirs:
if not isinstance(incd.held_object, (str, build.IncludeDirs)):
raise MesonException(
'Gir include dirs should be include_directories().')
cflags.update(get_include_args(inc_dirs))
ldflags = OrderedSet()
ldflags.update(internal_ldflags)
ldflags.update(external_ldflags)
if state.environment.is_cross_build():
compiler = state.environment.coredata.cross_compilers.get('c')
else:
cflags.update(state.environment.coredata.get_external_args('c'))
ldflags.update(state.environment.coredata.get_external_link_args('c'))
compiler = state.environment.coredata.compilers.get('c')
if compiler:
args += ['--cc=%s' % ' '.join(compiler.get_exelist())]
args += ['--ld=%s' % ' '.join(compiler.get_linker_exelist())]
if cflags:
args += ['--cflags=%s' % ' '.join(cflags)]
if ldflags:
args += ['--ldflags=%s' % ' '.join(ldflags)]
return args
@noKwargs
def gtkdoc_html_dir(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Must have exactly one argument.')
modulename = args[0]
if not isinstance(modulename, str):
raise MesonException('Argument must be a string')
return ModuleReturnValue(os.path.join('share/gtk-doc/html', modulename), [])
@staticmethod
    def _unpack_args(arg, kwarg_name, kwargs, expand_file_state=None):
if kwarg_name not in kwargs:
return []
new_args = mesonlib.extract_as_list(kwargs, kwarg_name)
args = []
for i in new_args:
            if expand_file_state and isinstance(i, mesonlib.File):
                i = i.absolute_path(expand_file_state.environment.get_source_dir(), expand_file_state.environment.get_build_dir())
            elif expand_file_state and isinstance(i, str):
                i = os.path.join(expand_file_state.environment.get_source_dir(), expand_file_state.subdir, i)
elif not isinstance(i, str):
raise MesonException(kwarg_name + ' values must be strings.')
args.append(i)
if args:
return [arg + '@@'.join(args)]
return []
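    # For illustration (hypothetical values): _unpack_args('--htmlargs=',
    # 'html_args', {'html_args': ['-a', '-b']}) returns
    # ['--htmlargs=-a@@-b']; '@@' is the separator the internal helper
    # scripts split these joined values back on.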
def _get_autocleanup_args(self, kwargs, glib_version):
if not mesonlib.version_compare(glib_version, '>= 2.49.1'):
# Warn if requested, silently disable if not
if 'autocleanup' in kwargs:
mlog.warning('Glib version ({}) is too old to support the \'autocleanup\' '
'kwarg, need 2.49.1 or newer'.format(glib_version))
return []
autocleanup = kwargs.pop('autocleanup', 'all')
values = ('none', 'objects', 'all')
if autocleanup not in values:
raise MesonException('gdbus_codegen does not support {!r} as an autocleanup value, '
'must be one of: {!r}'.format(autocleanup, ', '.join(values)))
return ['--c-generate-autocleanup', autocleanup]
@FeatureNewKwargs('build target', '0.46.0', ['install_header', 'install_dir', 'sources'])
@FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
@FeatureNewKwargs('build target', '0.47.0', ['extra_args', 'autocleanup'])
@permittedKwargs({'interface_prefix', 'namespace', 'extra_args', 'autocleanup', 'object_manager', 'build_by_default',
'annotations', 'docbook', 'install_header', 'install_dir', 'sources'})
def gdbus_codegen(self, state, args, kwargs):
if len(args) not in (1, 2):
raise MesonException('gdbus_codegen takes at most two arguments, name and xml file.')
namebase = args[0]
xml_files = args[1:]
cmd = [self.interpreter.find_program_impl('gdbus-codegen')]
extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', []))
cmd += extra_args
# Autocleanup supported?
glib_version = self._get_native_glib_version(state)
cmd += self._get_autocleanup_args(kwargs, glib_version)
if 'interface_prefix' in kwargs:
cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
if 'namespace' in kwargs:
cmd += ['--c-namespace', kwargs.pop('namespace')]
if kwargs.get('object_manager', False):
cmd += ['--c-generate-object-manager']
if 'sources' in kwargs:
xml_files += mesonlib.listify(kwargs.pop('sources'))
build_by_default = kwargs.get('build_by_default', False)
# Annotations are a bit ugly in that they are a list of lists of strings...
annotations = kwargs.pop('annotations', [])
if not isinstance(annotations, list):
raise MesonException('annotations takes a list')
if annotations and isinstance(annotations, list) and not isinstance(annotations[0], list):
annotations = [annotations]
for annotation in annotations:
if len(annotation) != 3 or not all(isinstance(i, str) for i in annotation):
raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE')
cmd += ['--annotate'] + annotation
targets = []
install_header = kwargs.get('install_header', False)
install_dir = kwargs.get('install_dir', state.environment.coredata.get_builtin_option('includedir'))
output = namebase + '.c'
# Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2)
# Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2)
if mesonlib.version_compare(glib_version, '>= 2.56.2'):
custom_kwargs = {'input': xml_files,
'output': output,
'command': cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@'],
'build_by_default': build_by_default
}
else:
if 'docbook' in kwargs:
docbook = kwargs['docbook']
if not isinstance(docbook, str):
raise MesonException('docbook value must be a string.')
cmd += ['--generate-docbook', docbook]
# https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
if mesonlib.version_compare(glib_version, '>= 2.51.3'):
cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
else:
self._print_gdbus_warning()
cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
custom_kwargs = {'input': xml_files,
'output': output,
'command': cmd,
'build_by_default': build_by_default
}
cfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
targets.append(cfile_custom_target)
output = namebase + '.h'
if mesonlib.version_compare(glib_version, '>= 2.56.2'):
custom_kwargs = {'input': xml_files,
'output': output,
'command': cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@'],
'build_by_default': build_by_default,
'install': install_header,
'install_dir': install_dir
}
else:
custom_kwargs = {'input': xml_files,
'output': output,
'command': cmd,
'build_by_default': build_by_default,
'install': install_header,
'install_dir': install_dir,
'depends': cfile_custom_target
}
hfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
targets.append(hfile_custom_target)
if 'docbook' in kwargs:
docbook = kwargs['docbook']
if not isinstance(docbook, str):
raise MesonException('docbook value must be a string.')
docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@']
# The docbook output is always ${docbook}-${name_of_xml_file}
output = namebase + '-docbook'
outputs = []
for f in xml_files:
outputs.append('{}-{}'.format(docbook, os.path.basename(str(f))))
if mesonlib.version_compare(glib_version, '>= 2.56.2'):
custom_kwargs = {'input': xml_files,
'output': outputs,
'command': docbook_cmd,
'build_by_default': build_by_default
}
else:
custom_kwargs = {'input': xml_files,
'output': outputs,
'command': cmd,
'build_by_default': build_by_default,
'depends': cfile_custom_target
}
docbook_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
targets.append(docbook_custom_target)
return ModuleReturnValue(targets, targets)
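    # Typical meson.build usage, as a sketch (names are illustrative, not
    # taken from this file):
    #   gnome = import('gnome')
    #   generated = gnome.gdbus_codegen('example-gdbus',
    #     'com.example.Foo.xml', interface_prefix: 'com.example.')
    # which returns the generated .c/.h (and optionally docbook) targets.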
@permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir',
'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod',
'fhead', 'fprod', 'ftail', 'vhead', 'vtail', 'depends'})
def mkenums(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Mkenums requires one positional argument.')
basename = args[0]
if 'sources' not in kwargs:
raise MesonException('Missing keyword argument "sources".')
sources = kwargs.pop('sources')
if isinstance(sources, str):
sources = [sources]
elif not isinstance(sources, list):
raise MesonException(
'Sources keyword argument must be a string or array.')
cmd = []
known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
'identifier_prefix', 'symbol_prefix', 'template',
'vhead', 'vprod', 'vtail']
known_custom_target_kwargs = ['install_dir', 'build_always',
'depends', 'depend_files']
c_template = h_template = None
install_header = False
for arg, value in kwargs.items():
if arg == 'sources':
raise AssertionError("sources should've already been handled")
elif arg == 'c_template':
c_template = value
if isinstance(c_template, mesonlib.File):
c_template = c_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
if 'template' in kwargs:
raise MesonException('Mkenums does not accept both '
'c_template and template keyword '
'arguments at the same time.')
elif arg == 'h_template':
h_template = value
if isinstance(h_template, mesonlib.File):
h_template = h_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
if 'template' in kwargs:
raise MesonException('Mkenums does not accept both '
'h_template and template keyword '
'arguments at the same time.')
elif arg == 'install_header':
install_header = value
elif arg in known_kwargs:
cmd += ['--' + arg.replace('_', '-'), value]
elif arg not in known_custom_target_kwargs:
raise MesonException(
'Mkenums does not take a %s keyword argument.' % (arg, ))
cmd = [self.interpreter.find_program_impl(['glib-mkenums', 'mkenums'])] + cmd
custom_kwargs = {}
for arg in known_custom_target_kwargs:
if arg in kwargs:
custom_kwargs[arg] = kwargs[arg]
targets = []
if h_template is not None:
h_output = os.path.basename(os.path.splitext(h_template)[0])
# We always set template as the first element in the source array
# so --template consumes it.
h_cmd = cmd + ['--template', '@INPUT@']
h_sources = [h_template] + sources
custom_kwargs['install'] = install_header
if 'install_dir' not in custom_kwargs:
custom_kwargs['install_dir'] = \
state.environment.coredata.get_builtin_option('includedir')
h_target = self._make_mkenum_custom_target(state, h_sources,
h_output, h_cmd,
custom_kwargs)
targets.append(h_target)
if c_template is not None:
c_output = os.path.basename(os.path.splitext(c_template)[0])
# We always set template as the first element in the source array
# so --template consumes it.
c_cmd = cmd + ['--template', '@INPUT@']
c_sources = [c_template] + sources
# Never install the C file. Complain on bug tracker if you need it.
custom_kwargs['install'] = False
if h_template is not None:
if 'depends' in custom_kwargs:
custom_kwargs['depends'] += [h_target]
else:
custom_kwargs['depends'] = h_target
c_target = self._make_mkenum_custom_target(state, c_sources,
c_output, c_cmd,
custom_kwargs)
targets.insert(0, c_target)
if c_template is None and h_template is None:
generic_cmd = cmd + ['@INPUT@']
custom_kwargs['install'] = install_header
if 'install_dir' not in custom_kwargs:
custom_kwargs['install_dir'] = \
state.environment.coredata.get_builtin_option('includedir')
target = self._make_mkenum_custom_target(state, sources, basename,
generic_cmd, custom_kwargs)
return ModuleReturnValue(target, [target])
elif len(targets) == 1:
return ModuleReturnValue(targets[0], [targets[0]])
else:
return ModuleReturnValue(targets, targets)
@FeatureNew('gnome.mkenums_simple', '0.42.0')
def mkenums_simple(self, state, args, kwargs):
hdr_filename = args[0] + '.h'
body_filename = args[0] + '.c'
# not really needed, just for sanity checking
forbidden_kwargs = ['c_template', 'h_template', 'eprod', 'fhead',
'fprod', 'ftail', 'vhead', 'vtail', 'comments']
for arg in forbidden_kwargs:
if arg in kwargs:
raise MesonException('mkenums_simple() does not take a %s keyword argument' % (arg, ))
# kwargs to pass as-is from mkenums_simple() to mkenums()
shared_kwargs = ['sources', 'install_header', 'install_dir',
'identifier_prefix', 'symbol_prefix']
mkenums_kwargs = {}
for arg in shared_kwargs:
if arg in kwargs:
mkenums_kwargs[arg] = kwargs[arg]
# .c file generation
c_file_kwargs = copy.deepcopy(mkenums_kwargs)
if 'sources' not in kwargs:
raise MesonException('Missing keyword argument "sources".')
sources = kwargs['sources']
if isinstance(sources, str):
sources = [sources]
elif not isinstance(sources, list):
raise MesonException(
'Sources keyword argument must be a string or array.')
# The `install_header` argument will be used by mkenums() when
# not using template files, so we need to forcibly unset it
# when generating the C source file, otherwise we will end up
# installing it
c_file_kwargs['install_header'] = False
header_prefix = kwargs.get('header_prefix', '')
decl_decorator = kwargs.get('decorator', '')
func_prefix = kwargs.get('function_prefix', '')
body_prefix = kwargs.get('body_prefix', '')
# Maybe we should write our own template files into the build dir
# instead, but that seems like much more work, nice as it would be.
fhead = ''
if body_prefix != '':
fhead += '%s\n' % body_prefix
fhead += '#include "%s"\n' % hdr_filename
for hdr in sources:
fhead += '#include "%s"\n' % os.path.basename(str(hdr))
fhead += '''
#define C_ENUM(v) ((gint) v)
#define C_FLAGS(v) ((guint) v)
'''
c_file_kwargs['fhead'] = fhead
c_file_kwargs['fprod'] = '''
/* enumerations from "@basename@" */
'''
c_file_kwargs['vhead'] = '''
GType
%s@enum_name@_get_type (void)
{
static volatile gsize gtype_id = 0;
static const G@Type@Value values[] = {''' % func_prefix
c_file_kwargs['vprod'] = ' { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },'
c_file_kwargs['vtail'] = ''' { 0, NULL, NULL }
};
  if (g_once_init_enter (&gtype_id)) {
GType new_type = g_@type@_register_static ("@EnumName@", values);
    g_once_init_leave (&gtype_id, new_type);
}
return (GType) gtype_id;
}'''
rv = self.mkenums(state, [body_filename], c_file_kwargs)
c_file = rv.return_value
# .h file generation
h_file_kwargs = copy.deepcopy(mkenums_kwargs)
h_file_kwargs['fhead'] = '''#pragma once
#include <glib-object.h>
{}
G_BEGIN_DECLS
'''.format(header_prefix)
h_file_kwargs['fprod'] = '''
/* enumerations from "@basename@" */
'''
h_file_kwargs['vhead'] = '''
{}
GType {}@enum_name@_get_type (void);
#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({}@enum_name@_get_type())'''.format(decl_decorator, func_prefix, func_prefix)
h_file_kwargs['ftail'] = '''
G_END_DECLS'''
rv = self.mkenums(state, [hdr_filename], h_file_kwargs)
h_file = rv.return_value
return ModuleReturnValue([c_file, h_file], [c_file, h_file])
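    # Typical meson.build usage, as a sketch (names are illustrative, not
    # taken from this file):
    #   gnome = import('gnome')
    #   enum_files = gnome.mkenums_simple('myenums',
    #     sources: ['myheader.h'], install_header: true)
    # which yields the generated .c and .h targets returned above.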
@staticmethod
def _make_mkenum_custom_target(state, sources, output, cmd, kwargs):
custom_kwargs = {
'input': sources,
'output': output,
'capture': True,
'command': cmd
}
custom_kwargs.update(kwargs)
return build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs,
# https://github.com/mesonbuild/meson/issues/973
absolute_paths=True)
@permittedKwargs({'sources', 'prefix', 'install_header', 'install_dir', 'stdinc',
'nostdinc', 'internal', 'skip_source', 'valist_marshallers',
'extra_args'})
def genmarshal(self, state, args, kwargs):
if len(args) != 1:
raise MesonException(
'Genmarshal requires one positional argument.')
output = args[0]
if 'sources' not in kwargs:
raise MesonException('Missing keyword argument "sources".')
sources = kwargs.pop('sources')
if isinstance(sources, str):
sources = [sources]
elif not isinstance(sources, list):
raise MesonException(
'Sources keyword argument must be a string or array.')
new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3')
cmd = [self.interpreter.find_program_impl('glib-genmarshal')]
known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc',
'valist_marshallers', 'extra_args']
known_custom_target_kwargs = ['build_always', 'depends',
'depend_files', 'install_dir',
'install_header']
for arg, value in kwargs.items():
if arg == 'prefix':
cmd += ['--prefix', value]
elif arg == 'extra_args':
if new_genmarshal:
cmd += mesonlib.stringlistify(value)
else:
mlog.warning('The current version of GLib does not support extra arguments \n'
'for glib-genmarshal. You need at least GLib 2.53.3. See ',
mlog.bold('https://github.com/mesonbuild/meson/pull/2049'))
elif arg in known_kwargs and value:
cmd += ['--' + arg.replace('_', '-')]
elif arg not in known_custom_target_kwargs:
raise MesonException(
'Genmarshal does not take a %s keyword argument.' % (
arg, ))
install_header = kwargs.pop('install_header', False)
install_dir = kwargs.pop('install_dir', None)
custom_kwargs = {
'input': sources,
}
# https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf
if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'):
cmd += ['--output', '@OUTPUT@']
else:
custom_kwargs['capture'] = True
for arg in known_custom_target_kwargs:
if arg in kwargs:
custom_kwargs[arg] = kwargs[arg]
header_file = output + '.h'
custom_kwargs['command'] = cmd + ['--body', '@INPUT@']
if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'):
# Silence any warnings about missing prototypes
custom_kwargs['command'] += ['--include-header', header_file]
custom_kwargs['output'] = output + '.c'
body = build.CustomTarget(output + '_c', state.subdir, state.subproject, custom_kwargs)
custom_kwargs['install'] = install_header
if install_dir is not None:
custom_kwargs['install_dir'] = install_dir
if new_genmarshal:
cmd += ['--pragma-once']
custom_kwargs['command'] = cmd + ['--header', '@INPUT@']
custom_kwargs['output'] = header_file
header = build.CustomTarget(output + '_h', state.subdir, state.subproject, custom_kwargs)
rv = [body, header]
return ModuleReturnValue(rv, rv)
@staticmethod
def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False):
arg_list = mesonlib.extract_as_list(kwargs, variable)
ret = []
for arg in arg_list:
if not isinstance(arg, str):
                types = 'strings' + (' or InternalDependencys' if accept_vapi else '')
raise MesonException('All {} must be {}'.format(variable, types))
ret.append(prefix + arg)
return ret
def _extract_vapi_packages(self, state, kwargs):
'''
Packages are special because we need to:
- Get a list of packages for the .deps file
- Get a list of depends for any VapiTargets
- Get package name from VapiTargets
- Add include dirs for any VapiTargets
'''
arg_list = kwargs.get('packages')
if not arg_list:
return [], [], [], []
arg_list = mesonlib.listify(arg_list)
vapi_depends = []
vapi_packages = []
vapi_includes = []
ret = []
remaining_args = []
for arg in arg_list:
if hasattr(arg, 'held_object'):
arg = arg.held_object
if isinstance(arg, InternalDependency):
targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
for target in targets:
srcdir = os.path.join(state.environment.get_source_dir(),
target.get_subdir())
outdir = os.path.join(state.environment.get_build_dir(),
target.get_subdir())
outfile = target.get_outputs()[0][:-5] # Strip .vapi
ret.append('--vapidir=' + outdir)
ret.append('--girdir=' + outdir)
ret.append('--pkg=' + outfile)
vapi_depends.append(target)
vapi_packages.append(outfile)
vapi_includes.append(srcdir)
else:
vapi_packages.append(arg)
remaining_args.append(arg)
kwargs['packages'] = remaining_args
vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True)
return vapi_args, vapi_depends, vapi_packages, vapi_includes
def _generate_deps(self, state, library, packages, install_dir):
outdir = state.environment.scratch_dir
fname = os.path.join(outdir, library + '.deps')
with open(fname, 'w') as ofile:
for package in packages:
ofile.write(package + '\n')
return build.Data(mesonlib.File(True, outdir, fname), install_dir)
def _get_vapi_link_with(self, target):
link_with = []
for dep in target.get_target_dependencies():
if isinstance(dep, build.SharedLibrary):
link_with.append(dep)
elif isinstance(dep, GirTarget):
link_with += self._get_vapi_link_with(dep)
return link_with
@permittedKwargs({'sources', 'packages', 'metadata_dirs', 'gir_dirs',
'vapi_dirs', 'install', 'install_dir'})
def generate_vapi(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('The library name is required')
if not isinstance(args[0], str):
raise MesonException('The first argument must be the name of the library')
created_values = []
library = args[0]
build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs)
if 'VAPIGEN' in os.environ:
cmd = [self.interpreter.find_program_impl(os.environ['VAPIGEN'])]
else:
cmd = [self.interpreter.find_program_impl('vapigen')]
cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir]
cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs)
cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs)
cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs)
cmd += pkg_cmd
cmd += ['--metadatadir=' + source_dir]
if 'sources' not in kwargs:
raise MesonException('sources are required to generate the vapi file')
inputs = mesonlib.extract_as_list(kwargs, 'sources')
link_with = []
for i in inputs:
if isinstance(i, str):
cmd.append(os.path.join(source_dir, i))
elif hasattr(i, 'held_object') and isinstance(i.held_object, GirTarget):
link_with += self._get_vapi_link_with(i.held_object)
subdir = os.path.join(state.environment.get_build_dir(),
i.held_object.get_subdir())
gir_file = os.path.join(subdir, i.held_object.get_outputs()[0])
cmd.append(gir_file)
else:
raise MesonException('Input must be a str or GirTarget')
vapi_output = library + '.vapi'
custom_kwargs = {
'command': cmd,
'input': inputs,
'output': vapi_output,
'depends': vapi_depends,
}
install_dir = kwargs.get('install_dir',
os.path.join(state.environment.coredata.get_builtin_option('datadir'),
'vala', 'vapi'))
if kwargs.get('install'):
custom_kwargs['install'] = kwargs['install']
custom_kwargs['install_dir'] = install_dir
# We shouldn't need this locally but we install it
deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
created_values.append(deps_target)
vapi_target = VapiTarget(vapi_output, state.subdir, state.subproject, custom_kwargs)
# So to try our best to get this to just work we need:
        # - link with the correct library
# - include the vapi and dependent vapi files in sources
# - add relevant directories to include dirs
incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
sources = [vapi_target] + vapi_depends
rv = InternalDependency(None, incs, [], [], link_with, [], sources, [])
created_values.append(rv)
return ModuleReturnValue(rv, created_values)
def initialize(*args, **kwargs):
return GnomeModule(*args, **kwargs)
|
apache-2.0
|
ContinuumIO/topik
|
topik/vectorizers/tfidf.py
|
1
|
1139
|
from math import log
from ._registry import register
from .vectorizer_output import VectorizerOutput
from .bag_of_words import _count_words_in_docs
def _count_document_occurrences(doc_counts, total_words):
return {word_id: sum(1 for doc in doc_counts.values() if word_id in doc)
for word_id in range(total_words)}
def _calculate_tfidf(tokenized_corpus, vectorizer_output):
tokens = list(tokenized_corpus)
doc_counts = _count_words_in_docs(tokens, vectorizer_output)
    document_occurrences = _count_document_occurrences(doc_counts, vectorizer_output.global_term_count)
    idf = {word_id: log(float(len(tokens)) / document_occurrences[word_id])
           for word_id in range(vectorizer_output.global_term_count)}
tf_idf = {}
# TODO: this is essentially a sparse matrix multiply and could be done much more efficiently
    for doc_id, doc in doc_counts.items():
        tf_idf[doc_id] = {}
        for word_id, count in doc.items():
            tf_idf[doc_id][word_id] = count * idf[word_id]
return tf_idf
@register
def tfidf(tokenized_corpus):
return VectorizerOutput(tokenized_corpus, _calculate_tfidf)
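# For reference, a toy run of the computation above (hypothetical data; the
# real pipeline goes through VectorizerOutput and the registry):
def _tfidf_toy_example():
    # Two documents over vocabulary ids 0..2, as {doc_id: {word_id: count}}.
    doc_counts = {0: {0: 2, 1: 1}, 1: {1: 3, 2: 1}}
    n_docs = len(doc_counts)
    df = {w: sum(1 for d in doc_counts.values() if w in d) for w in range(3)}
    idf = {w: log(float(n_docs) / df[w]) for w in range(3)}
    # Word 1 occurs in every document, so its idf (and hence tf-idf) is 0.
    return {d: {w: c * idf[w] for w, c in counts.items()}
            for d, counts in doc_counts.items()}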
|
bsd-3-clause
|
pk-sam/crosswalk-test-suite
|
webapi/webapi-resourcetiming-w3c-tests/inst.apk.py
|
903
|
3180
|
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARAMETERS = None
ADB_CMD = "adb"
def doCMD(cmd):
# Do not need handle timeout in this short script, let tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
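# Example (hypothetical): doCMD("%s devices" % ADB_CMD) echoes the adb
# output as it arrives and returns (exit_code, [captured lines]).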
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
for file in files:
if file.endswith(".apk"):
cmd = "%s -s %s uninstall org.xwalk.%s" % (
ADB_CMD, PARAMETERS.device, os.path.basename(os.path.splitext(file)[0]))
(return_code, output) = doCMD(cmd)
for line in output:
if "Failure" in line:
action_status = False
break
return action_status
def instPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
for file in files:
if file.endswith(".apk"):
cmd = "%s -s %s install %s" % (ADB_CMD,
PARAMETERS.device, os.path.join(root, file))
(return_code, output) = doCMD(cmd)
for line in output:
if "Failure" in line:
action_status = False
break
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception, e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.device:
(return_code, output) = doCMD("adb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
if not PARAMETERS.device:
print "No device found"
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
|
bsd-3-clause
|
tumbl3w33d/ansible
|
lib/ansible/modules/cloud/amazon/ec2_eni.py
|
31
|
23840
|
#!/usr/bin/python
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_eni
short_description: Create and optionally attach an Elastic Network Interface (ENI) to an instance
description:
- Create and optionally attach an Elastic Network Interface (ENI) to an instance. If an ENI ID or private_ip is
provided, the existing ENI (if any) will be modified. The 'attached' parameter controls the attachment status
of the network interface.
version_added: "2.0"
author: "Rob White (@wimnat)"
options:
eni_id:
description:
- The ID of the ENI (to modify).
- If I(eni_id=None) and I(state=present), a new eni will be created.
type: str
instance_id:
description:
- Instance ID that you wish to attach ENI to.
- Since version 2.2, use the I(attached) parameter to attach or detach an ENI. Prior to 2.2, to detach an ENI from an instance, use C(None).
type: str
private_ip_address:
description:
- Private IP address.
type: str
subnet_id:
description:
- ID of subnet in which to create the ENI.
type: str
description:
description:
- Optional description of the ENI.
type: str
security_groups:
description:
- List of security groups associated with the interface. Only used when I(state=present).
- Since version 2.2, you can specify security groups by ID or by name or a combination of both. Prior to 2.2, you can specify only by ID.
type: list
elements: str
state:
description:
- Create or delete ENI.
default: present
choices: [ 'present', 'absent' ]
type: str
device_index:
description:
- The index of the device for the network interface attachment on the instance.
default: 0
type: int
attached:
description:
- Specifies if network interface should be attached or detached from instance. If omitted, attachment status
won't change
version_added: 2.2
type: bool
force_detach:
description:
- Force detachment of the interface. This applies either when explicitly detaching the interface by setting I(instance_id=None)
or when deleting an interface with I(state=absent).
default: false
type: bool
delete_on_termination:
description:
- Delete the interface when the instance it is attached to is terminated. You can only specify this flag when the
interface is being modified, not on creation.
required: false
type: bool
source_dest_check:
description:
      - By default, interfaces perform source/destination checks. NAT instances, however, need this check to be disabled.
You can only specify this flag when the interface is being modified, not on creation.
required: false
type: bool
secondary_private_ip_addresses:
description:
- A list of IP addresses to assign as secondary IP addresses to the network interface.
        This option is mutually exclusive with I(secondary_private_ip_address_count).
required: false
version_added: 2.2
type: list
elements: str
purge_secondary_private_ip_addresses:
description:
- To be used with I(secondary_private_ip_addresses) to determine whether or not to remove any secondary IP addresses other than those specified.
- Set I(secondary_private_ip_addresses=[]) to purge all secondary addresses.
default: false
type: bool
version_added: 2.5
secondary_private_ip_address_count:
description:
      - The number of secondary IP addresses to assign to the network interface. This option is mutually exclusive with I(secondary_private_ip_addresses).
required: false
version_added: 2.2
type: int
allow_reassignment:
description:
- Indicates whether to allow an IP address that is already assigned to another network interface or instance
to be reassigned to the specified network interface.
required: false
default: false
type: bool
version_added: 2.7
extends_documentation_fragment:
- aws
- ec2
notes:
  - This module identifies an ENI based on either the I(eni_id), a combination of I(private_ip_address) and I(subnet_id),
    or a combination of I(instance_id) and I(device_index). Any of these options will let you specify a particular ENI.
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Create an ENI. As no security group is defined, ENI will be created in default security group
- ec2_eni:
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
# Create an ENI and attach it to an instance
- ec2_eni:
instance_id: i-xxxxxxx
device_index: 1
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
# Create an ENI with two secondary addresses
- ec2_eni:
subnet_id: subnet-xxxxxxxx
state: present
secondary_private_ip_address_count: 2
# Assign a secondary IP address to an existing ENI
# This will purge any existing IPs
- ec2_eni:
subnet_id: subnet-xxxxxxxx
eni_id: eni-yyyyyyyy
state: present
secondary_private_ip_addresses:
- 172.16.1.1
# Remove any secondary IP addresses from an existing ENI
- ec2_eni:
subnet_id: subnet-xxxxxxxx
eni_id: eni-yyyyyyyy
state: present
secondary_private_ip_address_count: 0
# Destroy an ENI, detaching it from any instance if necessary
- ec2_eni:
eni_id: eni-xxxxxxx
force_detach: true
state: absent
# Update an ENI
- ec2_eni:
eni_id: eni-xxxxxxx
description: "My new description"
state: present
# Update an ENI identifying it by private_ip_address and subnet_id
- ec2_eni:
subnet_id: subnet-xxxxxxx
private_ip_address: 172.16.1.1
description: "My new description"
# Detach an ENI from an instance
- ec2_eni:
eni_id: eni-xxxxxxx
instance_id: None
state: present
### Delete an interface on termination
# First create the interface
- ec2_eni:
instance_id: i-xxxxxxx
device_index: 1
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
register: eni
# Modify the interface to enable the delete_on_termination flag
- ec2_eni:
eni_id: "{{ eni.interface.id }}"
delete_on_termination: true
'''
RETURN = '''
interface:
description: Network interface attributes
returned: when state != absent
type: complex
contains:
description:
description: interface description
type: str
sample: Firewall network interface
groups:
description: list of security groups
type: list
elements: dict
sample: [ { "sg-f8a8a9da": "default" } ]
id:
description: network interface id
type: str
sample: "eni-1d889198"
mac_address:
description: interface's physical address
type: str
sample: "00:00:5E:00:53:23"
owner_id:
description: aws account id
type: str
sample: 812381371
private_ip_address:
description: primary ip address of this interface
type: str
sample: 10.20.30.40
private_ip_addresses:
description: list of all private ip addresses associated to this interface
type: list
elements: dict
sample: [ { "primary_address": true, "private_ip_address": "10.20.30.40" } ]
source_dest_check:
description: value of source/dest check flag
type: bool
sample: True
status:
description: network interface status
type: str
sample: "pending"
subnet_id:
description: which vpc subnet the interface is bound
type: str
sample: subnet-b0a0393c
vpc_id:
description: which vpc this network interface is bound
type: str
sample: vpc-9a9a9da
'''
import time
import re
try:
import boto.ec2
import boto.vpc
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (AnsibleAWSError, connect_to_aws,
ec2_argument_spec, get_aws_connection_info,
get_ec2_security_group_ids_from_names)
def get_eni_info(interface):
# Private addresses
private_addresses = []
for ip in interface.private_ip_addresses:
private_addresses.append({'private_ip_address': ip.private_ip_address, 'primary_address': ip.primary})
interface_info = {'id': interface.id,
'subnet_id': interface.subnet_id,
'vpc_id': interface.vpc_id,
'description': interface.description,
'owner_id': interface.owner_id,
'status': interface.status,
'mac_address': interface.mac_address,
'private_ip_address': interface.private_ip_address,
'source_dest_check': interface.source_dest_check,
'groups': dict((group.id, group.name) for group in interface.groups),
'private_ip_addresses': private_addresses
}
if interface.attachment is not None:
interface_info['attachment'] = {'attachment_id': interface.attachment.id,
'instance_id': interface.attachment.instance_id,
'device_index': interface.attachment.device_index,
'status': interface.attachment.status,
'attach_time': interface.attachment.attach_time,
'delete_on_termination': interface.attachment.delete_on_termination,
}
return interface_info
def wait_for_eni(eni, status):
while True:
time.sleep(3)
eni.update()
# If the status is detached we just need attachment to disappear
if eni.attachment is None:
if status == "detached":
break
else:
if status == "attached" and eni.attachment.status == "attached":
break
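# wait_for_eni polls forever; if the ENI never reaches the requested state
# the caller hangs. A bounded variant, sketched here (not part of the
# original module), would give up after a deadline:
def wait_for_eni_bounded(eni, status, timeout=300, interval=3):
    """Like wait_for_eni, but raise if `timeout` seconds elapse first."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        time.sleep(interval)
        eni.update()
        if status == "detached" and eni.attachment is None:
            return
        if (status == "attached" and eni.attachment is not None
                and eni.attachment.status == "attached"):
            return
    raise RuntimeError("Timed out waiting for ENI to become %s" % status)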
def create_eni(connection, vpc_id, module):
instance_id = module.params.get("instance_id")
attached = module.params.get("attached")
if instance_id == 'None':
instance_id = None
device_index = module.params.get("device_index")
subnet_id = module.params.get('subnet_id')
private_ip_address = module.params.get('private_ip_address')
description = module.params.get('description')
security_groups = get_ec2_security_group_ids_from_names(module.params.get('security_groups'), connection, vpc_id=vpc_id, boto3=False)
secondary_private_ip_addresses = module.params.get("secondary_private_ip_addresses")
secondary_private_ip_address_count = module.params.get("secondary_private_ip_address_count")
changed = False
try:
eni = connection.create_network_interface(subnet_id, private_ip_address, description, security_groups)
if attached and instance_id is not None:
try:
eni.attach(instance_id, device_index)
except BotoServerError:
eni.delete()
raise
# Wait to allow creation / attachment to finish
wait_for_eni(eni, "attached")
eni.update()
if secondary_private_ip_address_count is not None:
try:
connection.assign_private_ip_addresses(network_interface_id=eni.id, secondary_private_ip_address_count=secondary_private_ip_address_count)
except BotoServerError:
eni.delete()
raise
if secondary_private_ip_addresses is not None:
try:
connection.assign_private_ip_addresses(network_interface_id=eni.id, private_ip_addresses=secondary_private_ip_addresses)
except BotoServerError:
eni.delete()
raise
changed = True
except BotoServerError as e:
module.fail_json(msg=e.message)
module.exit_json(changed=changed, interface=get_eni_info(eni))
def modify_eni(connection, vpc_id, module, eni):
instance_id = module.params.get("instance_id")
attached = module.params.get("attached")
do_detach = module.params.get('state') == 'detached'
device_index = module.params.get("device_index")
description = module.params.get('description')
security_groups = module.params.get('security_groups')
force_detach = module.params.get("force_detach")
source_dest_check = module.params.get("source_dest_check")
delete_on_termination = module.params.get("delete_on_termination")
secondary_private_ip_addresses = module.params.get("secondary_private_ip_addresses")
purge_secondary_private_ip_addresses = module.params.get("purge_secondary_private_ip_addresses")
secondary_private_ip_address_count = module.params.get("secondary_private_ip_address_count")
allow_reassignment = module.params.get("allow_reassignment")
changed = False
try:
if description is not None:
if eni.description != description:
connection.modify_network_interface_attribute(eni.id, "description", description)
changed = True
if len(security_groups) > 0:
groups = get_ec2_security_group_ids_from_names(security_groups, connection, vpc_id=vpc_id, boto3=False)
if sorted(get_sec_group_list(eni.groups)) != sorted(groups):
connection.modify_network_interface_attribute(eni.id, "groupSet", groups)
changed = True
if source_dest_check is not None:
if eni.source_dest_check != source_dest_check:
connection.modify_network_interface_attribute(eni.id, "sourceDestCheck", source_dest_check)
changed = True
if delete_on_termination is not None and eni.attachment is not None:
if eni.attachment.delete_on_termination is not delete_on_termination:
connection.modify_network_interface_attribute(eni.id, "deleteOnTermination", delete_on_termination, eni.attachment.id)
changed = True
current_secondary_addresses = [i.private_ip_address for i in eni.private_ip_addresses if not i.primary]
if secondary_private_ip_addresses is not None:
secondary_addresses_to_remove = list(set(current_secondary_addresses) - set(secondary_private_ip_addresses))
if secondary_addresses_to_remove and purge_secondary_private_ip_addresses:
connection.unassign_private_ip_addresses(network_interface_id=eni.id,
private_ip_addresses=list(set(current_secondary_addresses) -
set(secondary_private_ip_addresses)),
dry_run=False)
changed = True
secondary_addresses_to_add = list(set(secondary_private_ip_addresses) - set(current_secondary_addresses))
if secondary_addresses_to_add:
connection.assign_private_ip_addresses(network_interface_id=eni.id,
private_ip_addresses=secondary_addresses_to_add,
secondary_private_ip_address_count=None,
allow_reassignment=allow_reassignment, dry_run=False)
changed = True
if secondary_private_ip_address_count is not None:
current_secondary_address_count = len(current_secondary_addresses)
if secondary_private_ip_address_count > current_secondary_address_count:
connection.assign_private_ip_addresses(network_interface_id=eni.id,
private_ip_addresses=None,
secondary_private_ip_address_count=(secondary_private_ip_address_count -
current_secondary_address_count),
allow_reassignment=allow_reassignment, dry_run=False)
changed = True
elif secondary_private_ip_address_count < current_secondary_address_count:
# How many of these addresses do we want to remove
secondary_addresses_to_remove_count = current_secondary_address_count - secondary_private_ip_address_count
connection.unassign_private_ip_addresses(network_interface_id=eni.id,
private_ip_addresses=current_secondary_addresses[:secondary_addresses_to_remove_count],
dry_run=False)
if attached is True:
if eni.attachment and eni.attachment.instance_id != instance_id:
detach_eni(eni, module)
eni.attach(instance_id, device_index)
wait_for_eni(eni, "attached")
changed = True
if eni.attachment is None:
eni.attach(instance_id, device_index)
wait_for_eni(eni, "attached")
changed = True
elif attached is False:
detach_eni(eni, module)
except BotoServerError as e:
module.fail_json(msg=e.message)
eni.update()
module.exit_json(changed=changed, interface=get_eni_info(eni))
def delete_eni(connection, module):
eni_id = module.params.get("eni_id")
force_detach = module.params.get("force_detach")
try:
eni_result_set = connection.get_all_network_interfaces(eni_id)
eni = eni_result_set[0]
if force_detach is True:
if eni.attachment is not None:
eni.detach(force_detach)
# Wait to allow detachment to finish
wait_for_eni(eni, "detached")
eni.update()
eni.delete()
changed = True
else:
eni.delete()
changed = True
module.exit_json(changed=changed)
except BotoServerError as e:
regex = re.compile('The networkInterface ID \'.*\' does not exist')
if regex.search(e.message) is not None:
module.exit_json(changed=False)
else:
module.fail_json(msg=e.message)
def detach_eni(eni, module):
attached = module.params.get("attached")
force_detach = module.params.get("force_detach")
if eni.attachment is not None:
eni.detach(force_detach)
wait_for_eni(eni, "detached")
if attached:
return
eni.update()
module.exit_json(changed=True, interface=get_eni_info(eni))
else:
module.exit_json(changed=False, interface=get_eni_info(eni))
def uniquely_find_eni(connection, module):
eni_id = module.params.get("eni_id")
private_ip_address = module.params.get('private_ip_address')
subnet_id = module.params.get('subnet_id')
instance_id = module.params.get('instance_id')
device_index = module.params.get('device_index')
attached = module.params.get('attached')
try:
filters = {}
        # proceed only if we can unambiguously identify an ENI
if eni_id is None and private_ip_address is None and (instance_id is None and device_index is None):
return None
if private_ip_address and subnet_id:
filters['private-ip-address'] = private_ip_address
filters['subnet-id'] = subnet_id
if not attached and instance_id and device_index:
filters['attachment.instance-id'] = instance_id
filters['attachment.device-index'] = device_index
if eni_id is None and len(filters) == 0:
return None
eni_result = connection.get_all_network_interfaces(eni_id, filters=filters)
if len(eni_result) == 1:
return eni_result[0]
else:
return None
except BotoServerError as e:
module.fail_json(msg=e.message)
return None
def get_sec_group_list(groups):
# Build list of remote security groups
remote_security_groups = []
for group in groups:
remote_security_groups.append(group.id.encode())
return remote_security_groups
def _get_vpc_id(connection, module, subnet_id):
try:
return connection.get_all_subnets(subnet_ids=[subnet_id])[0].vpc_id
except BotoServerError as e:
module.fail_json(msg=e.message)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
eni_id=dict(default=None, type='str'),
instance_id=dict(default=None, type='str'),
private_ip_address=dict(type='str'),
subnet_id=dict(type='str'),
description=dict(type='str'),
security_groups=dict(default=[], type='list'),
device_index=dict(default=0, type='int'),
state=dict(default='present', choices=['present', 'absent']),
force_detach=dict(default='no', type='bool'),
source_dest_check=dict(default=None, type='bool'),
delete_on_termination=dict(default=None, type='bool'),
secondary_private_ip_addresses=dict(default=None, type='list'),
purge_secondary_private_ip_addresses=dict(default=False, type='bool'),
secondary_private_ip_address_count=dict(default=None, type='int'),
allow_reassignment=dict(default=False, type='bool'),
attached=dict(default=None, type='bool')
)
)
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=[
['secondary_private_ip_addresses', 'secondary_private_ip_address_count']
],
required_if=([
('state', 'absent', ['eni_id']),
('attached', True, ['instance_id']),
('purge_secondary_private_ip_addresses', True, ['secondary_private_ip_addresses'])
])
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.ec2, region, **aws_connect_params)
vpc_connection = connect_to_aws(boto.vpc, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
state = module.params.get("state")
if state == 'present':
eni = uniquely_find_eni(connection, module)
if eni is None:
subnet_id = module.params.get("subnet_id")
if subnet_id is None:
module.fail_json(msg="subnet_id is required when creating a new ENI")
vpc_id = _get_vpc_id(vpc_connection, module, subnet_id)
create_eni(connection, vpc_id, module)
else:
vpc_id = eni.vpc_id
modify_eni(connection, vpc_id, module, eni)
elif state == 'absent':
delete_eni(connection, module)
if __name__ == '__main__':
main()
|
gpl-3.0
|
NeCTAR-RC/nova
|
nova/virt/libvirt/storage/dmcrypt.py
|
47
|
3448
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
from nova.i18n import _LE
from nova.virt.libvirt import utils
LOG = logging.getLogger(__name__)
_dmcrypt_suffix = '-dmcrypt'
def volume_name(base):
"""Returns the suffixed dmcrypt volume name.
This is to avoid collisions with similarly named device mapper names for
LVM volumes
"""
return base + _dmcrypt_suffix
def is_encrypted(path):
"""Returns true if the path corresponds to an encrypted disk."""
if path.startswith('/dev/mapper'):
return path.rpartition('/')[2].endswith(_dmcrypt_suffix)
else:
return False
def create_volume(target, device, cipher, key_size, key):
"""Sets up a dmcrypt mapping
:param target: device mapper logical device name
:param device: underlying block device
:param cipher: encryption cipher string digestible by cryptsetup
:param key_size: encryption key size
:param key: encryption key as an array of unsigned bytes
"""
cmd = ('cryptsetup',
'create',
target,
device,
'--cipher=' + cipher,
'--key-size=' + str(key_size),
'--key-file=-')
key = ''.join(map(lambda byte: "%02x" % byte, key))
try:
utils.execute(*cmd, process_input=key, run_as_root=True)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Could not start encryption for disk %(device)s: "
"%(exception)s"), {'device': device, 'exception': e})
def delete_volume(target):
"""Deletes a dmcrypt mapping
:param target: name of the mapped logical device
"""
try:
utils.execute('cryptsetup', 'remove', target, run_as_root=True)
except processutils.ProcessExecutionError as e:
        # cryptsetup returns 4 when attempting to destroy a non-existent
        # dm-crypt device. It indicates that the device is invalid, i.e.
        # it has already been destroyed.
if e.exit_code == 4:
LOG.debug("Ignoring exit code 4, volume already destroyed")
else:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Could not disconnect encrypted volume "
"%(volume)s. If dm-crypt device is still active "
"it will have to be destroyed manually for "
"cleanup to succeed."), {'volume': target})
def list_volumes():
"""Function enumerates encrypted volumes."""
return [dmdev for dmdev in os.listdir('/dev/mapper')
if dmdev.endswith('-dmcrypt')]
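# A sketch of how the helpers above fit together (device path and names are
# illustrative; actually running this needs root and a real block device):
def _example_encrypt_cycle():
    key = bytearray(os.urandom(32))        # hypothetical 256-bit volume key
    target = volume_name('instance-0001')  # -> 'instance-0001-dmcrypt'
    create_volume(target, '/dev/sdb1', 'aes-xts-plain64', 256, key)
    assert is_encrypted('/dev/mapper/' + target)
    delete_volume(target)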
|
apache-2.0
|
olapaola/olapaola-android-scripting
|
python-build/python-libs/gdata/src/gdata/analytics/service.py
|
213
|
13293
|
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
# Refactored in 2009 to work for Google Analytics by Sal Uryasev at Juice Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
AccountsService extends the GDataService to streamline Google Analytics
account information operations.
AnalyticsDataService: Provides methods to query google analytics data feeds.
Extends GDataService.
DataQuery: Queries a Google Analytics Data list feed.
AccountQuery: Queries a Google Analytics Account list feed.
"""
__author__ = 'api.suryasev (Sal Uryasev)'
import urllib
import atom
import gdata.service
import gdata.analytics
class AccountsService(gdata.service.GDataService):
"""Client extension for the Google Analytics Account List feed."""
def __init__(self, email="", password=None, source=None,
server='www.google.com/analytics', additional_headers=None,
**kwargs):
"""Creates a client for the Google Analytics service.
Args:
email: string (optional) The user's email address, used for
authentication.
password: string (optional) The user's password.
source: string (optional) The name of the user's application.
server: string (optional) The name of the server to which a connection
will be opened.
**kwargs: The other parameters to pass to gdata.service.GDataService
constructor.
"""
gdata.service.GDataService.__init__(
self, email=email, password=password, service='analytics',
source=source, server=server, additional_headers=additional_headers,
**kwargs)
def QueryAccountListFeed(self, uri):
"""Retrieves an AccountListFeed by retrieving a URI based off the Document
List feed, including any query parameters. An AccountListFeed object
can be used to construct these parameters.
Args:
uri: string The URI of the feed being retrieved possibly with query
parameters.
Returns:
An AccountListFeed object representing the feed returned by the server.
"""
return self.Get(uri, converter=gdata.analytics.AccountListFeedFromString)
def GetAccountListEntry(self, uri):
"""Retrieves a particular AccountListEntry by its unique URI.
Args:
uri: string The unique URI of an entry in an Account List feed.
Returns:
      An AccountListEntry object representing the retrieved entry.
"""
return self.Get(uri, converter=gdata.analytics.AccountListEntryFromString)
def GetAccountList(self, max_results=1000, text_query=None,
params=None, categories=None):
"""Retrieves a feed containing all of a user's accounts and profiles."""
q = gdata.analytics.service.AccountQuery(max_results=max_results,
text_query=text_query,
params=params,
categories=categories);
return self.QueryAccountListFeed(q.ToUri())
class AnalyticsDataService(gdata.service.GDataService):
"""Client extension for the Google Analytics service Data List feed."""
def __init__(self, email=None, password=None, source=None,
server='www.google.com/analytics', additional_headers=None,
**kwargs):
"""Creates a client for the Google Analytics service.
Args:
email: string (optional) The user's email address, used for
authentication.
password: string (optional) The user's password.
source: string (optional) The name of the user's application.
      server: string (optional) The name of the server to which a connection
          will be opened. Default value: 'www.google.com/analytics'.
**kwargs: The other parameters to pass to gdata.service.GDataService
constructor.
"""
gdata.service.GDataService.__init__(self,
email=email, password=password, service='analytics', source=source,
server=server, additional_headers=additional_headers, **kwargs)
def GetData(self, ids='', dimensions='', metrics='',
sort='', filters='', start_date='',
end_date='', start_index='',
max_results=''):
"""Retrieves a feed containing a user's data
ids: comma-separated string of analytics accounts.
dimensions: comma-separated string of dimensions.
metrics: comma-separated string of metrics.
sort: comma-separated string of dimensions and metrics for sorting.
        This may be prefixed with a minus to sort in reverse order.
(e.g. '-ga:keyword')
        If omitted, the first dimension passed in will be used.
filters: comma-separated string of filter parameters.
(e.g. 'ga:keyword==google')
start_date: start date for data pull.
end_date: end date for data pull.
start_index: used in combination with max_results to pull more than 1000
entries. This defaults to 1.
max_results: maximum results that the pull will return. This defaults
to, and maxes out at 1000.
"""
q = gdata.analytics.service.DataQuery(ids=ids,
dimensions=dimensions,
metrics=metrics,
filters=filters,
sort=sort,
start_date=start_date,
end_date=end_date,
start_index=start_index,
max_results=max_results);
return self.AnalyticsDataFeed(q.ToUri())
def AnalyticsDataFeed(self, uri):
"""Retrieves an AnalyticsListFeed by retrieving a URI based off the
Document List feed, including any query parameters. An
AnalyticsListFeed object can be used to construct these parameters.
Args:
uri: string The URI of the feed being retrieved possibly with query
parameters.
Returns:
      An AnalyticsDataFeed object representing the feed returned by the
server.
"""
return self.Get(uri,
converter=gdata.analytics.AnalyticsDataFeedFromString)
"""
Account Fetching
"""
def QueryAccountListFeed(self, uri):
"""Retrieves an Account ListFeed by retrieving a URI based off the Account
List feed, including any query parameters. A AccountQuery object can
be used to construct these parameters.
Args:
uri: string The URI of the feed being retrieved possibly with query
parameters.
Returns:
An AccountListFeed object representing the feed returned by the server.
"""
return self.Get(uri, converter=gdata.analytics.AccountListFeedFromString)
def GetAccountListEntry(self, uri):
"""Retrieves a particular AccountListEntry by its unique URI.
Args:
uri: string The unique URI of an entry in an Account List feed.
Returns:
An AccountListEntry object representing the retrieved entry.
"""
return self.Get(uri, converter=gdata.analytics.AccountListEntryFromString)
def GetAccountList(self, username="default", max_results=1000,
start_index=1):
"""Retrieves a feed containing all of a user's accounts and profiles.
The username parameter is soon to be deprecated, with 'default'
becoming the only allowed parameter.
"""
if not username:
raise Exception("username is a required parameter")
q = gdata.analytics.service.AccountQuery(username=username,
max_results=max_results,
                                          start_index=start_index)
return self.QueryAccountListFeed(q.ToUri())
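# Illustrative usage sketch, not part of the original module: the credentials
# and the table id 'ga:12345' below are made-up placeholders, and calling
# this helper performs a real ClientLogin request.
def _example_get_data():
    client = AnalyticsDataService(email='[email protected]',
                                  password='password',
                                  source='example-app')
    client.ProgrammaticLogin()
    return client.GetData(ids='ga:12345',
                          dimensions='ga:source',
                          metrics='ga:visits',
                          start_date='2010-01-01',
                          end_date='2010-01-31',
                          max_results='50')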
class DataQuery(gdata.service.Query):
"""Object used to construct a URI to a data feed"""
def __init__(self, feed='/feeds/data', text_query=None,
params=None, categories=None, ids="",
dimensions="", metrics="", sort="", filters="",
start_date="", end_date="", start_index="",
max_results=""):
"""Constructor for Analytics List Query
Args:
feed: string (optional) The path for the feed. (e.g. '/feeds/data')
text_query: string (optional) The contents of the q query parameter.
This string is URL escaped upon conversion to a URI.
params: dict (optional) Parameter value string pairs which become URL
params when translated to a URI. These parameters are added to
the query's items.
categories: list (optional) List of category strings which should be
included as query categories. See gdata.service.Query for
additional documentation.
ids: comma-separated string of analytics accounts.
dimensions: comma-separated string of dimensions.
metrics: comma-separated string of metrics.
sort: comma-separated string of dimensions and metrics.
          This may be prefixed with a minus to sort in reverse order
          (e.g. '-ga:keyword').
          If omitted, the first dimension passed in will be used.
filters: comma-separated string of filter parameters
(e.g. 'ga:keyword==google').
start_date: start date for data pull.
end_date: end date for data pull.
start_index: used in combination with max_results to pull more than 1000
entries. This defaults to 1.
      max_results: maximum results that the pull will return. This defaults
          to, and maxes out at, 1000.
Yields:
      A DataQuery object used to construct a URI based on the Analytics
      Data feed.
"""
self.elements = {'ids': ids,
'dimensions': dimensions,
'metrics': metrics,
'sort': sort,
'filters': filters,
'start-date': start_date,
'end-date': end_date,
'start-index': start_index,
'max-results': max_results}
gdata.service.Query.__init__(self, feed, text_query, params, categories)
def ToUri(self):
"""Generates a URI from the query parameters set in the object.
Returns:
A string containing the URI used to retrieve entries from the Analytics
List feed.
"""
old_feed = self.feed
    self.feed = old_feed + '?' + \
urllib.urlencode(dict([(key, value) for key, value in \
self.elements.iteritems() if value]))
new_feed = gdata.service.Query.ToUri(self)
self.feed = old_feed
return new_feed
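# Illustrative sketch, not part of the original module: building a DataQuery
# by hand and rendering it as a request URI (the id 'ga:12345' is a
# placeholder).
def _example_data_query_uri():
    query = DataQuery(ids='ga:12345',
                      dimensions='ga:keyword',
                      metrics='ga:visits',
                      sort='-ga:visits',
                      start_date='2010-01-01',
                      end_date='2010-01-31')
    # Returns something like '/feeds/data?ids=ga%3A12345&metrics=...'
    return query.ToUri()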
class AccountQuery(gdata.service.Query):
"""Object used to construct a URI to query the Google Account List feed"""
def __init__(self, feed='/feeds/accounts', start_index=1,
max_results=1000, username='default', text_query=None,
params=None, categories=None):
"""Constructor for Account List Query
Args:
      feed: string (optional) The path for the feed. (e.g. '/feeds/accounts')
      start_index: int (optional) 1-based index of the first account returned.
      max_results: int (optional) Maximum number of accounts to return.
text_query: string (optional) The contents of the q query parameter.
This string is URL escaped upon conversion to a URI.
params: dict (optional) Parameter value string pairs which become URL
params when translated to a URI. These parameters are added to
the query's items.
categories: list (optional) List of category strings which should be
included as query categories. See gdata.service.Query for
additional documentation.
username: string (deprecated) This value should now always be passed as
'default'.
Yields:
      An AccountQuery object used to construct a URI based on the Account
      List feed.
"""
self.max_results = max_results
self.start_index = start_index
self.username = username
gdata.service.Query.__init__(self, feed, text_query, params, categories)
def ToUri(self):
"""Generates a URI from the query parameters set in the object.
Returns:
A string containing the URI used to retrieve entries from the Account
List feed.
"""
old_feed = self.feed
self.feed = '/'.join([old_feed, self.username]) + '?' + \
'&'.join(['max-results=' + str(self.max_results),
'start-index=' + str(self.start_index)])
new_feed = self.feed
self.feed = old_feed
return new_feed
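# Illustrative sketch, not part of the original module: the account feed URI
# is assembled from the username plus pagination parameters only.
def _example_account_query_uri():
    query = AccountQuery(max_results=50, start_index=1)
    # Returns '/feeds/accounts/default?max-results=50&start-index=1'
    return query.ToUri()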
|
apache-2.0
|
keyurpatel076/MissionPlannerGit
|
Lib/unittest/case.py
|
41
|
42078
|
"""Test case implementation"""
import collections
import sys
import functools
import difflib
import pprint
import re
import warnings
from . import result
from .util import (
strclass, safe_repr, unorderable_list_difference,
_count_diff_all_purpose, _count_diff_hashable
)
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestResult.skip() or one of the skipping decorators
instead of raising this directly.
"""
pass
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
super(_ExpectedFailure, self).__init__()
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
pass
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not (isinstance(test_item, type) and issubclass(test_item, TestCase)):
@functools.wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
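# Illustrative examples, not part of the stdlib source: the decorators accept
# plain callables as well as TestCase subclasses, and the reasons are made up.
@skip("demonstration only")
def _demo_always_skipped():
    raise AssertionError("never reached; SkipTest is raised first")
@skipIf(sys.maxint < 2 ** 31, "requires a 64-bit build")
def _demo_conditionally_skipped():
    pass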
def expectedFailure(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
return wrapper
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __init__(self, expected, test_case, expected_regexp=None):
self.expected = expected
self.failureException = test_case.failureException
self.expected_regexp = expected_regexp
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
# This attribute determines whether long messages (including repr of
# objects used in assert methods) will be printed on failure in *addition*
# to any explicit message passed.
longMessage = False
# This attribute sets the maximum length of a diff in failure messages
# by assert methods using difflib. It is looked up as an instance attribute
# so can be configured by individual tests if required.
maxDiff = 80*8
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._resultForDoCleanups = None
try:
testMethod = getattr(self, methodName)
except AttributeError:
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, self.assertDictEqual)
self.addTypeEqualityFunc(list, self.assertListEqual)
self.addTypeEqualityFunc(tuple, self.assertTupleEqual)
self.addTypeEqualityFunc(set, self.assertSetEqual)
self.addTypeEqualityFunc(frozenset, self.assertSetEqual)
self.addTypeEqualityFunc(unicode, self.assertMultiLineEqual)
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
self._resultForDoCleanups = result
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
success = False
try:
self.setUp()
except SkipTest as e:
self._addSkip(result, str(e))
except KeyboardInterrupt:
raise
except:
result.addError(self, sys.exc_info())
else:
try:
testMethod()
except KeyboardInterrupt:
raise
except self.failureException:
result.addFailure(self, sys.exc_info())
except _ExpectedFailure as e:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, e.exc_info)
else:
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
except _UnexpectedSuccess:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failures",
RuntimeWarning)
result.addFailure(self, sys.exc_info())
except SkipTest as e:
self._addSkip(result, str(e))
except:
result.addError(self, sys.exc_info())
else:
success = True
try:
self.tearDown()
except KeyboardInterrupt:
raise
except:
result.addError(self, sys.exc_info())
success = False
cleanUpSuccess = self.doCleanups()
success = success and cleanUpSuccess
if success:
result.addSuccess(self)
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
result = self._resultForDoCleanups
ok = True
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
try:
function(*args, **kwargs)
except KeyboardInterrupt:
raise
except:
ok = False
result.addError(self, sys.exc_info())
return ok
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"""Check that the expression is false."""
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
# don't switch to '{}' formatting in Python 2.X
# it changes the way unicode input is handled
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
thrown, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(excClass, self)
if callableObj is None:
return context
with context:
callableObj(*args, **kwargs)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '=='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
           difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is more than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is less than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
# Synonyms for assertion methods
# The plurals are undocumented. Keep them that way to discourage use.
# Do not add more. Do not remove.
# Going through a deprecation cycle on these would annoy many people.
assertEquals = assertEqual
assertNotEquals = assertNotEqual
assertAlmostEquals = assertAlmostEqual
assertNotAlmostEquals = assertNotAlmostEqual
assert_ = assertTrue
# These fail* assertion method names are pending deprecation and will
# be a DeprecationWarning in 3.2; http://bugs.python.org/issue2578
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
'Please use {0} instead.'.format(original_func.__name__),
PendingDeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
failUnlessEqual = _deprecate(assertEqual)
failIfEqual = _deprecate(assertNotEqual)
failUnlessAlmostEqual = _deprecate(assertAlmostEqual)
failIfAlmostEqual = _deprecate(assertNotAlmostEqual)
failUnless = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = safe_repr(seq1)
seq2_repr = safe_repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in xrange(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
assertSetEqual uses ducktyping to support different types of sets, and
is optimized for sets specifically (parameters must support a
difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1),
safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, expected, actual, msg=None):
"""Checks whether actual is a superset of expected."""
missing = []
mismatched = []
for key, value in expected.iteritems():
if key not in actual:
missing.append(key)
elif value != actual[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(actual[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
actual_seq and expected_seq have the same element counts.
Equivalent to::
self.assertEqual(Counter(iter(actual_seq)),
Counter(iter(expected_seq)))
Asserts that each element has the same count in both sequences.
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
first_seq, second_seq = list(actual_seq), list(expected_seq)
with warnings.catch_warnings():
if sys.py3kwarning:
# Silence Py3k warning raised during the sorting
for _msg in ["(code|dict|type) inequality comparisons",
"builtin_function_or_method order comparisons",
"comparing unequal types"]:
warnings.filterwarnings("ignore", _msg, DeprecationWarning)
try:
first = collections.Counter(first_seq)
second = collections.Counter(second_seq)
except TypeError:
# Handle case with unhashable elements
differences = _count_diff_all_purpose(first_seq, second_seq)
else:
if first == second:
return
differences = _count_diff_hashable(first_seq, second_seq)
if differences:
standardMsg = 'Element counts were not equal:\n'
lines = ['First has %d, Second has %d: %r' % diff for diff in differences]
diffMsg = '\n'.join(lines)
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, basestring,
'First argument is not a string')
self.assertIsInstance(second, basestring,
'Second argument is not a string')
if first != second:
firstlines = first.splitlines(True)
secondlines = second.splitlines(True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % (safe_repr(first, True),
safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regexp.
Args:
expected_exception: Exception class expected to be raised.
expected_regexp: Regexp (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, expected_regexp)
if callable_obj is None:
return context
with context:
callable_obj(*args, **kwargs)
def assertRegexpMatches(self, text, expected_regexp, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(text):
msg = msg or "Regexp didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
raise self.failureException(msg)
def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regexp, basestring):
unexpected_regexp = re.compile(unexpected_regexp)
match = unexpected_regexp.search(text)
if match:
msg = msg or "Regexp matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regexp.pattern,
text)
raise self.failureException(msg)
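# Illustrative sketch, not part of the stdlib source: a small TestCase
# subclass exercising addCleanup, the delta form of assertAlmostEqual and
# the assertRaises context manager documented above.
class _DemoCase(TestCase):
    def setUp(self):
        self.log = []
        # Cleanups run LIFO after tearDown: the 'second' cleanup fires first.
        self.addCleanup(self.log.append, 'first')
        self.addCleanup(self.log.append, 'second')
    def test_demo(self):
        self.assertAlmostEqual(1.0, 1.05, delta=0.1)
        with self.assertRaises(ZeroDivisionError) as cm:
            1 / 0
        self.assertIsInstance(cm.exception, ZeroDivisionError)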
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s tec=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
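# Illustrative sketch, not part of the stdlib source: wrapping a pre-existing
# plain function as a test case, with optional set-up and tidy-up callables.
def _demo_check():
    assert 1 + 1 == 2
_demo_case = FunctionTestCase(_demo_check,
                              setUp=lambda: None,
                              tearDown=lambda: None,
                              description="wraps a bare function")
# _demo_case.run() executes setUp, _demo_check and tearDown in that order.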
|
gpl-3.0
|
tensorflow/lucid
|
tests/misc/io/test_saving.py
|
1
|
6630
|
import time
import pytest
import numpy as np
from lucid.misc.io.saving import save, CaptureSaveContext, batch_save
from lucid.misc.io.loading import load
from lucid.misc.io.scoping import io_scope, current_io_scopes
from concurrent.futures import ThreadPoolExecutor
import os.path
import io
import tensorflow as tf
dictionary = {"key": "value"}
dictionary_json = """{
"key": "value"
}"""
array1 = np.eye(10, 10)
array2 = np.dstack([np.eye(10, 10, k=i - 1) for i in range(3)])
def _remove(path):
try:
os.remove(path)
except OSError:
pass
def test_save_json():
path = "./tests/fixtures/generated_outputs/dictionary.json"
_remove(path)
save(dictionary, path)
assert os.path.isfile(path)
content = io.open(path, "rt").read()
assert content == dictionary_json
def test_save_npy():
path = "./tests/fixtures/generated_outputs/array.npy"
_remove(path)
save(array1, path)
assert os.path.isfile(path)
re_read_array = np.load(path)
assert np.array_equal(array1, re_read_array)
def test_save_npz_array():
path = "./tests/fixtures/generated_outputs/arrays.npz"
_remove(path)
save([array1, array2], path)
assert os.path.isfile(path)
re_read_arrays = np.load(path)
assert all(arr in re_read_arrays for arr in ("arr_0", "arr_1"))
assert np.array_equal(array1, re_read_arrays["arr_0"])
assert np.array_equal(array2, re_read_arrays["arr_1"])
def test_save_npz_dict():
path = "./tests/fixtures/generated_outputs/arrays.npz"
_remove(path)
arrays = {"array1": array1, "array2": array2}
save(arrays, path)
assert os.path.isfile(path)
re_read_arrays = np.load(path)
assert all(arr in re_read_arrays for arr in list(arrays))
assert np.array_equal(arrays["array1"], re_read_arrays["array1"])
def test_save_image_png():
path = "./tests/fixtures/generated_outputs/rgbeye.png"
_remove(path)
save(array2, path)
assert os.path.isfile(path)
def test_save_image_jpg():
path = "./tests/fixtures/generated_outputs/rgbeye.jpg"
_remove(path)
save(array2, path)
assert os.path.isfile(path)
def test_save_array_txt():
path = "./tests/fixtures/generated_outputs/multiline.txt"
_remove(path)
stringarray = ["Line {:d}".format(i) for i in range(10)]
save(stringarray, path)
assert os.path.isfile(path)
def test_save_txt():
path = "./tests/fixtures/generated_outputs/multiline.txt"
_remove(path)
string = "".join(["Line {:d}\n".format(i) for i in range(10)])
save(string, path)
assert os.path.isfile(path)
def test_save_named_handle():
path = "./tests/fixtures/generated_outputs/rgbeye.jpg"
_remove(path)
with io.open(path, "wb") as handle:
save(array2, handle)
assert os.path.isfile(path)
def test_save_compressed_npy():
uncompressed_path = "./tests/fixtures/generated_outputs/array.npy"
_remove(uncompressed_path)
save(array2, uncompressed_path)
compressed_path = "./tests/fixtures/generated_outputs/array.npy.xz"
_remove(compressed_path)
save(array2, compressed_path)
assert os.path.isfile(uncompressed_path)
assert os.path.isfile(compressed_path)
re_read_array = load(compressed_path)
assert np.array_equal(array2, re_read_array)
uncompressed_size = os.path.getsize(uncompressed_path)
compressed_size = os.path.getsize(compressed_path)
assert compressed_size < uncompressed_size
def test_save_load_pickle():
path = "./tests/fixtures/generated_outputs/some_data.pickle"
data = {
'test': [1, 2, 3, "some string"],
'numpy_values': array2
}
_remove(path)
with io.open(path, "wb") as handle:
with pytest.raises(ValueError):
save(data, handle)
save(data, handle, allow_unsafe_formats=True)
assert os.path.isfile(path)
with pytest.raises(ValueError):
loaded_data = load(path)
loaded_data = load(path, allow_unsafe_formats=True)
assert loaded_data['test'] == data['test']
assert np.array_equal(loaded_data['numpy_values'], data['numpy_values'])
def test_unknown_extension():
with pytest.raises(ValueError):
save({}, "test.unknown")
def test_unknown_compressor():
with pytest.raises(ValueError):
save(array2, "test.npy.gz") # .gz is not currently supported, only xy
def test_save_protobuf():
path = "./tests/fixtures/generated_outputs/graphdef.pb"
_remove(path)
with tf.Graph().as_default():
a = tf.Variable(42)
graphdef = a.graph.as_graph_def()
save(graphdef, path)
assert os.path.isfile(path)
def test_write_scope_compatibility():
path = "./tests/fixtures/generated_outputs/write_scope_compatibility.txt"
_remove(path)
with io_scope("./tests/fixtures/generated_outputs"):
save("test content", 'write_scope_compatibility.txt')
assert os.path.isfile(path)
def test_capturing_saves():
path = "./tests/fixtures/generated_outputs/test_capturing_saves.txt"
_remove(path)
context = CaptureSaveContext()
with context, io_scope("./tests/fixtures/generated_outputs"):
save("test", "test_capturing_saves.txt")
captured = context.captured_saves
assert len(captured) == 1
assert "type" in captured[0]
assert captured[0]["type"] == "txt"
def test_threadlocal_io_scopes():
""" This tests that scopes are thread local and they don't clobber each other when different threads are competing"""
def _return_io_scope(io_scope_path):
with io_scope(io_scope_path):
time.sleep(np.random.uniform(0.05, 0.1))
return current_io_scopes()[-1]
n_tasks = 16
n_workers = 8
with ThreadPoolExecutor(max_workers=n_workers) as executor:
futures = {executor.submit(_return_io_scope, f'gs://test-{i}'): f'gs://test-{i}' for i in range(n_tasks)}
results = [f.result() for f in futures]
assert results == list(futures.values())
def test_batch_saves():
save_ops = [(str(i), f"write_batch_{i}.txt") for i in range(5)]
[_remove(f"./tests/fixtures/generated_outputs/write_batch_{i}.txt") for i in range(5)]
context = CaptureSaveContext()
with context, io_scope("./tests/fixtures/generated_outputs"):
results = batch_save(save_ops)
assert len(results) == 5
assert len(context.captured_saves) == 5
assert context.captured_saves[0]['type'] == 'txt'
print(context.captured_saves)
assert 'write_batch_' in context.captured_saves[0]['url']
assert all([os.path.isfile(f"./tests/fixtures/generated_outputs/write_batch_{i}.txt") for i in range(5)])
|
apache-2.0
|
Snake4100/Mon-Site
|
vendor/doctrine/orm/docs/en/conf.py
|
2448
|
6497
|
# -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
|
mit
|
collinjackson/mojo
|
nacl_bindings_generator/interface_dsl.py
|
5
|
4095
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Interface(object):
def __init__(self):
self.functions = []
def Func(self, name, return_type):
f = Function(self, len(self.functions), name, return_type)
self.functions.append(f)
return f
def Finalize(self):
for f in self.functions:
f.Finalize()
class Function(object):
def __init__(self, parent, uid, name, return_type):
self.parent = parent
self.uid = uid
self.name = name
self.return_type = return_type
self.params = []
self.param_by_name = {}
self.result_param = None
self.broken_in_nacl = False
def Param(self, name, param_type=None):
p = Param(self, len(self.params), name, param_type)
self.params.append(p)
self.param_by_name[name] = p
return p
def ParamList(self):
return [param.param_type + ' ' + param.name for param in self.params]
def ParamDecl(self):
if self.params:
return ', '.join(self.ParamList())
else:
return 'void'
def IsBrokenInNaCl(self):
self.broken_in_nacl = True
def Finalize(self):
self.result_param = Param(self, len(self.params), 'result')
self.result_param.Out(self.return_type).AlwaysWritten()
class Param(object):
def __init__(self, parent, uid, name, param_type=None):
self.parent = parent
self.uid = uid
self.name = name
self.base_type = param_type
self.param_type = param_type
self.size = None
self.is_input = False
self.is_output = False
self.is_array = False
self.is_struct = False
self.is_extensible = False
self.is_optional = False
self.is_always_written = False
self.is_pointer = False
def GetSizeParam(self):
assert self.size
return self.parent.param_by_name[self.size]
def In(self, ty):
self.base_type = ty
self.param_type = ty
self.is_input = True
self.is_pointer = ty.endswith('*')
return self
def InArray(self, ty, size):
self.base_type = ty
self.param_type = 'const ' + ty + '*'
self.size = size
self.is_input = True
self.is_array = True
return self
# An "extensible" struct is one where we don't know the exact size - rather
# the first 4 bytes of the struct declare the length of the struct. This
# allows forwards and backwards compatibility with additive changes to the
# structure definition.
def InExtensibleStruct(self, ty):
self.base_type = ty
self.param_type = 'const struct ' + ty + '*'
self.is_input = True
self.is_struct = True
self.is_extensible = True
return self
def InOut(self, ty):
self.base_type = ty
self.param_type = ty + '*'
self.is_input = True
self.is_output = True
self.is_pointer = ty.endswith('*')
return self
def Out(self, ty):
self.base_type = ty
self.param_type = ty + '*'
self.is_output = True
self.is_pointer = ty.endswith('*')
return self
def OutArray(self, ty, size):
self.base_type = ty
self.param_type = ty + '*'
self.size = size
self.is_array = True
self.is_output = True
return self
# The size of the struct is fixed by the API, it cannot be extended.
def OutFixedStruct(self, ty):
self.base_type = ty
self.param_type = 'struct ' + ty + '*'
self.is_output = True
self.is_struct = True
self.is_extensible = False
return self
def OutFixedStructArray(self, ty, size):
self.base_type = ty
self.param_type = 'struct ' + ty + '*'
self.size = size
self.is_array = True
self.is_output = True
return self
# Declares that it is valid to pass a null pointer.
def Optional(self):
assert not self.IsPassedByValue()
self.is_optional = True
return self
def AlwaysWritten(self):
assert self.is_output, self
self.is_always_written = True
return self
def IsScalar(self):
return not self.is_array and not self.is_struct
def IsPassedByValue(self):
return not self.is_output and self.IsScalar()
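# Illustrative sketch, not part of the original module: declaring a function
# with the DSL above; the interface, names and types are made up.
def _example_interface():
    iface = Interface()
    read = iface.Func('ExampleRead', 'MojoResult')
    read.Param('handle').In('MojoHandle')
    read.Param('buffer').OutArray('char', 'num_bytes')
    read.Param('num_bytes').InOut('uint32_t')
    iface.Finalize()
    # Returns 'MojoHandle handle, char* buffer, uint32_t* num_bytes'
    return read.ParamDecl()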
|
bsd-3-clause
|
idea4bsd/idea4bsd
|
plugins/hg4idea/testData/bin/mercurial/hgweb/hgweb_mod.py
|
91
|
15218
|
# hgweb/hgweb_mod.py - Web interface for a repository.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <[email protected]>
# Copyright 2005-2007 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import os
from mercurial import ui, hg, hook, error, encoding, templater, util, repoview
from mercurial.templatefilters import websub
from mercurial.i18n import _
from common import get_stat, ErrorResponse, permhooks, caching
from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
from request import wsgirequest
import webcommands, protocol, webutil, re
perms = {
'changegroup': 'pull',
'changegroupsubset': 'pull',
'getbundle': 'pull',
'stream_out': 'pull',
'listkeys': 'pull',
'unbundle': 'push',
'pushkey': 'push',
}
def makebreadcrumb(url, prefix=''):
'''Return a 'URL breadcrumb' list
A 'URL breadcrumb' is a list of URL-name pairs,
corresponding to each of the path items on a URL.
This can be used to create path navigation entries.
'''
if url.endswith('/'):
url = url[:-1]
if prefix:
url = '/' + prefix + url
relpath = url
if relpath.startswith('/'):
relpath = relpath[1:]
breadcrumb = []
urlel = url
pathitems = [''] + relpath.split('/')
for pathel in reversed(pathitems):
if not pathel or not urlel:
break
breadcrumb.append({'url': urlel, 'name': pathel})
urlel = os.path.dirname(urlel)
return reversed(breadcrumb)
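# Illustrative sketch, not part of the original module: decomposing a path
# into url/name pairs, left to right.
def _example_breadcrumb():
    # [{'url': '/hg', 'name': 'hg'}, {'url': '/hg/repo', 'name': 'repo'}]
    return list(makebreadcrumb('/hg/repo'))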
class hgweb(object):
def __init__(self, repo, name=None, baseui=None):
if isinstance(repo, str):
if baseui:
u = baseui.copy()
else:
u = ui.ui()
self.repo = hg.repository(u, repo)
else:
self.repo = repo
self.repo = self._getview(self.repo)
self.repo.ui.setconfig('ui', 'report_untrusted', 'off')
self.repo.baseui.setconfig('ui', 'report_untrusted', 'off')
self.repo.ui.setconfig('ui', 'nontty', 'true')
self.repo.baseui.setconfig('ui', 'nontty', 'true')
hook.redirect(True)
self.mtime = -1
self.size = -1
self.reponame = name
self.archives = 'zip', 'gz', 'bz2'
self.stripecount = 1
# a repo owner may set web.templates in .hg/hgrc to get any file
# readable by the user running the CGI script
self.templatepath = self.config('web', 'templates')
self.websubtable = self.loadwebsub()
# The CGI scripts are often run by a user different from the repo owner.
# Trust the settings from the .hg/hgrc files by default.
def config(self, section, name, default=None, untrusted=True):
return self.repo.ui.config(section, name, default,
untrusted=untrusted)
def configbool(self, section, name, default=False, untrusted=True):
return self.repo.ui.configbool(section, name, default,
untrusted=untrusted)
def configlist(self, section, name, default=None, untrusted=True):
return self.repo.ui.configlist(section, name, default,
untrusted=untrusted)
def _getview(self, repo):
viewconfig = self.config('web', 'view', 'served')
if viewconfig == 'all':
return repo.unfiltered()
elif viewconfig in repoview.filtertable:
return repo.filtered(viewconfig)
else:
return repo.filtered('served')
def refresh(self, request=None):
st = get_stat(self.repo.spath)
# compare changelog size in addition to mtime to catch
# rollbacks made less than a second ago
if st.st_mtime != self.mtime or st.st_size != self.size:
self.mtime = st.st_mtime
self.size = st.st_size
r = hg.repository(self.repo.baseui, self.repo.root)
self.repo = self._getview(r)
self.maxchanges = int(self.config("web", "maxchanges", 10))
self.stripecount = int(self.config("web", "stripes", 1))
self.maxshortchanges = int(self.config("web", "maxshortchanges",
60))
self.maxfiles = int(self.config("web", "maxfiles", 10))
self.allowpull = self.configbool("web", "allowpull", True)
encoding.encoding = self.config("web", "encoding",
encoding.encoding)
if request:
self.repo.ui.environ = request.env
def run(self):
if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
raise RuntimeError("This function is only intended to be "
"called while running as a CGI script.")
import mercurial.hgweb.wsgicgi as wsgicgi
wsgicgi.launch(self)
def __call__(self, env, respond):
req = wsgirequest(env, respond)
return self.run_wsgi(req)
def run_wsgi(self, req):
self.refresh(req)
# work with CGI variables to create coherent structure
# use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
req.url = req.env['SCRIPT_NAME']
if not req.url.endswith('/'):
req.url += '/'
if 'REPO_NAME' in req.env:
req.url += req.env['REPO_NAME'] + '/'
if 'PATH_INFO' in req.env:
parts = req.env['PATH_INFO'].strip('/').split('/')
repo_parts = req.env.get('REPO_NAME', '').split('/')
if parts[:len(repo_parts)] == repo_parts:
parts = parts[len(repo_parts):]
query = '/'.join(parts)
else:
query = req.env['QUERY_STRING'].split('&', 1)[0]
query = query.split(';', 1)[0]
# process this if it's a protocol request
# protocol bits don't need to create any URLs
# and the clients always use the old URL structure
cmd = req.form.get('cmd', [''])[0]
if protocol.iscmd(cmd):
try:
if query:
raise ErrorResponse(HTTP_NOT_FOUND)
if cmd in perms:
self.check_perm(req, perms[cmd])
return protocol.call(self.repo, req, cmd)
except ErrorResponse, inst:
# A client that sends unbundle without 100-continue will
# break if we respond early.
if (cmd == 'unbundle' and
(req.env.get('HTTP_EXPECT',
'').lower() != '100-continue') or
req.env.get('X-HgHttp2', '')):
req.drain()
req.respond(inst, protocol.HGTYPE,
body='0\n%s\n' % inst.message)
return ''
# translate user-visible url structure to internal structure
args = query.split('/', 2)
if 'cmd' not in req.form and args and args[0]:
cmd = args.pop(0)
style = cmd.rfind('-')
if style != -1:
req.form['style'] = [cmd[:style]]
cmd = cmd[style + 1:]
# avoid accepting e.g. style parameter as command
if util.safehasattr(webcommands, cmd):
req.form['cmd'] = [cmd]
else:
cmd = ''
if cmd == 'static':
req.form['file'] = ['/'.join(args)]
else:
if args and args[0]:
node = args.pop(0)
req.form['node'] = [node]
if args:
req.form['file'] = args
ua = req.env.get('HTTP_USER_AGENT', '')
if cmd == 'rev' and 'mercurial' in ua:
req.form['style'] = ['raw']
if cmd == 'archive':
fn = req.form['node'][0]
for type_, spec in self.archive_specs.iteritems():
ext = spec[2]
if fn.endswith(ext):
req.form['node'] = [fn[:-len(ext)]]
req.form['type'] = [type_]
# process the web interface request
try:
tmpl = self.templater(req)
ctype = tmpl('mimetype', encoding=encoding.encoding)
ctype = templater.stringify(ctype)
# check read permissions non-static content
if cmd != 'static':
self.check_perm(req, None)
if cmd == '':
req.form['cmd'] = [tmpl.cache['default']]
cmd = req.form['cmd'][0]
if self.configbool('web', 'cache', True):
caching(self, req) # sets ETag header or raises NOT_MODIFIED
if cmd not in webcommands.__all__:
msg = 'no such method: %s' % cmd
raise ErrorResponse(HTTP_BAD_REQUEST, msg)
elif cmd == 'file' and 'raw' in req.form.get('style', []):
self.ctype = ctype
content = webcommands.rawfile(self, req, tmpl)
else:
content = getattr(webcommands, cmd)(self, req, tmpl)
req.respond(HTTP_OK, ctype)
return content
except (error.LookupError, error.RepoLookupError), err:
req.respond(HTTP_NOT_FOUND, ctype)
msg = str(err)
if (util.safehasattr(err, 'name') and
not isinstance(err, error.ManifestLookupError)):
msg = 'revision not found: %s' % err.name
return tmpl('error', error=msg)
except (error.RepoError, error.RevlogError), inst:
req.respond(HTTP_SERVER_ERROR, ctype)
return tmpl('error', error=str(inst))
except ErrorResponse, inst:
req.respond(inst, ctype)
if inst.code == HTTP_NOT_MODIFIED:
# Not allowed to return a body on a 304
return ['']
return tmpl('error', error=inst.message)
def loadwebsub(self):
websubtable = []
websubdefs = self.repo.ui.configitems('websub')
# we must maintain interhg backwards compatibility
websubdefs += self.repo.ui.configitems('interhg')
for key, pattern in websubdefs:
# grab the delimiter from the character after the "s"
unesc = pattern[1]
delim = re.escape(unesc)
# identify portions of the pattern, taking care to avoid escaped
# delimiters. the replace format and flags are optional, but
# delimiters are required.
match = re.match(
r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
% (delim, delim, delim), pattern)
if not match:
self.repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
% (key, pattern))
continue
# we need to unescape the delimiter for regexp and format
delim_re = re.compile(r'(?<!\\)\\%s' % delim)
regexp = delim_re.sub(unesc, match.group(1))
format = delim_re.sub(unesc, match.group(2))
# the pattern allows for 6 regexp flags, so set them if necessary
flagin = match.group(3)
flags = 0
if flagin:
for flag in flagin.upper():
flags |= re.__dict__[flag]
try:
regexp = re.compile(regexp, flags)
websubtable.append((regexp, format))
except re.error:
self.repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
% (key, regexp))
return websubtable
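    # A websub definition this parser accepts looks like the following hgrc
    # entry (illustrative URL), i.e. an s/regexp/format/flags expression with
    # an arbitrary delimiter:
    #   [websub]
    #   issues = s|issue(\d+)|<a href="https://bts.example.org/issue\1">issue\1</a>|i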
def templater(self, req):
# determine scheme, port and server name
# this is needed to create absolute urls
proto = req.env.get('wsgi.url_scheme')
if proto == 'https':
proto = 'https'
default_port = "443"
else:
proto = 'http'
default_port = "80"
port = req.env["SERVER_PORT"]
port = port != default_port and (":" + port) or ""
urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
logourl = self.config("web", "logourl", "http://mercurial.selenic.com/")
logoimg = self.config("web", "logoimg", "hglogo.png")
staticurl = self.config("web", "staticurl") or req.url + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
# some functions for the templater
def header(**map):
yield tmpl('header', encoding=encoding.encoding, **map)
def footer(**map):
yield tmpl("footer", **map)
def motd(**map):
yield self.config("web", "motd", "")
# figure out which style to use
vars = {}
styles = (
req.form.get('style', [None])[0],
self.config('web', 'style'),
'paper',
)
style, mapfile = templater.stylemap(styles, self.templatepath)
if style == styles[0]:
vars['style'] = style
start = req.url[-1] == '?' and '&' or '?'
sessionvars = webutil.sessionvars(vars, start)
if not self.reponame:
self.reponame = (self.config("web", "name")
or req.env.get('REPO_NAME')
or req.url.strip('/') or self.repo.root)
def websubfilter(text):
return websub(text, self.websubtable)
# create the templater
tmpl = templater.templater(mapfile,
filters={"websub": websubfilter},
defaults={"url": req.url,
"logourl": logourl,
"logoimg": logoimg,
"staticurl": staticurl,
"urlbase": urlbase,
"repo": self.reponame,
"header": header,
"footer": footer,
"motd": motd,
"sessionvars": sessionvars,
"pathdef": makebreadcrumb(req.url),
})
return tmpl
def archivelist(self, nodeid):
allowed = self.configlist("web", "allow_archive")
for i, spec in self.archive_specs.iteritems():
if i in allowed or self.configbool("web", "allow" + i):
yield {"type" : i, "extension" : spec[2], "node" : nodeid}
archive_specs = {
'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
'zip': ('application/zip', 'zip', '.zip', None),
}
def check_perm(self, req, op):
for hook in permhooks:
hook(self, req, op)
|
apache-2.0
|
oconnor663/peru
|
peru/display.py
|
1
|
9334
|
import asyncio
import io
import re
import sys
# The display classes deal with output from subprocesses. The FancyDisplay
# gives a multi-line, real-time view of each running process that looks nice in
# the terminal. The VerboseDisplay collects output from each job and prints it
# all when the job is finished, in a way that's suitable for logs. The
# QuietDisplay prints nothing.
#
# All of the display types inherit from BaseDisplay and provide the same
# interface. Callers use get_handle() to get a display handle for each
# subprocess job that's going to run. The handle is used as a context manager
# (inside a with statement) to indicate when the job is starting and stopping,
# and all of the output from the subprocess is passed to the handle's write()
# method. There is also a print() method on the display, for output that's not
# tied to a particular job, which prints to the terminal in a way that won't
# get stomped on by FancyDisplay's redrawing.
#
# Like other errors, we handle job errors by throwing a PrintableError, which
# get caught in main. So the displays don't need to do anything special to show
# errors.
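# A minimal usage sketch (hypothetical wiring; the real callers live in peru's
# fetch/sync machinery):
#
#   display = FancyDisplay()
#   handle = display.get_handle('fetch my-module')
#   with handle:                      # marks the job as started/finished
#       handle.write('cloning...\n')  # subprocess output goes through write()
#   display.print('done')            # safe against FancyDisplay's redraws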
ANSI_CURSOR_UP_ONE_LINE = '\x1b[1A'
ANSI_CLEAR_LINE = '\x1b[2K'
ANSI_DISABLE_LINE_WRAP = '\x1b[?7l'
ANSI_ENABLE_LINE_WRAP = '\x1b[?7h'
class BaseDisplay:
def __init__(self, output=None):
self.output = output or sys.stdout
# Every job/handle gets a unique id.
self._next_job_id = 0
# Output from each job is buffered.
self.buffers = {}
# Each job has a title, like the name of the module being fetched.
self.titles = {}
# We also keep track of any handles that haven't been entered yet, so
# that the FancyDisplay can know when to finally clean up.
self.outstanding_jobs = set()
def get_handle(self, title):
job_id = self._next_job_id
self._next_job_id += 1
self.titles[job_id] = title
self.buffers[job_id] = io.StringIO()
self.outstanding_jobs.add(job_id)
return _DisplayHandle(self, job_id)
# FancyDisplay overrides print() to avoid conflicting with redraws.
def print(self, *args, **kwargs):
print(*args, file=self.output, **kwargs)
# Callbacks that get overridden by subclasses.
def _job_started(self, job_id):
pass
def _job_written(self, job_id, string):
pass
def _job_finished(self, job_id):
pass
# Callbacks for handles.
def _handle_start(self, job_id):
self._job_started(job_id)
def _handle_write(self, job_id, string):
self.buffers[job_id].write(string)
self._job_written(job_id, string)
def _handle_finish(self, job_id):
self.outstanding_jobs.remove(job_id)
self._job_finished(job_id)
class QuietDisplay(BaseDisplay):
'''Prints nothing.'''
pass
class VerboseDisplay(BaseDisplay):
'''Waits until jobs are finished and then prints all of their output at
once, to make sure jobs don't get interleaved. We use '===' as a delimiter
to try to separate jobs from one another, and from other output.'''
def _job_started(self, job_id):
print('===', 'started', self.titles[job_id], '===', file=self.output)
def _job_finished(self, job_id):
print('===', 'finished', self.titles[job_id], '===', file=self.output)
outputstr = self.buffers[job_id].getvalue()
if outputstr:
self.output.write(outputstr)
print('===', file=self.output)
class FancyDisplay(BaseDisplay):
'''Prints a multi-line, real-time display of all the latest output lines
from each job.'''
def __init__(self, *args):
super().__init__(*args)
# Every time we draw we need to erase the lines that were printed
# before. This keeps track of that number. Note that we split output on
# newlines and use no-wrap control codes in the terminal, so we only
# need to count the number of jobs drawn.
self._lines_printed = 0
# This is the list of all active jobs. There's no guarantee that jobs
# start in any particular order, so this list also helps us keep the
# order stable.
self._job_slots = []
# The last line output from each job. This is what gets drawn.
self._output_lines = {}
# Lines that need to be printed above the display. This has to happen
# during the next draw, right after the display is cleared.
self._to_print = []
# To avoid flicker, we draw on a short timeout instead of every time we
# receive output. When this asyncio handle is set, it means a draw is
# already pending.
self._draw_later_handle = None
def print(self, *args, **kwargs):
output = io.StringIO()
print(*args, file=output, **kwargs)
self._to_print.append(output.getvalue())
# If we use _draw_later, the program might exit before the draw timer
# fires. Drawing right now ensures that output never gets dropped.
self._draw()
def _draw(self):
self._cancel_draw_later()
# Erase everything we printed before.
for i in range(self._lines_printed):
self.output.write(ANSI_CURSOR_UP_ONE_LINE)
self.output.write(ANSI_CLEAR_LINE)
self._lines_printed = 0
# If we have any lines from print(), print them now. They will end up
# above the display like regular output.
for string in self._to_print:
self.output.write(string)
self._to_print.clear()
# Redraw all the jobs.
self.output.write(ANSI_DISABLE_LINE_WRAP)
for slot, job_id in enumerate(self._job_slots):
# Fancy unicode box characters in the left column.
if slot == 0:
self.output.write('┌' if len(self._job_slots) > 1 else '╶')
elif slot < len(self._job_slots) - 1:
self.output.write('├')
else:
self.output.write('└')
self.output.write(' ')
self.output.write(self.titles[job_id])
self.output.write(': ')
self.output.write(self._output_lines[job_id])
# Some terminals keep overwriting the last character in no-wrap
# mode. Make the trailing character a space.
self.output.write(' ')
self.output.write('\n')
self._lines_printed += 1
self.output.write(ANSI_ENABLE_LINE_WRAP)
# Finally, flush output to the terminal. Hopefully everything gets
# painted in one frame.
self.output.flush()
def _draw_later(self):
if self._draw_later_handle:
# There is already a draw pending.
return
self._draw_later_handle = asyncio.get_event_loop().call_later(
0.1, self._draw)
def _cancel_draw_later(self):
if self._draw_later_handle:
self._draw_later_handle.cancel()
self._draw_later_handle = None
def _job_started(self, job_id):
self._job_slots.append(job_id)
self._output_lines[job_id] = ''
self._draw_later()
def _job_written(self, job_id, string):
# We need to split output on newlines. Some programs (git) also use
# carriage return to redraw a line, so we split on that too.
        any_newlines = '(?:\n|\r)+'  # '(?:' makes the group non-capturing, so re.split() doesn't return the separators
lines = [line.strip() for line in re.split(any_newlines, string)]
# NB: We don't make any attempt here to join lines that might span
# multiple write() calls. `create_subprocess_with_handle()` reads
# output in 4096 byte chunks, so this isn't likely, but it's possible.
for line in lines:
# Ignore empty lines, both from the job and from re.split().
if line:
self._output_lines[job_id] = line
self._draw_later()
def _job_finished(self, job_id):
self._job_slots.remove(job_id)
if not self.outstanding_jobs:
# If the last job is finished, the event loop might be about to
# stop. Clear the terminal right now, because _draw_later might
# never run.
self._draw()
else:
# If there are pending jobs, don't clear the display immediately.
# This avoids flickering between jobs when only one job is running
# at a time (-j1).
self._draw_later()
class _DisplayHandle:
def __init__(self, display, job_id):
self._display = display
self._job_id = job_id
self._opened = False
self._closed = False
def write(self, string):
assert self._opened and not self._closed
self._display._handle_write(self._job_id, string)
# Context manager interface. We're extra careful to make sure that the
    # handle is only written to inside a with statement, and only used once.
def __enter__(self):
assert not self._opened and not self._closed
self._opened = True
self._display._handle_start(self._job_id)
return self
def __exit__(self, *args):
assert self._opened and not self._closed
self._display._handle_finish(self._job_id)
self._job_id = None
self._closed = True
|
mit
|
Cyberjusticelab/JusticeAI
|
src/ml_service/feature_extraction/post_processing/regex/regex_entity_extraction.py
|
1
|
7500
|
from feature_extraction.post_processing.regex.regex_lib import RegexLib
import re
import datetime
import time
import unicodedata
from util.log import Log
import math
class EntityExtraction:
regex_bin = None
one_month = 86400 * 30 # unix time for 1 month
month_dict = {
'janvier': 1,
'fevrier': 2,
'mars': 3,
'avril': 4,
'mai': 5,
'juin': 6,
'juillet': 7,
'aout': 8,
'septembre': 9,
"octobre": 10,
'novembre': 11,
'decembre': 12
}
def __init__(self):
pass
@staticmethod
def match_any_regex(text, regex_array, regex_type):
"""
        1) Load the regex binaries only once; if already loaded, continue.
        2) Iterate over all the regexes and search the text
        3) If a regex finds a match, extract the entity from that sub sentence
:param text: String representation of precedent
:param regex_array: List of regex
:param regex_type: Entity we look for in a particular regex match
:return: (Boolean, entity<int>)
"""
if EntityExtraction.regex_bin is None:
EntityExtraction.regex_bin = RegexLib.model
for regex in regex_array:
regex_result = regex.search(text)
if regex_result:
sentence = regex_result.group(0).lower()
return EntityExtraction.__extract_regex_entity(sentence, regex_type)
return False, 0
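    # Illustrative call (hypothetical regex; the real patterns and the shape of
    # regex_bin['MONEY_REGEX'] come from RegexLib, so the extracted value below
    # is only a plausible outcome):
    #   found, amount = EntityExtraction.match_any_regex(
    #       "le locataire doit 500 $", [re.compile(r"\d+\s*\$")], 'MONEY_REGEX')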
@staticmethod
def __extract_regex_entity(sentence, regex_type):
"""
Entity extraction from the text
1) If the type is BOOLEAN then simply return True, 1
2) If the type is MONEY_REGEX then extract the money value and format string so that it is
convertible to integer
        3) else return False, 0
:param sentence: sub sentence from text to apply regex
:param regex_type: type of information to extract
:return: (boolean, int)
"""
# removes accents
nfkd_form = unicodedata.normalize('NFKD', sentence)
sentence = u"".join([character for character in nfkd_form if not unicodedata.combining(character)])
if regex_type == 'BOOLEAN':
return True, 1
elif regex_type == 'MONEY_REGEX':
return EntityExtraction.__regex_money(regex_type, sentence)
elif regex_type == 'DATE_REGEX':
return EntityExtraction.get_fact_duration(sentence)
return False, 0
@staticmethod
def get_fact_duration(sentence):
"""
        Tries to find a date range within a sentence by matching it against regexes.
        The first regex looks for the following format: 1er decembre 20** [a|au ...] 30 mai 20**
        The second regex looks for 1 or more months being stated.
        Matched dates are converted to unix time:
        1) unless specified, the start date is assumed to be the first day of the month
        2) unless specified, the end date is assumed to be the 28th of the month, since
        every month has at least 28 days
        The information captured by the regexes above allows us to get the time difference in days
        :param sentence: sentence to extract entities from
        :return: boolean (date found), integer (months between dates)
"""
# Verify if the sentence is about non-payment
non_payment_regex = re.compile("pas paye", re.IGNORECASE)
        if not non_payment_regex.search(sentence):
            return False, 0
# First regex
start_end_date_regex = re.compile(RegexLib.DATE_RANGE_REGEX, re.IGNORECASE)
entities = re.findall(start_end_date_regex, sentence)
        if len(entities) > 0:
            entities = entities.pop(0)
try:
start_day = int(entities[0])
except ValueError as error:
Log.write(str(error) + ": could not convert " + entities[0] + " to an int")
                start_day = 1
            start_month = ''
            try:
                start_month = str(EntityExtraction.month_dict[entities[1]])
            except KeyError as error:
                Log.write(str(error) + ": " + entities[1] +
                          " is not a month or has a spelling mistake")
                return False, 0
try:
start_year = int(entities[2])
except ValueError as error:
Log.write(str(error) + ": could not find start year")
                start_year = entities[5]  # fall back to the end year
try:
end_day = int(entities[3])
except ValueError as error:
Log.write(str(error) + ": could not convert " + entities[3] + " to an int")
                end_day = 28
            end_month = ''
            try:
                end_month = str(EntityExtraction.month_dict[entities[4]])
            except KeyError as error:
                Log.write(str(error) + ": " + entities[4] +
                          " is not a month or has a spelling mistake")
                return False, 0
end_year = entities[5]
start_unix = EntityExtraction.__date_to_unix([str(start_day), str(start_month), str(start_year)])
end_unix = EntityExtraction.__date_to_unix([str(end_day), str(end_month), str(end_year)])
return True, EntityExtraction.__get_time_interval_in_months(start_unix, end_unix)
# Second regex
month_regex = re.compile(RegexLib.DATE_REGEX, re.IGNORECASE)
entities = re.findall(month_regex, sentence)
        if len(entities) > 0:
            return True, len(entities)  # total months found
return False, 0
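    # Plausible input (assuming RegexLib.DATE_RANGE_REGEX captures it):
    #   "le locataire n'a pas paye le loyer du 1 juin 2017 au 28 aout 2017"
    # passes the non-payment check and yields (True, 3): roughly three months
    # separate the two dates.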
@staticmethod
def __regex_money(regex_type, sentence):
"""
        1) create the money regex --> re.compile(regex string)
        2) Find the dollar amount in the sentence
        3) filter the string by removing unnecessary characters
        4) return the entity
        :param regex_type: str(MONEY_REGEX)
        :param sentence: sub sentence from text to apply the regex
        :return: boolean, money amount as a string
"""
generic_regex = re.compile(EntityExtraction.regex_bin[regex_type])
entity = generic_regex.search(sentence).group(0)
        # Functional, though not necessarily optimal
entity = entity.replace("$", "")
entity = entity.replace(" ", "")
entity = entity.replace(",", ".")
if entity[-1] == '.':
entity = entity[:-1]
return True, entity
@staticmethod
def __date_to_unix(date):
"""
Given a date list (ex: [30,12,2019]) this function gets the unix time that represents this date
:param date: date to convert into unix time
:return: unix time representing the input date
"""
date_string = " ".join(date)
try:
unix_time = time.mktime(datetime.datetime.strptime(date_string, '%d %m %Y').timetuple())
except (ValueError, OverflowError) as error:
Log.write(str(error) + ": " + str(date_string))
return None
return unix_time
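    # e.g. __date_to_unix(['30', '12', '2019']) returns the unix timestamp for
    # December 30th 2019 (local time), or None if the date cannot be parsed.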
@staticmethod
def __get_time_interval_in_months(first_date, second_date):
"""
Calculates the time difference between 2 dates
:param first_date: date in unix time
:param second_date: date in unix time
:return: time difference between 2 dates
"""
return math.ceil(abs(first_date - second_date) / EntityExtraction.one_month)
|
mit
|
mollstam/UnrealPy
|
UnrealPyEmbed/Source/Python/Lib/python27/lib2to3/main.py
|
250
|
11605
|
"""
Main program for 2to3.
"""
from __future__ import with_statement
import sys
import os
import difflib
import logging
import shutil
import optparse
from . import refactor
def diff_texts(a, b, filename):
"""Return a unified diff of two strings."""
a = a.splitlines()
b = b.splitlines()
return difflib.unified_diff(a, b, filename, filename,
"(original)", "(refactored)",
lineterm="")
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
"""
A refactoring tool that can avoid overwriting its input files.
Prints output to stdout.
    Output files can optionally be written to a different directory and/or
have an extra file suffix appended to their name for use in situations
where you do not want to replace the input files.
"""
def __init__(self, fixers, options, explicit, nobackups, show_diffs,
input_base_dir='', output_dir='', append_suffix=''):
"""
Args:
fixers: A list of fixers to import.
options: A dict with RefactoringTool configuration.
            explicit: A list of fixers to run even if they are marked
                explicit (explicit fixers are not run by default).
nobackups: If true no backup '.bak' files will be created for those
files that are being refactored.
show_diffs: Should diffs of the refactoring be printed to stdout?
input_base_dir: The base directory for all input files. This class
will strip this path prefix off of filenames before substituting
it with output_dir. Only meaningful if output_dir is supplied.
All files processed by refactor() must start with this path.
output_dir: If supplied, all converted files will be written into
this directory tree instead of input_base_dir.
append_suffix: If supplied, all files output by this tool will have
this appended to their filename. Useful for changing .py to
.py3 for example by passing append_suffix='3'.
"""
self.nobackups = nobackups
self.show_diffs = show_diffs
if input_base_dir and not input_base_dir.endswith(os.sep):
input_base_dir += os.sep
self._input_base_dir = input_base_dir
self._output_dir = output_dir
self._append_suffix = append_suffix
super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
def log_error(self, msg, *args, **kwargs):
self.errors.append((msg, args, kwargs))
self.logger.error(msg, *args, **kwargs)
def write_file(self, new_text, filename, old_text, encoding):
orig_filename = filename
if self._output_dir:
if filename.startswith(self._input_base_dir):
filename = os.path.join(self._output_dir,
filename[len(self._input_base_dir):])
else:
raise ValueError('filename %s does not start with the '
'input_base_dir %s' % (
filename, self._input_base_dir))
if self._append_suffix:
filename += self._append_suffix
if orig_filename != filename:
output_dir = os.path.dirname(filename)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
self.log_message('Writing converted %s to %s.', orig_filename,
filename)
if not self.nobackups:
# Make backup
backup = filename + ".bak"
if os.path.lexists(backup):
try:
os.remove(backup)
except os.error, err:
self.log_message("Can't remove backup %s", backup)
try:
os.rename(filename, backup)
except os.error, err:
self.log_message("Can't rename %s to %s", filename, backup)
# Actually write the new file
write = super(StdoutRefactoringTool, self).write_file
write(new_text, filename, old_text, encoding)
if not self.nobackups:
shutil.copymode(backup, filename)
if orig_filename != filename:
# Preserve the file mode in the new output directory.
shutil.copymode(orig_filename, filename)
def print_output(self, old, new, filename, equal):
if equal:
self.log_message("No changes to %s", filename)
else:
self.log_message("Refactored %s", filename)
if self.show_diffs:
diff_lines = diff_texts(old, new, filename)
try:
if self.output_lock is not None:
with self.output_lock:
for line in diff_lines:
print line
sys.stdout.flush()
else:
for line in diff_lines:
print line
except UnicodeEncodeError:
warn("couldn't encode %s's diff for your terminal" %
(filename,))
return
def warn(msg):
print >> sys.stderr, "WARNING: %s" % (msg,)
def main(fixer_pkg, args=None):
"""Main program.
Args:
fixer_pkg: the name of a package where the fixers are located.
args: optional; a list of command line arguments. If omitted,
sys.argv[1:] is used.
Returns a suggested exit status (0, 1, 2).
"""
# Set up option parser
parser = optparse.OptionParser(usage="2to3 [options] file|dir ...")
parser.add_option("-d", "--doctests_only", action="store_true",
help="Fix up doctests only")
parser.add_option("-f", "--fix", action="append", default=[],
help="Each FIX specifies a transformation; default: all")
parser.add_option("-j", "--processes", action="store", default=1,
type="int", help="Run 2to3 concurrently")
parser.add_option("-x", "--nofix", action="append", default=[],
help="Prevent a transformation from being run")
parser.add_option("-l", "--list-fixes", action="store_true",
help="List available transformations")
parser.add_option("-p", "--print-function", action="store_true",
help="Modify the grammar so that print() is a function")
parser.add_option("-v", "--verbose", action="store_true",
help="More verbose logging")
parser.add_option("--no-diffs", action="store_true",
help="Don't show diffs of the refactoring")
parser.add_option("-w", "--write", action="store_true",
help="Write back modified files")
parser.add_option("-n", "--nobackups", action="store_true", default=False,
help="Don't write backups for modified files")
parser.add_option("-o", "--output-dir", action="store", type="str",
default="", help="Put output files in this directory "
"instead of overwriting the input files. Requires -n.")
parser.add_option("-W", "--write-unchanged-files", action="store_true",
help="Also write files even if no changes were required"
" (useful with --output-dir); implies -w.")
parser.add_option("--add-suffix", action="store", type="str", default="",
help="Append this string to all output filenames."
" Requires -n if non-empty. "
"ex: --add-suffix='3' will generate .py3 files.")
# Parse command line arguments
refactor_stdin = False
flags = {}
options, args = parser.parse_args(args)
if options.write_unchanged_files:
flags["write_unchanged_files"] = True
if not options.write:
warn("--write-unchanged-files/-W implies -w.")
options.write = True
# If we allowed these, the original files would be renamed to backup names
# but not replaced.
if options.output_dir and not options.nobackups:
parser.error("Can't use --output-dir/-o without -n.")
if options.add_suffix and not options.nobackups:
parser.error("Can't use --add-suffix without -n.")
if not options.write and options.no_diffs:
warn("not writing files and not printing diffs; that's not very useful")
if not options.write and options.nobackups:
parser.error("Can't use -n without -w")
if options.list_fixes:
print "Available transformations for the -f/--fix option:"
for fixname in refactor.get_all_fix_names(fixer_pkg):
print fixname
if not args:
return 0
if not args:
print >> sys.stderr, "At least one file or directory argument required."
print >> sys.stderr, "Use --help to show usage."
return 2
if "-" in args:
refactor_stdin = True
if options.write:
print >> sys.stderr, "Can't write to stdin."
return 2
if options.print_function:
flags["print_function"] = True
# Set up logging handler
level = logging.DEBUG if options.verbose else logging.INFO
logging.basicConfig(format='%(name)s: %(message)s', level=level)
logger = logging.getLogger('lib2to3.main')
# Initialize the refactoring tool
avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
explicit = set()
if options.fix:
all_present = False
for fix in options.fix:
if fix == "all":
all_present = True
else:
explicit.add(fixer_pkg + ".fix_" + fix)
requested = avail_fixes.union(explicit) if all_present else explicit
else:
requested = avail_fixes.union(explicit)
fixer_names = requested.difference(unwanted_fixes)
input_base_dir = os.path.commonprefix(args)
if (input_base_dir and not input_base_dir.endswith(os.sep)
and not os.path.isdir(input_base_dir)):
        # One or more similar names were passed; their directory is the base.
        # os.path.commonprefix() is ignorant of path elements; this corrects
        # for that weird API.
input_base_dir = os.path.dirname(input_base_dir)
if options.output_dir:
input_base_dir = input_base_dir.rstrip(os.sep)
logger.info('Output in %r will mirror the input directory %r layout.',
options.output_dir, input_base_dir)
rt = StdoutRefactoringTool(
sorted(fixer_names), flags, sorted(explicit),
options.nobackups, not options.no_diffs,
input_base_dir=input_base_dir,
output_dir=options.output_dir,
append_suffix=options.add_suffix)
# Refactor all files and directories passed as arguments
if not rt.errors:
if refactor_stdin:
rt.refactor_stdin()
else:
try:
rt.refactor(args, options.write, options.doctests_only,
options.processes)
except refactor.MultiprocessingUnsupported:
assert options.processes > 1
print >> sys.stderr, "Sorry, -j isn't " \
"supported on this platform."
return 1
rt.summarize()
# Return error status (0 if rt.errors is zero)
return int(bool(rt.errors))
|
mit
|
jocelynmass/nrf51
|
toolchain/arm_cm0_deprecated/arm-none-eabi/share/gdb/python/gdb/frames.py
|
68
|
8031
|
# Frame-filter commands.
# Copyright (C) 2013-2015 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Internal functions for working with frame-filters."""
import gdb
from gdb.FrameIterator import FrameIterator
from gdb.FrameDecorator import FrameDecorator
import itertools
import collections
def get_priority(filter_item):
""" Internal worker function to return the frame-filter's priority
from a frame filter object. This is a fail free function as it is
used in sorting and filtering. If a badly implemented frame
filter does not implement the priority attribute, return zero
(otherwise sorting/filtering will fail and prevent other frame
filters from executing).
Arguments:
filter_item: An object conforming to the frame filter
interface.
Returns:
The priority of the frame filter from the "priority"
attribute, or zero.
"""
# Do not fail here, as the sort will fail. If a filter has not
# (incorrectly) set a priority, set it to zero.
return getattr(filter_item, "priority", 0)
def set_priority(filter_item, priority):
""" Internal worker function to set the frame-filter's priority.
Arguments:
filter_item: An object conforming to the frame filter
interface.
priority: The priority to assign as an integer.
"""
filter_item.priority = priority
def get_enabled(filter_item):
""" Internal worker function to return a filter's enabled state
from a frame filter object. This is a fail free function as it is
used in sorting and filtering. If a badly implemented frame
filter does not implement the enabled attribute, return False
(otherwise sorting/filtering will fail and prevent other frame
filters from executing).
Arguments:
filter_item: An object conforming to the frame filter
interface.
Returns:
The enabled state of the frame filter from the "enabled"
attribute, or False.
"""
# If the filter class is badly implemented when called from the
# Python filter command, do not cease filter operations, just set
# enabled to False.
return getattr(filter_item, "enabled", False)
def set_enabled(filter_item, state):
""" Internal Worker function to set the frame-filter's enabled
state.
Arguments:
filter_item: An object conforming to the frame filter
interface.
state: True or False, depending on desired state.
"""
filter_item.enabled = state
def return_list(name):
""" Internal Worker function to return the frame filter
dictionary, depending on the name supplied as an argument. If the
name is not "all", "global" or "progspace", it is assumed to name
an object-file.
Arguments:
name: The name of the list, as specified by GDB user commands.
Returns:
A dictionary object for a single specified dictionary, or a
list containing all the items for "all"
Raises:
gdb.GdbError: A dictionary of that name cannot be found.
"""
# If all dictionaries are wanted in the case of "all" we
# cannot return a combined dictionary as keys() may clash in
# between different dictionaries. As we just want all the frame
# filters to enable/disable them all, just return the combined
# items() as a chained iterator of dictionary values.
if name == "all":
glob = gdb.frame_filters.values()
prog = gdb.current_progspace().frame_filters.values()
return_iter = itertools.chain(glob, prog)
for objfile in gdb.objfiles():
return_iter = itertools.chain(return_iter, objfile.frame_filters.values())
return return_iter
if name == "global":
return gdb.frame_filters
else:
if name == "progspace":
cp = gdb.current_progspace()
return cp.frame_filters
else:
for objfile in gdb.objfiles():
if name == objfile.filename:
return objfile.frame_filters
msg = "Cannot find frame-filter dictionary for '" + name + "'"
raise gdb.GdbError(msg)
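# Restating the behaviour above: return_list("global") is gdb.frame_filters,
# return_list("progspace") is the current progspace's dictionary,
# return_list("all") chains every dictionary's values, and any other name
# must match an objfile's filename.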
def _sort_list():
""" Internal Worker function to merge all known frame-filter
lists, prune any filters with the state set to "disabled", and
sort the list on the frame-filter's "priority" attribute.
Returns:
sorted_list: A sorted, pruned list of frame filters to
execute.
"""
all_filters = return_list("all")
sorted_frame_filters = sorted(all_filters, key = get_priority,
reverse = True)
sorted_frame_filters = filter(get_enabled,
sorted_frame_filters)
return sorted_frame_filters
def execute_frame_filters(frame, frame_low, frame_high):
""" Internal function called from GDB that will execute the chain
of frame filters. Each filter is executed in priority order.
After the execution completes, slice the iterator to frame_low -
frame_high range.
Arguments:
frame: The initial frame.
frame_low: The low range of the slice. If this is a negative
integer then it indicates a backward slice (ie bt -4) which
counts backward from the last frame in the backtrace.
frame_high: The high range of the slice. If this is -1 then
it indicates all frames until the end of the stack from
frame_low.
Returns:
frame_iterator: The sliced iterator after all frame
filters have had a change to execute, or None if no frame
filters are registered.
"""
# Get a sorted list of frame filters.
sorted_list = list(_sort_list())
# Check to see if there are any frame-filters. If not, just
# return None and let default backtrace printing occur.
if len(sorted_list) == 0:
return None
frame_iterator = FrameIterator(frame)
# Apply a basic frame decorator to all gdb.Frames. This unifies
# the interface. Python 3.x moved the itertools.imap
# functionality to map(), so check if it is available.
if hasattr(itertools,"imap"):
frame_iterator = itertools.imap(FrameDecorator, frame_iterator)
else:
frame_iterator = map(FrameDecorator, frame_iterator)
for ff in sorted_list:
frame_iterator = ff.filter(frame_iterator)
# Slicing
# Is this a slice from the end of the backtrace, ie bt -2?
if frame_low < 0:
count = 0
slice_length = abs(frame_low)
        # We cannot use the maxlen argument of deque, as it appeared in
        # Python 2.6 and some GDB builds might ship an older Python.
sliced = collections.deque()
for frame_item in frame_iterator:
if count >= slice_length:
                sliced.popleft()
count = count + 1
sliced.append(frame_item)
return iter(sliced)
# -1 for frame_high means until the end of the backtrace. Set to
# None if that is the case, to indicate to itertools.islice to
# slice to the end of the iterator.
if frame_high == -1:
frame_high = None
else:
# As frames start from 0, add one to frame_high so islice
# correctly finds the end
        frame_high = frame_high + 1
sliced = itertools.islice(frame_iterator, frame_low, frame_high)
return sliced
|
gpl-2.0
|
magne4000/festival
|
app.py
|
1
|
1221
|
import os
import re
from flask import Flask
from datetime import timedelta
def interval_to_timedelta(interval):
if isinstance(interval, int):
interval = "%ds" % interval
ratios = {
's': 'seconds',
'm': 'minutes',
'h': 'hours',
'd': 'days',
'w': 'weeks'
}
return timedelta(**{ratios[interval[-1:]]: int(interval[0:-1])})
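# e.g. interval_to_timedelta('2h') -> timedelta(hours=2),
#      interval_to_timedelta(90)  -> timedelta(seconds=90)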
def shape_config(myapp, args):
myapp.config.from_pyfile(args.config or os.path.join(os.path.dirname(__file__), 'settings.cfg'))
myapp.config['SCANNER_MODES'] = ['tags']
if myapp.config['SCANNER_FOLDER_PATTERNS'] is not None and len(myapp.config['SCANNER_FOLDER_PATTERNS']) > 0:
myapp.config['SCANNER_MODES'].append('folder')
for i, pattern in enumerate(myapp.config['SCANNER_FOLDER_PATTERNS']):
myapp.config['SCANNER_FOLDER_PATTERNS'][i] = re.compile(pattern)
myapp.config['COVERS_FETCH_ONLINE_INTERVAL'] = interval_to_timedelta(myapp.config['COVERS_FETCH_ONLINE_INTERVAL'])
myapp.config['SCANNER_REFRESH_INTERVAL'] = interval_to_timedelta(myapp.config['SCANNER_REFRESH_INTERVAL'])
return myapp
def get_app(args):
myapp = Flask(__name__)
return shape_config(myapp, args)
|
mit
|
anbasile/flask_sample
|
flask/lib/python2.7/site-packages/flask/testsuite/blueprints.py
|
563
|
28089
|
# -*- coding: utf-8 -*-
"""
flask.testsuite.blueprints
~~~~~~~~~~~~~~~~~~~~~~~~~~
Blueprints (and currently modules)
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
import warnings
from flask.testsuite import FlaskTestCase, emits_module_deprecation_warning
from flask._compat import text_type
from werkzeug.exceptions import NotFound
from werkzeug.http import parse_cache_control_header
from jinja2 import TemplateNotFound
# import moduleapp here because it uses deprecated features and we don't
# want to see the warnings
warnings.simplefilter('ignore', DeprecationWarning)
from moduleapp import app as moduleapp
warnings.simplefilter('default', DeprecationWarning)
class ModuleTestCase(FlaskTestCase):
@emits_module_deprecation_warning
def test_basic_module(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin', url_prefix='/admin')
@admin.route('/')
def admin_index():
return 'admin index'
@admin.route('/login')
def admin_login():
return 'admin login'
@admin.route('/logout')
def admin_logout():
return 'admin logout'
@app.route('/')
def index():
return 'the index'
app.register_module(admin)
c = app.test_client()
self.assert_equal(c.get('/').data, b'the index')
self.assert_equal(c.get('/admin/').data, b'admin index')
self.assert_equal(c.get('/admin/login').data, b'admin login')
self.assert_equal(c.get('/admin/logout').data, b'admin logout')
@emits_module_deprecation_warning
def test_default_endpoint_name(self):
app = flask.Flask(__name__)
mod = flask.Module(__name__, 'frontend')
def index():
return 'Awesome'
mod.add_url_rule('/', view_func=index)
app.register_module(mod)
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'Awesome')
with app.test_request_context():
self.assert_equal(flask.url_for('frontend.index'), '/')
@emits_module_deprecation_warning
def test_request_processing(self):
catched = []
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin', url_prefix='/admin')
@admin.before_request
def before_admin_request():
catched.append('before-admin')
@admin.after_request
def after_admin_request(response):
catched.append('after-admin')
return response
@admin.route('/')
def admin_index():
return 'the admin'
@app.before_request
def before_request():
catched.append('before-app')
@app.after_request
def after_request(response):
catched.append('after-app')
return response
@app.route('/')
def index():
return 'the index'
app.register_module(admin)
c = app.test_client()
self.assert_equal(c.get('/').data, b'the index')
self.assert_equal(catched, ['before-app', 'after-app'])
del catched[:]
self.assert_equal(c.get('/admin/').data, b'the admin')
self.assert_equal(catched, ['before-app', 'before-admin',
'after-admin', 'after-app'])
@emits_module_deprecation_warning
def test_context_processors(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin', url_prefix='/admin')
@app.context_processor
def inject_all_regular():
return {'a': 1}
@admin.context_processor
def inject_admin():
return {'b': 2}
@admin.app_context_processor
def inject_all_module():
return {'c': 3}
@app.route('/')
def index():
return flask.render_template_string('{{ a }}{{ b }}{{ c }}')
@admin.route('/')
def admin_index():
return flask.render_template_string('{{ a }}{{ b }}{{ c }}')
app.register_module(admin)
c = app.test_client()
self.assert_equal(c.get('/').data, b'13')
self.assert_equal(c.get('/admin/').data, b'123')
@emits_module_deprecation_warning
def test_late_binding(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin')
@admin.route('/')
def index():
return '42'
app.register_module(admin, url_prefix='/admin')
self.assert_equal(app.test_client().get('/admin/').data, b'42')
@emits_module_deprecation_warning
def test_error_handling(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin')
@admin.app_errorhandler(404)
def not_found(e):
return 'not found', 404
@admin.app_errorhandler(500)
def internal_server_error(e):
return 'internal server error', 500
@admin.route('/')
def index():
flask.abort(404)
@admin.route('/error')
def error():
1 // 0
app.register_module(admin)
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.status_code, 404)
self.assert_equal(rv.data, b'not found')
rv = c.get('/error')
self.assert_equal(rv.status_code, 500)
self.assert_equal(b'internal server error', rv.data)
def test_templates_and_static(self):
app = moduleapp
app.testing = True
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Hello from the Frontend')
rv = c.get('/admin/')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/index2')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/static/test.txt')
self.assert_equal(rv.data.strip(), b'Admin File')
rv.close()
rv = c.get('/admin/static/css/test.css')
self.assert_equal(rv.data.strip(), b'/* nested file */')
rv.close()
with app.test_request_context():
self.assert_equal(flask.url_for('admin.static', filename='test.txt'),
'/admin/static/test.txt')
with app.test_request_context():
try:
flask.render_template('missing.html')
except TemplateNotFound as e:
self.assert_equal(e.name, 'missing.html')
else:
self.assert_true(0, 'expected exception')
with flask.Flask(__name__).test_request_context():
self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested')
def test_safe_access(self):
app = moduleapp
with app.test_request_context():
f = app.view_functions['admin.static']
try:
f('/etc/passwd')
except NotFound:
pass
else:
self.assert_true(0, 'expected exception')
try:
f('../__init__.py')
except NotFound:
pass
else:
self.assert_true(0, 'expected exception')
        # testcase for a security issue that may exist on Windows systems
import os
import ntpath
old_path = os.path
os.path = ntpath
try:
try:
f('..\\__init__.py')
except NotFound:
pass
else:
self.assert_true(0, 'expected exception')
finally:
os.path = old_path
@emits_module_deprecation_warning
def test_endpoint_decorator(self):
from werkzeug.routing import Submount, Rule
from flask import Module
app = flask.Flask(__name__)
app.testing = True
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
module = Module(__name__, __name__)
@module.endpoint('bar')
def bar():
return 'bar'
@module.endpoint('index')
def index():
return 'index'
app.register_module(module)
c = app.test_client()
self.assert_equal(c.get('/foo/').data, b'index')
self.assert_equal(c.get('/foo/bar').data, b'bar')
class BlueprintTestCase(FlaskTestCase):
def test_blueprint_specific_error_handling(self):
frontend = flask.Blueprint('frontend', __name__)
backend = flask.Blueprint('backend', __name__)
sideend = flask.Blueprint('sideend', __name__)
@frontend.errorhandler(403)
def frontend_forbidden(e):
return 'frontend says no', 403
@frontend.route('/frontend-no')
def frontend_no():
flask.abort(403)
@backend.errorhandler(403)
def backend_forbidden(e):
return 'backend says no', 403
@backend.route('/backend-no')
def backend_no():
flask.abort(403)
@sideend.route('/what-is-a-sideend')
def sideend_no():
flask.abort(403)
app = flask.Flask(__name__)
app.register_blueprint(frontend)
app.register_blueprint(backend)
app.register_blueprint(sideend)
@app.errorhandler(403)
def app_forbidden(e):
return 'application itself says no', 403
c = app.test_client()
self.assert_equal(c.get('/frontend-no').data, b'frontend says no')
self.assert_equal(c.get('/backend-no').data, b'backend says no')
self.assert_equal(c.get('/what-is-a-sideend').data, b'application itself says no')
def test_blueprint_url_definitions(self):
bp = flask.Blueprint('test', __name__)
@bp.route('/foo', defaults={'baz': 42})
def foo(bar, baz):
return '%s/%d' % (bar, baz)
@bp.route('/bar')
def bar(bar):
return text_type(bar)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/1', url_defaults={'bar': 23})
app.register_blueprint(bp, url_prefix='/2', url_defaults={'bar': 19})
c = app.test_client()
self.assert_equal(c.get('/1/foo').data, b'23/42')
self.assert_equal(c.get('/2/foo').data, b'19/42')
self.assert_equal(c.get('/1/bar').data, b'23')
self.assert_equal(c.get('/2/bar').data, b'19')
def test_blueprint_url_processors(self):
bp = flask.Blueprint('frontend', __name__, url_prefix='/<lang_code>')
@bp.url_defaults
def add_language_code(endpoint, values):
values.setdefault('lang_code', flask.g.lang_code)
@bp.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code')
@bp.route('/')
def index():
return flask.url_for('.about')
@bp.route('/about')
def about():
return flask.url_for('.index')
app = flask.Flask(__name__)
app.register_blueprint(bp)
c = app.test_client()
self.assert_equal(c.get('/de/').data, b'/de/about')
self.assert_equal(c.get('/de/about').data, b'/de/')
def test_templates_and_static(self):
from blueprintapp import app
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Hello from the Frontend')
rv = c.get('/admin/')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/index2')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/static/test.txt')
self.assert_equal(rv.data.strip(), b'Admin File')
rv.close()
rv = c.get('/admin/static/css/test.css')
self.assert_equal(rv.data.strip(), b'/* nested file */')
rv.close()
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
try:
expected_max_age = 3600
if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == expected_max_age:
expected_max_age = 7200
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = expected_max_age
rv = c.get('/admin/static/css/test.css')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, expected_max_age)
rv.close()
finally:
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default
with app.test_request_context():
self.assert_equal(flask.url_for('admin.static', filename='test.txt'),
'/admin/static/test.txt')
with app.test_request_context():
try:
flask.render_template('missing.html')
except TemplateNotFound as e:
self.assert_equal(e.name, 'missing.html')
else:
self.assert_true(0, 'expected exception')
with flask.Flask(__name__).test_request_context():
self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested')
def test_default_static_cache_timeout(self):
app = flask.Flask(__name__)
class MyBlueprint(flask.Blueprint):
def get_send_file_max_age(self, filename):
return 100
blueprint = MyBlueprint('blueprint', __name__, static_folder='static')
app.register_blueprint(blueprint)
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
try:
with app.test_request_context():
unexpected_max_age = 3600
if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == unexpected_max_age:
unexpected_max_age = 7200
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = unexpected_max_age
rv = blueprint.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 100)
rv.close()
finally:
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default
def test_templates_list(self):
from blueprintapp import app
templates = sorted(app.jinja_env.list_templates())
self.assert_equal(templates, ['admin/index.html',
'frontend/index.html'])
def test_dotted_names(self):
frontend = flask.Blueprint('myapp.frontend', __name__)
backend = flask.Blueprint('myapp.backend', __name__)
@frontend.route('/fe')
def frontend_index():
return flask.url_for('myapp.backend.backend_index')
@frontend.route('/fe2')
def frontend_page2():
return flask.url_for('.frontend_index')
@backend.route('/be')
def backend_index():
return flask.url_for('myapp.frontend.frontend_index')
app = flask.Flask(__name__)
app.register_blueprint(frontend)
app.register_blueprint(backend)
c = app.test_client()
self.assert_equal(c.get('/fe').data.strip(), b'/be')
self.assert_equal(c.get('/fe2').data.strip(), b'/fe')
self.assert_equal(c.get('/be').data.strip(), b'/fe')
def test_dotted_names_from_app(self):
app = flask.Flask(__name__)
app.testing = True
test = flask.Blueprint('test', __name__)
@app.route('/')
def app_index():
return flask.url_for('test.index')
@test.route('/test/')
def index():
return flask.url_for('app_index')
app.register_blueprint(test)
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.data, b'/test/')
def test_empty_url_defaults(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/', defaults={'page': 1})
@bp.route('/page/<int:page>')
def something(page):
return str(page)
app = flask.Flask(__name__)
app.register_blueprint(bp)
c = app.test_client()
self.assert_equal(c.get('/').data, b'1')
self.assert_equal(c.get('/page/2').data, b'2')
def test_route_decorator_custom_endpoint(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/foo')
def foo():
return flask.request.endpoint
@bp.route('/bar', endpoint='bar')
def foo_bar():
return flask.request.endpoint
@bp.route('/bar/123', endpoint='123')
def foo_bar_foo():
return flask.request.endpoint
@bp.route('/bar/foo')
def bar_foo():
return flask.request.endpoint
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.request.endpoint
c = app.test_client()
self.assertEqual(c.get('/').data, b'index')
self.assertEqual(c.get('/py/foo').data, b'bp.foo')
self.assertEqual(c.get('/py/bar').data, b'bp.bar')
self.assertEqual(c.get('/py/bar/123').data, b'bp.123')
self.assertEqual(c.get('/py/bar/foo').data, b'bp.bar_foo')
def test_route_decorator_custom_endpoint_with_dots(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/foo')
def foo():
return flask.request.endpoint
try:
@bp.route('/bar', endpoint='bar.bar')
def foo_bar():
return flask.request.endpoint
except AssertionError:
pass
else:
raise AssertionError('expected AssertionError not raised')
try:
@bp.route('/bar/123', endpoint='bar.123')
def foo_bar_foo():
return flask.request.endpoint
except AssertionError:
pass
else:
raise AssertionError('expected AssertionError not raised')
def foo_foo_foo():
pass
self.assertRaises(
AssertionError,
lambda: bp.add_url_rule(
'/bar/123', endpoint='bar.123', view_func=foo_foo_foo
)
)
self.assertRaises(
AssertionError,
bp.route('/bar/123', endpoint='bar.123'),
lambda: None
)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
c = app.test_client()
self.assertEqual(c.get('/py/foo').data, b'bp.foo')
        # The rules didn't actually make it through
rv = c.get('/py/bar')
assert rv.status_code == 404
rv = c.get('/py/bar/123')
assert rv.status_code == 404
def test_template_filter(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_add_template_filter(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_template_filter_with_name(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter('strrev')
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_add_template_filter_with_name(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, 'strrev')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_template_filter_with_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_after_route_with_template(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix='/py')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_template(self):
bp = flask.Blueprint('bp', __name__)
def super_reverse(s):
return s[::-1]
bp.add_app_template_filter(super_reverse)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter('super_reverse')
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, 'super_reverse')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_test(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test()
def is_boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('is_boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['is_boolean'](False))
def test_add_template_test(self):
bp = flask.Blueprint('bp', __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('is_boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['is_boolean'](False))
def test_template_test_with_name(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test('boolean')
def is_boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_add_template_test_with_name(self):
bp = flask.Blueprint('bp', __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean, 'boolean')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_template_test_with_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test()
def boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_template_test_after_route_with_template(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test()
def boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix='/py')
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_test_with_template(self):
bp = flask.Blueprint('bp', __name__)
def boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(boolean)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_template_test_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test('boolean')
def is_boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_test_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean, 'boolean')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(BlueprintTestCase))
suite.addTest(unittest.makeSuite(ModuleTestCase))
return suite
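# A note on the fixture templates exercised above (their contents are not part
# of this file): for the assertions to hold, 'template_filter.html' presumably
# renders something like
#
#     {{ value|super_reverse }}
#
# and 'template_test.html' something like
#
#     {% if value is boolean %}Success!{% endif %}
#
# i.e. each template refers to the filter/test by the name under which the
# blueprint registered it on the application's jinja_env.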
|
mit
|
amir-qayyum-khan/edx-platform
|
lms/djangoapps/courseware/tests/test_self_paced_overrides.py
|
23
|
6451
|
"""Tests for self-paced course due date overrides."""
# pylint: disable=missing-docstring
import datetime
import pytz
from django.test.utils import override_settings
from mock import patch
from courseware.tests.factories import BetaTesterFactory
from courseware.access import has_access
from lms.djangoapps.ccx.tests.test_overrides import inject_field_overrides
from lms.djangoapps.django_comment_client.utils import get_accessible_discussion_xblocks
from lms.djangoapps.courseware.field_overrides import OverrideFieldData, OverrideModulestoreFieldData
from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
@override_settings(
XBLOCK_FIELD_DATA_WRAPPERS=['lms.djangoapps.courseware.field_overrides:OverrideModulestoreFieldData.wrap'],
MODULESTORE_FIELD_OVERRIDE_PROVIDERS=['courseware.self_paced_overrides.SelfPacedDateOverrideProvider'],
)
class SelfPacedDateOverrideTest(ModuleStoreTestCase):
"""
Tests for self-paced due date overrides.
"""
def setUp(self):
self.reset_setting_cache_variables()
super(SelfPacedDateOverrideTest, self).setUp()
SelfPacedConfiguration(enabled=True).save()
self.non_staff_user, __ = self.create_non_staff_user()
self.now = datetime.datetime.now(pytz.UTC).replace(microsecond=0)
self.future = self.now + datetime.timedelta(days=30)
def tearDown(self):
self.reset_setting_cache_variables()
super(SelfPacedDateOverrideTest, self).tearDown()
def reset_setting_cache_variables(self):
"""
The overridden settings for this class get cached on class variables.
Reset those to None before and after running the test to ensure clean
behavior.
"""
OverrideFieldData.provider_classes = None
OverrideModulestoreFieldData.provider_classes = None
def setup_course(self, **course_kwargs):
"""Set up a course with provided course attributes.
Creates a child block with a due date, and ensures that field
overrides are correctly applied for both blocks.
"""
course = CourseFactory.create(**course_kwargs)
section = ItemFactory.create(parent=course, due=self.now)
inject_field_overrides((course, section), course, self.user)
return (course, section)
def create_discussion_xblocks(self, parent):
# Create a released discussion xblock
ItemFactory.create(
parent=parent,
category='discussion',
display_name='released',
start=self.now,
)
# Create a scheduled discussion xblock
ItemFactory.create(
parent=parent,
category='discussion',
display_name='scheduled',
start=self.future,
)
def test_instructor_paced_due_date(self):
__, ip_section = self.setup_course(display_name="Instructor Paced Course", self_paced=False)
self.assertEqual(ip_section.due, self.now)
def test_self_paced_due_date(self):
__, sp_section = self.setup_course(display_name="Self-Paced Course", self_paced=True)
self.assertIsNone(sp_section.due)
def test_self_paced_disabled_due_date(self):
SelfPacedConfiguration(enabled=False).save()
__, sp_section = self.setup_course(display_name="Self-Paced Course", self_paced=True)
self.assertEqual(sp_section.due, self.now)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_course_access_to_beta_users(self):
"""
        Test that beta testers can access a self-paced course prior to its start date.
"""
now = datetime.datetime.now(pytz.UTC)
one_month_from_now = now + datetime.timedelta(days=30)
course_options = {
'days_early_for_beta': 100,
'self_paced': True,
'start': one_month_from_now,
}
        # Create a self-paced course and add a beta tester to it
self_paced_course, self_paced_section = self.setup_course(**course_options)
beta_tester = BetaTesterFactory(course_key=self_paced_course.id)
        # Verify the course is self-paced and has a start date, while the section does not.
self.assertTrue(self_paced_course.self_paced)
self.assertEqual(self_paced_course.start, one_month_from_now)
self.assertIsNone(self_paced_section.start)
        # Verify that a non-staff user does not have access to the course
self.assertFalse(has_access(self.non_staff_user, 'load', self_paced_course))
        # Verify the beta tester can access the course as well as the course sections
self.assertTrue(has_access(beta_tester, 'load', self_paced_course))
self.assertTrue(has_access(beta_tester, 'load', self_paced_section, self_paced_course.id))
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_instructor_paced_discussion_xblock_visibility(self):
"""
Verify that discussion xblocks scheduled for release in the future are
not visible to students in an instructor-paced course.
"""
course, section = self.setup_course(start=self.now, self_paced=False)
self.create_discussion_xblocks(section)
# Only the released xblocks should be visible when the course is instructor-paced.
xblocks = get_accessible_discussion_xblocks(course, self.non_staff_user)
self.assertTrue(
all(xblock.display_name == 'released' for xblock in xblocks)
)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_self_paced_discussion_xblock_visibility(self):
"""
Regression test. Verify that discussion xblocks scheduled for release
in the future are visible to students in a self-paced course.
"""
course, section = self.setup_course(start=self.now, self_paced=True)
self.create_discussion_xblocks(section)
# The scheduled xblocks should be visible when the course is self-paced.
xblocks = get_accessible_discussion_xblocks(course, self.non_staff_user)
self.assertEqual(len(xblocks), 2)
self.assertTrue(
any(xblock.display_name == 'scheduled' for xblock in xblocks)
)
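# For orientation, a minimal sketch of the kind of override provider these
# tests exercise (a hypothetical simplification, not the actual edx-platform
# implementation): a field override provider that blanks out 'due' dates for
# self-paced courses.
#
#     class SelfPacedDateOverrideSketch(object):
#         def get(self, block, name, default):
#             # Drop the deadline: self-paced learners work on their own schedule.
#             if name == 'due':
#                 return None
#             return default
#
#         @classmethod
#         def enabled_for(cls, course):
#             # Only rewrite fields when the course is marked self-paced.
#             return course is not None and course.self_paced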
|
agpl-3.0
|
Limags/MissionPlanner
|
LogAnalyzer/tests/TestIMUMatch.py
|
61
|
3781
|
from LogAnalyzer import Test,TestResult
import DataflashLog
from math import sqrt
class TestIMUMatch(Test):
    '''test for discrepancy between IMU and IMU2 accelerometer data'''
def __init__(self):
Test.__init__(self)
self.name = "IMU Mismatch"
def run(self, logdata, verbose):
        # tuning parameters:
        warn_threshold = 0.75
        fail_threshold = 1.5
        filter_tc = 5.0
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
if ("IMU" in logdata.channels) and (not "IMU2" in logdata.channels):
self.result.status = TestResult.StatusType.NA
self.result.statusMessage = "No IMU2"
return
if (not "IMU" in logdata.channels) or (not "IMU2" in logdata.channels):
self.result.status = TestResult.StatusType.UNKNOWN
self.result.statusMessage = "No IMU log data"
return
imu1 = logdata.channels["IMU"]
imu2 = logdata.channels["IMU2"]
imu1_timems = imu1["TimeMS"].listData
imu1_accx = imu1["AccX"].listData
imu1_accy = imu1["AccY"].listData
imu1_accz = imu1["AccZ"].listData
imu2_timems = imu2["TimeMS"].listData
imu2_accx = imu2["AccX"].listData
imu2_accy = imu2["AccY"].listData
imu2_accz = imu2["AccZ"].listData
imu1 = []
imu2 = []
for i in range(len(imu1_timems)):
imu1.append({ 't': imu1_timems[i][1]*1.0E-3, 'x': imu1_accx[i][1], 'y': imu1_accy[i][1], 'z': imu1_accz[i][1]})
for i in range(len(imu2_timems)):
imu2.append({ 't': imu2_timems[i][1]*1.0E-3, 'x': imu2_accx[i][1], 'y': imu2_accy[i][1], 'z': imu2_accz[i][1]})
imu1.sort(key=lambda x: x['t'])
imu2.sort(key=lambda x: x['t'])
imu2_index = 0
last_t = None
xdiff_filtered = 0
ydiff_filtered = 0
zdiff_filtered = 0
max_diff_filtered = 0
for i in range(len(imu1)):
            # find the imu2 sample closest in time
            t = imu1[i]['t']
            dt = 0 if last_t is None else t - last_t
            dt = min(dt, 0.1)
            next_imu2 = None
            # use a separate index variable here: reusing `i` would clobber the
            # outer loop index and corrupt the imu1[i] lookups below
            for j in range(imu2_index, len(imu2)):
                next_imu2 = imu2[j]
                imu2_index = j
if next_imu2['t'] >= t:
break
prev_imu2 = imu2[imu2_index-1]
closest_imu2 = next_imu2 if abs(next_imu2['t']-t)<abs(prev_imu2['t']-t) else prev_imu2
xdiff = imu1[i]['x']-closest_imu2['x']
ydiff = imu1[i]['y']-closest_imu2['y']
zdiff = imu1[i]['z']-closest_imu2['z']
xdiff_filtered += (xdiff-xdiff_filtered)*dt/filter_tc
ydiff_filtered += (ydiff-ydiff_filtered)*dt/filter_tc
zdiff_filtered += (zdiff-zdiff_filtered)*dt/filter_tc
diff_filtered = sqrt(xdiff_filtered**2+ydiff_filtered**2+zdiff_filtered**2)
max_diff_filtered = max(max_diff_filtered,diff_filtered)
#print max_diff_filtered
last_t = t
if max_diff_filtered > fail_threshold:
self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold)
self.result.status = TestResult.StatusType.FAIL
elif max_diff_filtered > warn_threshold:
self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold)
self.result.status = TestResult.StatusType.WARN
else:
self.result.statusMessage = "(Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold, fail_threshold)
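# The accumulation above is a first-order (exponential) low-pass filter: each
# filtered difference moves toward the instantaneous difference with time
# constant filter_tc. A standalone sketch (illustrative helper, not part of
# the LogAnalyzer API):
#
#     def lowpass_step(filtered, sample, dt, tc):
#         # one step of y += (x - y) * dt / tc
#         return filtered + (sample - filtered) * dt / tc
#
#     y = 0.0
#     for _ in range(500):              # 10 s of data at 50 Hz (dt = 0.02)
#         y = lowpass_step(y, 1.0, 0.02, 5.0)
#     # y is now roughly 1 - exp(-10/5.0), i.e. about 0.86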
|
gpl-3.0
|
jfpla/odoo
|
openerp/addons/base/ir/ir_model.py
|
7
|
60967
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2014 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from collections import defaultdict
import logging
import re
import time
import types
import openerp
from openerp import SUPERUSER_ID
from openerp import models, tools, api
from openerp.modules.registry import RegistryManager
from openerp.osv import fields, osv
from openerp.osv.orm import BaseModel, Model, MAGIC_COLUMNS, except_orm
from openerp.tools import config
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
MODULE_UNINSTALL_FLAG = '_force_unlink'
def _get_fields_type(self, cr, uid, context=None):
    # Avoid too many nested `if`s below, as RedHat's Python 2.6
    # breaks on it. See bug 939653.
return sorted([(k,k) for k,v in fields.__dict__.iteritems()
if type(v) == types.TypeType and \
issubclass(v, fields._column) and \
v != fields._column and \
not v._deprecated and \
not issubclass(v, fields.function)])
def _in_modules(self, cr, uid, ids, field_name, arg, context=None):
#pseudo-method used by fields.function in ir.model/ir.model.fields
module_pool = self.pool["ir.module.module"]
installed_module_ids = module_pool.search(cr, uid, [('state','=','installed')])
installed_module_names = module_pool.read(cr, uid, installed_module_ids, ['name'], context=context)
installed_modules = set(x['name'] for x in installed_module_names)
result = {}
xml_ids = osv.osv._get_xml_ids(self, cr, uid, ids)
for k,v in xml_ids.iteritems():
result[k] = ', '.join(sorted(installed_modules & set(xml_id.split('.')[0] for xml_id in v)))
return result
class unknown(models.AbstractModel):
"""
Abstract model used as a substitute for relational fields with an unknown
comodel.
"""
_name = '_unknown'
class ir_model(osv.osv):
_name = 'ir.model'
_description = "Models"
_order = 'model'
def _is_osv_memory(self, cr, uid, ids, field_name, arg, context=None):
models = self.browse(cr, uid, ids, context=context)
res = dict.fromkeys(ids)
for model in models:
if model.model in self.pool:
res[model.id] = self.pool[model.model].is_transient()
else:
                _logger.error('Missing model %s', model.model)
return res
def _search_osv_memory(self, cr, uid, model, name, domain, context=None):
if not domain:
return []
__, operator, value = domain[0]
if operator not in ['=', '!=']:
            raise osv.except_osv(_("Invalid Search Criteria"), _('The osv_memory field can only be compared with the = and != operators.'))
value = bool(value) if operator == '=' else not bool(value)
all_model_ids = self.search(cr, uid, [], context=context)
is_osv_mem = self._is_osv_memory(cr, uid, all_model_ids, 'osv_memory', arg=None, context=context)
return [('id', 'in', [id for id in is_osv_mem if bool(is_osv_mem[id]) == value])]
def _view_ids(self, cr, uid, ids, field_name, arg, context=None):
models = self.browse(cr, uid, ids)
res = {}
for model in models:
res[model.id] = self.pool["ir.ui.view"].search(cr, uid, [('model', '=', model.model)])
return res
def _inherited_models(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for model in self.browse(cr, uid, ids, context=context):
res[model.id] = []
inherited_models = [model_name for model_name in self.pool[model.model]._inherits]
if inherited_models:
res[model.id] = self.search(cr, uid, [('model', 'in', inherited_models)], context=context)
return res
_columns = {
'name': fields.char('Model Description', translate=True, required=True),
'model': fields.char('Model', required=True, select=1),
'info': fields.text('Information'),
'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True, copy=True),
'inherited_model_ids': fields.function(_inherited_models, type="many2many", obj="ir.model", string="Inherited models",
help="The list of models that extends the current model."),
'state': fields.selection([('manual','Custom Object'),('base','Base Object')],'Type', readonly=True),
'access_ids': fields.one2many('ir.model.access', 'model_id', 'Access'),
'osv_memory': fields.function(_is_osv_memory, string='Transient Model', type='boolean',
fnct_search=_search_osv_memory,
help="This field specifies whether the model is transient or not (i.e. if records are automatically deleted from the database or not)"),
'modules': fields.function(_in_modules, type='char', string='In Modules', help='List of modules in which the object is defined or inherited'),
'view_ids': fields.function(_view_ids, type='one2many', obj='ir.ui.view', string='Views'),
}
_defaults = {
'model': 'x_',
'state': 'manual',
}
def _check_model_name(self, cr, uid, ids, context=None):
for model in self.browse(cr, uid, ids, context=context):
if model.state=='manual':
if not model.model.startswith('x_'):
return False
if not re.match('^[a-z_A-Z0-9.]+$',model.model):
return False
return True
def _model_name_msg(self, cr, uid, ids, context=None):
return _('The Object name must start with x_ and not contain any special character !')
_constraints = [
(_check_model_name, _model_name_msg, ['model']),
]
_sql_constraints = [
('obj_name_uniq', 'unique (model)', 'Each model must be unique!'),
]
# overridden to allow searching both on model name (model field)
# and model description (name field)
def _name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
if args is None:
args = []
domain = args + ['|', ('model', operator, name), ('name', operator, name)]
return self.name_get(cr, name_get_uid or uid,
super(ir_model, self).search(cr, uid, domain, limit=limit, context=context),
context=context)
def _drop_table(self, cr, uid, ids, context=None):
for model in self.browse(cr, uid, ids, context):
model_pool = self.pool[model.model]
cr.execute('select relkind from pg_class where relname=%s', (model_pool._table,))
result = cr.fetchone()
if result and result[0] == 'v':
            cr.execute('DROP VIEW %s' % (model_pool._table,))
elif result and result[0] == 'r':
cr.execute('DROP TABLE %s CASCADE' % (model_pool._table,))
return True
def unlink(self, cr, user, ids, context=None):
# Prevent manual deletion of module tables
if context is None: context = {}
if isinstance(ids, (int, long)):
ids = [ids]
if not context.get(MODULE_UNINSTALL_FLAG):
for model in self.browse(cr, user, ids, context):
if model.state != 'manual':
raise except_orm(_('Error'), _("Model '%s' contains module data and cannot be removed!") % (model.name,))
self._drop_table(cr, user, ids, context)
res = super(ir_model, self).unlink(cr, user, ids, context)
if not context.get(MODULE_UNINSTALL_FLAG):
# only reload pool for normal unlink. For module uninstall the
# reload is done independently in openerp.modules.loading
cr.commit() # must be committed before reloading registry in new cursor
api.Environment.reset()
RegistryManager.new(cr.dbname)
RegistryManager.signal_registry_change(cr.dbname)
return res
def write(self, cr, user, ids, vals, context=None):
if context:
context = dict(context)
context.pop('__last_update', None)
        # Filter out (4, id) link operations on field_id, because openerp-web
        # always writes (4, id, False) even for non-dirty items
if 'field_id' in vals:
vals['field_id'] = [op for op in vals['field_id'] if op[0] != 4]
return super(ir_model,self).write(cr, user, ids, vals, context)
def create(self, cr, user, vals, context=None):
if context is None:
context = {}
res = super(ir_model,self).create(cr, user, vals, context)
if vals.get('state','manual')=='manual':
# add model in registry
self.instanciate(cr, user, vals['model'], context)
self.pool.setup_models(cr, partial=(not self.pool.ready))
# update database schema
model = self.pool[vals['model']]
ctx = dict(context,
field_name=vals['name'],
field_state='manual',
select=vals.get('select_level', '0'),
update_custom_fields=True)
model._auto_init(cr, ctx)
model._auto_end(cr, ctx) # actually create FKs!
RegistryManager.signal_registry_change(cr.dbname)
return res
def instanciate(self, cr, user, model, context=None):
if isinstance(model, unicode):
model = model.encode('utf-8')
class CustomModel(models.Model):
_name = model
_module = False
_custom = True
CustomModel._build_model(self.pool, cr)
class ir_model_fields(osv.osv):
_name = 'ir.model.fields'
_description = "Fields"
_rec_name = 'field_description'
_columns = {
'name': fields.char('Name', required=True, select=1),
'complete_name': fields.char('Complete Name', select=1),
'model': fields.char('Object Name', required=True, select=1,
help="The technical name of the model this field belongs to"),
'relation': fields.char('Object Relation',
help="For relationship fields, the technical name of the target model"),
'relation_field': fields.char('Relation Field',
help="For one2many fields, the field on the target model that implement the opposite many2one relationship"),
'model_id': fields.many2one('ir.model', 'Model', required=True, select=True, ondelete='cascade',
help="The model this field belongs to"),
'field_description': fields.char('Field Label', required=True),
'ttype': fields.selection(_get_fields_type, 'Field Type', required=True),
'selection': fields.char('Selection Options', help="List of options for a selection field, "
"specified as a Python expression defining a list of (key, label) pairs. "
"For example: [('blue','Blue'),('yellow','Yellow')]"),
'required': fields.boolean('Required'),
'readonly': fields.boolean('Readonly'),
'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search (deprecated)')],'Searchable', required=True),
'translate': fields.boolean('Translatable', help="Whether values for this field can be translated (enables the translation mechanism for that field)"),
'size': fields.integer('Size'),
'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Type', required=True, readonly=True, select=1),
'on_delete': fields.selection([('cascade', 'Cascade'), ('set null', 'Set NULL'), ('restrict', 'Restrict')],
'On Delete', help='On delete property for many2one fields'),
'domain': fields.char('Domain', help="The optional domain to restrict possible values for relationship fields, "
"specified as a Python expression defining a list of triplets. "
"For example: [('color','=','red')]"),
'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'),
'selectable': fields.boolean('Selectable'),
'modules': fields.function(_in_modules, type='char', string='In Modules', help='List of modules in which the field is defined'),
'serialization_field_id': fields.many2one('ir.model.fields', 'Serialization Field', domain = "[('ttype','=','serialized')]",
ondelete='cascade', help="If set, this field will be stored in the sparse "
"structure of the serialization field, instead "
"of having its own database column. This cannot be "
"changed after creation."),
}
_defaults = {
'selection': "",
'domain': "[]",
'name': 'x_',
'state': 'manual',
'on_delete': 'set null',
'select_level': '0',
'field_description': '',
'selectable': 1,
}
_order = "name"
def _check_selection(self, cr, uid, selection, context=None):
try:
selection_list = eval(selection)
except Exception:
_logger.warning('Invalid selection list definition for fields.selection', exc_info=True)
raise except_orm(_('Error'),
_("The Selection Options expression is not a valid Pythonic expression."
"Please provide an expression in the [('key','Label'), ...] format."))
check = True
if not (isinstance(selection_list, list) and selection_list):
check = False
else:
for item in selection_list:
if not (isinstance(item, (tuple,list)) and len(item) == 2):
check = False
break
if not check:
raise except_orm(_('Error'),
_("The Selection Options expression is must be in the [('key','Label'), ...] format!"))
return True
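    # Illustrative examples of what _check_selection accepts or rejects
    # (hypothetical values, for clarity only):
    #   "[('blue','Blue'), ('yellow','Yellow')]"  -> accepted
    #   "[('blue','Blue','extra')]"               -> rejected: item length != 2
    #   "{'blue': 'Blue'}"                        -> rejected: not a non-empty list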
def _size_gt_zero_msg(self, cr, user, ids, context=None):
return _('Size of the field can never be less than 0 !')
_sql_constraints = [
('size_gt_zero', 'CHECK (size>=0)',_size_gt_zero_msg ),
]
def _drop_column(self, cr, uid, ids, context=None):
for field in self.browse(cr, uid, ids, context):
if field.name in MAGIC_COLUMNS:
continue
model = self.pool[field.model]
cr.execute('select relkind from pg_class where relname=%s', (model._table,))
result = cr.fetchone()
cr.execute("SELECT column_name FROM information_schema.columns WHERE table_name ='%s' and column_name='%s'" %(model._table, field.name))
column_name = cr.fetchone()
if column_name and (result and result[0] == 'r'):
cr.execute('ALTER table "%s" DROP column "%s" cascade' % (model._table, field.name))
# remove m2m relation table for custom fields
# we consider the m2m relation is only one way as it's not possible
# to specify the relation table in the interface for custom fields
# TODO master: maybe use ir.model.relations for custom fields
if field.state == 'manual' and field.ttype == 'many2many':
rel_name = model._fields[field.name].relation
cr.execute('DROP table "%s"' % (rel_name))
model._pop_field(field.name)
return True
def unlink(self, cr, user, ids, context=None):
# Prevent manual deletion of module columns
if context is None: context = {}
if isinstance(ids, (int, long)):
ids = [ids]
if not context.get(MODULE_UNINSTALL_FLAG) and \
any(field.state != 'manual' for field in self.browse(cr, user, ids, context)):
raise except_orm(_('Error'), _("This column contains module data and cannot be removed!"))
self._drop_column(cr, user, ids, context)
res = super(ir_model_fields, self).unlink(cr, user, ids, context)
if not context.get(MODULE_UNINSTALL_FLAG):
            # The field we just deleted might have been inherited, and the
            # registry is inconsistent in this case; therefore we reload it.
cr.commit()
api.Environment.reset()
RegistryManager.new(cr.dbname)
RegistryManager.signal_registry_change(cr.dbname)
return res
def create(self, cr, user, vals, context=None):
if 'model_id' in vals:
model_data = self.pool['ir.model'].browse(cr, user, vals['model_id'])
vals['model'] = model_data.model
if context is None:
context = {}
if vals.get('ttype', False) == 'selection':
if not vals.get('selection',False):
raise except_orm(_('Error'), _('For selection fields, the Selection Options must be given!'))
self._check_selection(cr, user, vals['selection'], context=context)
res = super(ir_model_fields,self).create(cr, user, vals, context)
if vals.get('state','manual') == 'manual':
if not vals['name'].startswith('x_'):
raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
if vals.get('relation',False) and not self.pool['ir.model'].search(cr, user, [('model','=',vals['relation'])]):
raise except_orm(_('Error'), _("Model %s does not exist!") % vals['relation'])
self.pool.clear_manual_fields()
if vals['model'] in self.pool:
model = self.pool[vals['model']]
if vals['model'].startswith('x_') and vals['name'] == 'x_name':
model._rec_name = 'x_name'
# re-initialize model in registry
model.__init__(self.pool, cr)
self.pool.setup_models(cr, partial=(not self.pool.ready))
# update database schema
model = self.pool[vals['model']]
ctx = dict(context,
field_name=vals['name'],
field_state='manual',
select=vals.get('select_level', '0'),
update_custom_fields=True)
model._auto_init(cr, ctx)
model._auto_end(cr, ctx) # actually create FKs!
RegistryManager.signal_registry_change(cr.dbname)
return res
def write(self, cr, user, ids, vals, context=None):
if context is None:
context = {}
        # For the moment, renaming a sparse field or changing the storing system is not allowed. This may be supported later.
if 'serialization_field_id' in vals or 'name' in vals:
for field in self.browse(cr, user, ids, context=context):
if 'serialization_field_id' in vals and field.serialization_field_id.id != vals['serialization_field_id']:
raise except_orm(_('Error!'), _('Changing the storing system for field "%s" is not allowed.')%field.name)
if field.serialization_field_id and (field.name != vals['name']):
raise except_orm(_('Error!'), _('Renaming sparse field "%s" is not allowed')%field.name)
# if set, *one* column can be renamed here
column_rename = None
# names of the models to patch
patched_models = set()
if vals and ids:
checked_selection = False # need only check it once, so defer
for item in self.browse(cr, user, ids, context=context):
obj = self.pool.get(item.model)
field = getattr(obj, '_fields', {}).get(item.name)
if item.state != 'manual':
raise except_orm(_('Error!'),
_('Properties of base fields cannot be altered in this manner! '
'Please modify them through Python code, '
'preferably through a custom addon!'))
if item.ttype == 'selection' and 'selection' in vals \
and not checked_selection:
self._check_selection(cr, user, vals['selection'], context=context)
checked_selection = True
final_name = item.name
if 'name' in vals and vals['name'] != item.name:
# We need to rename the column
if column_rename:
raise except_orm(_('Error!'), _('Can only rename one column at a time!'))
if vals['name'] in obj._columns:
raise except_orm(_('Error!'), _('Cannot rename column to %s, because that column already exists!') % vals['name'])
if vals.get('state', 'manual') == 'manual' and not vals['name'].startswith('x_'):
raise except_orm(_('Error!'), _('New column name must still start with x_ , because it is a custom field!'))
if '\'' in vals['name'] or '"' in vals['name'] or ';' in vals['name']:
raise ValueError('Invalid character in column name')
column_rename = (obj, (obj._table, item.name, vals['name']))
final_name = vals['name']
if 'model_id' in vals and vals['model_id'] != item.model_id.id:
raise except_orm(_("Error!"), _("Changing the model of a field is forbidden!"))
if 'ttype' in vals and vals['ttype'] != item.ttype:
raise except_orm(_("Error!"), _("Changing the type of a column is not yet supported. "
"Please drop it and create it again!"))
# We don't check the 'state', because it might come from the context
# (thus be set for multiple fields) and will be ignored anyway.
if obj is not None and field is not None:
patched_models.add(obj._name)
# These shall never be written (modified)
for column_name in ('model_id', 'model', 'state'):
if column_name in vals:
del vals[column_name]
res = super(ir_model_fields,self).write(cr, user, ids, vals, context=context)
self.pool.clear_manual_fields()
if column_rename:
obj, rename = column_rename
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % rename)
if column_rename or patched_models:
# setup models, this will reload all manual fields in registry
self.pool.setup_models(cr, partial=(not self.pool.ready))
if patched_models:
# We have to update _columns of the model(s) and then call their
# _auto_init to sync the db with the model. Hopefully, since write()
# was called earlier, they will be in-sync before the _auto_init.
# Anything we don't update in _columns now will be reset from
# the model into ir.model.fields (db).
ctx = dict(context,
select=vals.get('select_level', '0'),
update_custom_fields=True,
)
for model_name in patched_models:
obj = self.pool[model_name]
obj._auto_init(cr, ctx)
obj._auto_end(cr, ctx) # actually create FKs!
if column_rename or patched_models:
RegistryManager.signal_registry_change(cr.dbname)
return res
class ir_model_constraint(Model):
"""
This model tracks PostgreSQL foreign keys and constraints used by OpenERP
models.
"""
_name = 'ir.model.constraint'
_columns = {
'name': fields.char('Constraint', required=True, select=1,
help="PostgreSQL constraint or foreign key name."),
'model': fields.many2one('ir.model', string='Model',
required=True, select=1),
'module': fields.many2one('ir.module.module', string='Module',
required=True, select=1),
'type': fields.char('Constraint Type', required=True, size=1, select=1,
help="Type of the constraint: `f` for a foreign key, "
"`u` for other constraints."),
'date_update': fields.datetime('Update Date'),
'date_init': fields.datetime('Initialization Date')
}
_sql_constraints = [
('module_name_uniq', 'unique(name, module)',
'Constraints with the same name are unique per module.'),
]
def _module_data_uninstall(self, cr, uid, ids, context=None):
"""
Delete PostgreSQL foreign keys and constraints tracked by this model.
"""
if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"):
raise except_orm(_('Permission Denied'), (_('Administrator access is required to uninstall a module')))
context = dict(context or {})
ids_set = set(ids)
ids.sort()
ids.reverse()
for data in self.browse(cr, uid, ids, context):
model = data.model.model
model_obj = self.pool[model]
name = openerp.tools.ustr(data.name)
typ = data.type
# double-check we are really going to delete all the owners of this schema element
cr.execute("""SELECT id from ir_model_constraint where name=%s""", (data.name,))
external_ids = [x[0] for x in cr.fetchall()]
if set(external_ids)-ids_set:
# as installed modules have defined this element we must not delete it!
continue
if typ == 'f':
# test if FK exists on this table (it could be on a related m2m table, in which case we ignore it)
cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid)
WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""", ('f', name, model_obj._table))
if cr.fetchone():
cr.execute('ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (model_obj._table, name),)
_logger.info('Dropped FK CONSTRAINT %s@%s', name, model)
if typ == 'u':
# test if constraint exists
cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid)
WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""", ('u', name, model_obj._table))
if cr.fetchone():
cr.execute('ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (model_obj._table, name),)
_logger.info('Dropped CONSTRAINT %s@%s', name, model)
self.unlink(cr, uid, ids, context)
class ir_model_relation(Model):
"""
This model tracks PostgreSQL tables used to implement OpenERP many2many
relations.
"""
_name = 'ir.model.relation'
_columns = {
'name': fields.char('Relation Name', required=True, select=1,
help="PostgreSQL table name implementing a many2many relation."),
'model': fields.many2one('ir.model', string='Model',
required=True, select=1),
'module': fields.many2one('ir.module.module', string='Module',
required=True, select=1),
'date_update': fields.datetime('Update Date'),
'date_init': fields.datetime('Initialization Date')
}
def _module_data_uninstall(self, cr, uid, ids, context=None):
"""
Delete PostgreSQL many2many relations tracked by this model.
"""
if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"):
raise except_orm(_('Permission Denied'), (_('Administrator access is required to uninstall a module')))
ids_set = set(ids)
to_drop_table = []
ids.sort()
ids.reverse()
for data in self.browse(cr, uid, ids, context):
model = data.model
name = openerp.tools.ustr(data.name)
# double-check we are really going to delete all the owners of this schema element
cr.execute("""SELECT id from ir_model_relation where name = %s""", (data.name,))
external_ids = [x[0] for x in cr.fetchall()]
if set(external_ids)-ids_set:
# as installed modules have defined this element we must not delete it!
continue
cr.execute("SELECT 1 FROM information_schema.tables WHERE table_name=%s", (name,))
if cr.fetchone() and not name in to_drop_table:
to_drop_table.append(name)
self.unlink(cr, uid, ids, context)
# drop m2m relation tables
for table in to_drop_table:
cr.execute('DROP TABLE %s CASCADE'% table,)
_logger.info('Dropped table %s', table)
cr.commit()
class ir_model_access(osv.osv):
_name = 'ir.model.access'
_columns = {
'name': fields.char('Name', required=True, select=True),
        'active': fields.boolean('Active', help='If you uncheck the active field, it will disable the ACL without deleting it (if you delete a native ACL, it will be re-created when you reload the module).'),
'model_id': fields.many2one('ir.model', 'Object', required=True, domain=[('osv_memory','=', False)], select=True, ondelete='cascade'),
'group_id': fields.many2one('res.groups', 'Group', ondelete='cascade', select=True),
'perm_read': fields.boolean('Read Access'),
'perm_write': fields.boolean('Write Access'),
'perm_create': fields.boolean('Create Access'),
'perm_unlink': fields.boolean('Delete Access'),
}
_defaults = {
'active': True,
}
def check_groups(self, cr, uid, group):
        grouparr = group.split('.')
        if len(grouparr) != 2:
            # a fully qualified xml id such as 'base.group_system' is expected
            return False
cr.execute("select 1 from res_groups_users_rel where uid=%s and gid IN (select res_id from ir_model_data where module=%s and name=%s)", (uid, grouparr[0], grouparr[1],))
return bool(cr.fetchone())
def check_group(self, cr, uid, model, mode, group_ids):
""" Check if a specific group has the access mode to the specified model"""
assert mode in ['read','write','create','unlink'], 'Invalid access mode'
if isinstance(model, BaseModel):
assert model._name == 'ir.model', 'Invalid model object'
model_name = model.name
else:
model_name = model
if isinstance(group_ids, (int, long)):
group_ids = [group_ids]
for group_id in group_ids:
cr.execute("SELECT perm_" + mode + " "
" FROM ir_model_access a "
" JOIN ir_model m ON (m.id = a.model_id) "
" WHERE m.model = %s AND a.active IS True "
" AND a.group_id = %s", (model_name, group_id)
)
r = cr.fetchone()
if r is None:
cr.execute("SELECT perm_" + mode + " "
" FROM ir_model_access a "
" JOIN ir_model m ON (m.id = a.model_id) "
" WHERE m.model = %s AND a.active IS True "
" AND a.group_id IS NULL", (model_name, )
)
r = cr.fetchone()
access = bool(r and r[0])
if access:
return True
# pass no groups -> no access
return False
def group_names_with_access(self, cr, model_name, access_mode):
"""Returns the names of visible groups which have been granted ``access_mode`` on
the model ``model_name``.
:rtype: list
"""
assert access_mode in ['read','write','create','unlink'], 'Invalid access mode: %s' % access_mode
cr.execute('''SELECT
c.name, g.name
FROM
ir_model_access a
JOIN ir_model m ON (a.model_id=m.id)
JOIN res_groups g ON (a.group_id=g.id)
LEFT JOIN ir_module_category c ON (c.id=g.category_id)
WHERE
m.model=%s AND
a.active IS True AND
a.perm_''' + access_mode, (model_name,))
return [('%s/%s' % x) if x[0] else x[1] for x in cr.fetchall()]
# The context parameter is useful when the method translates error messages.
    # But as the method raises an exception in that case, the key 'lang' might
    # not really be necessary as a cache key, unless the `ormcache_context`
    # decorator catches the exception (it does not at the moment).
@tools.ormcache_context(accepted_keys=('lang',))
def check(self, cr, uid, model, mode='read', raise_exception=True, context=None):
if uid==1:
            # User root has all access rights
# TODO: exclude xml-rpc requests
return True
assert mode in ['read','write','create','unlink'], 'Invalid access mode'
if isinstance(model, BaseModel):
assert model._name == 'ir.model', 'Invalid model object'
model_name = model.model
else:
model_name = model
# TransientModel records have no access rights, only an implicit access rule
if model_name not in self.pool:
            _logger.error('Missing model %s', model_name)
elif self.pool[model_name].is_transient():
return True
# We check if a specific rule exists
cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) '
' FROM ir_model_access a '
' JOIN ir_model m ON (m.id = a.model_id) '
' JOIN res_groups_users_rel gu ON (gu.gid = a.group_id) '
' WHERE m.model = %s '
' AND gu.uid = %s '
' AND a.active IS True '
, (model_name, uid,)
)
r = cr.fetchone()[0]
if r is None:
# there is no specific rule. We check the generic rule
cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) '
' FROM ir_model_access a '
' JOIN ir_model m ON (m.id = a.model_id) '
' WHERE a.group_id IS NULL '
' AND m.model = %s '
' AND a.active IS True '
, (model_name,)
)
r = cr.fetchone()[0]
if not r and raise_exception:
groups = '\n\t'.join('- %s' % g for g in self.group_names_with_access(cr, model_name, mode))
msg_heads = {
# Messages are declared in extenso so they are properly exported in translation terms
'read': _("Sorry, you are not allowed to access this document."),
'write': _("Sorry, you are not allowed to modify this document."),
'create': _("Sorry, you are not allowed to create this kind of document."),
'unlink': _("Sorry, you are not allowed to delete this document."),
}
if groups:
msg_tail = _("Only users with the following access level are currently allowed to do that") + ":\n%s\n\n(" + _("Document model") + ": %s)"
msg_params = (groups, model_name)
else:
msg_tail = _("Please contact your system administrator if you think this is an error.") + "\n\n(" + _("Document model") + ": %s)"
msg_params = (model_name,)
_logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, uid, model_name)
msg = '%s %s' % (msg_heads[mode], msg_tail)
raise openerp.exceptions.AccessError(msg % msg_params)
return bool(r)
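    # Typical usage (a sketch; the model and mode values are illustrative):
    # the ORM consults this method before every CRUD operation, e.g.
    #
    #     self.pool['ir.model.access'].check(cr, uid, 'res.partner', 'write')
    #
    # which returns True when some group of `uid` (or the fallback global rule)
    # grants write access on res.partner, and raises
    # openerp.exceptions.AccessError otherwise.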
__cache_clearing_methods = []
def register_cache_clearing_method(self, model, method):
self.__cache_clearing_methods.append((model, method))
def unregister_cache_clearing_method(self, model, method):
try:
i = self.__cache_clearing_methods.index((model, method))
del self.__cache_clearing_methods[i]
except ValueError:
pass
def call_cache_clearing_methods(self, cr):
self.invalidate_cache(cr, SUPERUSER_ID)
self.check.clear_cache(self) # clear the cache of check function
for model, method in self.__cache_clearing_methods:
if model in self.pool:
getattr(self.pool[model], method)()
#
# Check rights on actions
#
def write(self, cr, uid, ids, values, context=None):
self.call_cache_clearing_methods(cr)
res = super(ir_model_access, self).write(cr, uid, ids, values, context=context)
return res
def create(self, cr, uid, values, context=None):
self.call_cache_clearing_methods(cr)
res = super(ir_model_access, self).create(cr, uid, values, context=context)
return res
def unlink(self, cr, uid, ids, context=None):
self.call_cache_clearing_methods(cr)
res = super(ir_model_access, self).unlink(cr, uid, ids, context=context)
return res
class ir_model_data(osv.osv):
"""Holds external identifier keys for records in the database.
This has two main uses:
* allows easy data integration with third-party systems,
making import/export/sync of data possible, as records
can be uniquely identified across multiple systems
* allows tracking the origin of data installed by OpenERP
modules themselves, thus making it possible to later
update them seamlessly.
"""
_name = 'ir.model.data'
_order = 'module,model,name'
def name_get(self, cr, uid, ids, context=None):
bymodel = defaultdict(dict)
names = {}
for res in self.browse(cr, uid, ids, context=context):
bymodel[res.model][res.res_id] = res
names[res.id] = res.complete_name
#result[res.model][res.res_id] = res.id
for model, id_map in bymodel.iteritems():
try:
ng = dict(self.pool[model].name_get(cr, uid, id_map.keys(), context=context))
except Exception:
pass
else:
for r in id_map.itervalues():
names[r.id] = ng.get(r.res_id, r.complete_name)
return [(i, names[i]) for i in ids]
def _complete_name_get(self, cr, uid, ids, prop, unknow_none, context=None):
result = {}
for res in self.browse(cr, uid, ids, context=context):
result[res.id] = (res.module and (res.module + '.') or '')+res.name
return result
_columns = {
'name': fields.char('External Identifier', required=True, select=1,
help="External Key/Identifier that can be used for "
"data integration with third-party systems"),
'complete_name': fields.function(_complete_name_get, type='char', string='Complete ID'),
'model': fields.char('Model Name', required=True, select=1),
'module': fields.char('Module', required=True, select=1),
'res_id': fields.integer('Record ID', select=1,
help="ID of the target record in the database"),
'noupdate': fields.boolean('Non Updatable'),
'date_update': fields.datetime('Update Date'),
'date_init': fields.datetime('Init Date')
}
_defaults = {
'date_init': fields.datetime.now,
'date_update': fields.datetime.now,
'noupdate': False,
'module': ''
}
_sql_constraints = [
('module_name_uniq', 'unique(name, module)', 'You cannot have multiple records with the same external ID in the same module!'),
]
def __init__(self, pool, cr):
osv.osv.__init__(self, pool, cr)
# also stored in pool to avoid being discarded along with this osv instance
if getattr(pool, 'model_data_reference_ids', None) is None:
self.pool.model_data_reference_ids = {}
# put loads on the class, in order to share it among all instances
type(self).loads = self.pool.model_data_reference_ids
def _auto_init(self, cr, context=None):
res = super(ir_model_data, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_model_data_module_name_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_model_data_module_name_index ON ir_model_data (module, name)')
return res
# NEW V8 API
@tools.ormcache(skiparg=3)
def xmlid_lookup(self, cr, uid, xmlid):
"""Low level xmlid lookup
Return (id, res_model, res_id) or raise ValueError if not found
"""
module, name = xmlid.split('.', 1)
ids = self.search(cr, uid, [('module','=',module), ('name','=', name)])
if not ids:
raise ValueError('External ID not found in the system: %s' % (xmlid))
# the sql constraints ensure us we have only one result
res = self.read(cr, uid, ids[0], ['model', 'res_id'])
if not res['res_id']:
raise ValueError('External ID not found in the system: %s' % (xmlid))
return ids[0], res['model'], res['res_id']
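    # Example (the xmlid is a standard one shipped with the base module; the
    # returned ids depend on the database):
    #
    #     imd_id, res_model, res_id = self.xmlid_lookup(cr, uid, 'base.user_root')
    #     # res_model == 'res.users'; res_id is the admin user's database id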
def xmlid_to_res_model_res_id(self, cr, uid, xmlid, raise_if_not_found=False):
""" Return (res_model, res_id)"""
try:
return self.xmlid_lookup(cr, uid, xmlid)[1:3]
except ValueError:
if raise_if_not_found:
raise
return (False, False)
def xmlid_to_res_id(self, cr, uid, xmlid, raise_if_not_found=False):
""" Returns res_id """
return self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found)[1]
def xmlid_to_object(self, cr, uid, xmlid, raise_if_not_found=False, context=None):
""" Return a browse_record
if not found and raise_if_not_found is True return None
"""
t = self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found)
res_model, res_id = t
if res_model and res_id:
record = self.pool[res_model].browse(cr, uid, res_id, context=context)
if record.exists():
return record
if raise_if_not_found:
raise ValueError('No record found for unique ID %s. It may have been deleted.' % (xmlid))
return None
# OLD API
def _get_id(self, cr, uid, module, xml_id):
"""Returns the id of the ir.model.data record corresponding to a given module and xml_id (cached) or raise a ValueError if not found"""
return self.xmlid_lookup(cr, uid, "%s.%s" % (module, xml_id))[0]
def get_object_reference(self, cr, uid, module, xml_id):
"""Returns (model, res_id) corresponding to a given module and xml_id (cached) or raise ValueError if not found"""
return self.xmlid_lookup(cr, uid, "%s.%s" % (module, xml_id))[1:3]
def check_object_reference(self, cr, uid, module, xml_id, raise_on_access_error=False):
"""Returns (model, res_id) corresponding to a given module and xml_id (cached), if and only if the user has the necessary access rights
to see that object, otherwise raise a ValueError if raise_on_access_error is True or returns a tuple (model found, False)"""
model, res_id = self.get_object_reference(cr, uid, module, xml_id)
#search on id found in result to check if current user has read access right
check_right = self.pool.get(model).search(cr, uid, [('id', '=', res_id)])
if check_right:
return model, res_id
if raise_on_access_error:
raise ValueError('Not enough access rights on the external ID: %s.%s' % (module, xml_id))
return model, False
def get_object(self, cr, uid, module, xml_id, context=None):
""" Returns a browsable record for the given module name and xml_id.
            Raises ValueError if the record is not found.
"""
return self.xmlid_to_object(cr, uid, "%s.%s" % (module, xml_id), raise_if_not_found=True, context=context)
def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True):
if not xml_id:
return False
id = False
try:
# One step to check the ID is defined and the record actually exists
record = self.get_object(cr, uid, module, xml_id)
if record:
id = record.id
self.loads[(module,xml_id)] = (model,id)
for table, inherit_field in self.pool[model]._inherits.iteritems():
parent_id = record[inherit_field].id
parent_xid = '%s_%s' % (xml_id, table.replace('.', '_'))
self.loads[(module, parent_xid)] = (table, parent_id)
except Exception:
pass
return id
def clear_caches(self):
""" Clears all orm caches on the object's methods
:returns: itself
"""
self.xmlid_lookup.clear_cache(self)
return self
def unlink(self, cr, uid, ids, context=None):
""" Regular unlink method, but make sure to clear the caches. """
self.clear_caches()
return super(ir_model_data,self).unlink(cr, uid, ids, context=context)
def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False, context=None):
model_obj = self.pool[model]
if not context:
context = {}
# records created during module install should not display the messages of OpenChatter
context = dict(context, install_mode=True)
if xml_id and ('.' in xml_id):
            assert len(xml_id.split('.')) == 2, _("'%s' contains too many dots. XML ids should not contain dots! These are used to refer to other modules' data, as in module.reference_id") % xml_id
module, xml_id = xml_id.split('.')
action_id = False
if xml_id:
cr.execute('''SELECT imd.id, imd.res_id, md.id, imd.model, imd.noupdate
FROM ir_model_data imd LEFT JOIN %s md ON (imd.res_id = md.id)
WHERE imd.module=%%s AND imd.name=%%s''' % model_obj._table,
(module, xml_id))
results = cr.fetchall()
for imd_id2,res_id2,real_id2,real_model,noupdate_imd in results:
                # In update mode, do not update a record if its ir.model.data entry is flagged as noupdate
if mode == 'update' and noupdate_imd:
return res_id2
if not real_id2:
self.clear_caches()
cr.execute('delete from ir_model_data where id=%s', (imd_id2,))
res_id = False
else:
assert model == real_model, "External ID conflict, %s already refers to a `%s` record,"\
" you can't define a `%s` record with this ID." % (xml_id, real_model, model)
res_id,action_id = res_id2,imd_id2
if action_id and res_id:
model_obj.write(cr, uid, [res_id], values, context=context)
self.write(cr, SUPERUSER_ID, [action_id], {
'date_update': time.strftime('%Y-%m-%d %H:%M:%S'),
},context=context)
elif res_id:
model_obj.write(cr, uid, [res_id], values, context=context)
if xml_id:
if model_obj._inherits:
for table in model_obj._inherits:
inherit_id = model_obj.browse(cr, uid,
res_id,context=context)[model_obj._inherits[table]]
self.create(cr, SUPERUSER_ID, {
'name': xml_id + '_' + table.replace('.', '_'),
'model': table,
'module': module,
'res_id': inherit_id.id,
'noupdate': noupdate,
},context=context)
self.create(cr, SUPERUSER_ID, {
'name': xml_id,
'model': model,
'module':module,
'res_id':res_id,
'noupdate': noupdate,
},context=context)
else:
if mode=='init' or (mode=='update' and xml_id):
inherit_xml_ids = []
if xml_id:
for table, field_name in model_obj._inherits.items():
xml_ids = self.pool['ir.model.data'].search(cr, uid, [
('module', '=', module),
('name', '=', xml_id + '_' + table.replace('.', '_')),
], context=context)
# XML ID found in the database, try to recover an existing record
if xml_ids:
found_xml_id = self.pool['ir.model.data'].browse(cr, uid, xml_ids[0], context=context)
record = self.pool[found_xml_id.model].browse(cr, uid, [found_xml_id.res_id], context=context)[0]
# The record exists, store the id and don't recreate the XML ID
if record.exists():
inherit_xml_ids.append(found_xml_id.model)
values[field_name] = found_xml_id.res_id
# Orphan XML ID, delete it
else:
found_xml_id.unlink()
res_id = model_obj.create(cr, uid, values, context=context)
if xml_id:
if model_obj._inherits:
for table in model_obj._inherits:
if table in inherit_xml_ids:
continue
inherit_id = model_obj.browse(cr, uid,
res_id,context=context)[model_obj._inherits[table]]
self.create(cr, SUPERUSER_ID, {
'name': xml_id + '_' + table.replace('.', '_'),
'model': table,
'module': module,
'res_id': inherit_id.id,
'noupdate': noupdate,
},context=context)
self.create(cr, SUPERUSER_ID, {
'name': xml_id,
'model': model,
'module': module,
'res_id': res_id,
'noupdate': noupdate
},context=context)
if xml_id and res_id:
self.loads[(module, xml_id)] = (model, res_id)
for table, inherit_field in model_obj._inherits.iteritems():
inherit_id = model_obj.read(cr, uid, [res_id],
[inherit_field])[0][inherit_field]
self.loads[(module, xml_id + '_' + table.replace('.', '_'))] = (table, inherit_id)
return res_id
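    # For orientation (a sketch with illustrative names): loading a module
    # data record such as
    #
    #     <record id="partner_demo" model="res.partner">
    #         <field name="name">Demo Partner</field>
    #     </record>
    #
    # from a module 'my_module' reaches this method roughly as
    #
    #     self.pool['ir.model.data']._update(cr, uid, 'res.partner',
    #         'my_module', {'name': 'Demo Partner'},
    #         xml_id='partner_demo', mode='init')
    #
    # creating the record on first install and re-matching it through the
    # (module, xml_id) pair on subsequent updates.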
def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False):
ir_values_obj = openerp.registry(cr.dbname)['ir.values']
ir_values_obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
return True
def _module_data_uninstall(self, cr, uid, modules_to_remove, context=None):
"""Deletes all the records referenced by the ir.model.data entries
        ``ids`` along with their corresponding database backing (including
        dropping tables, columns, FKs, etc.), as long as no other
        ir.model.data entry holds a reference to them (which would indicate
        that they are still owned by another module).
Attempts to perform the deletion in an appropriate order to maximize
the chance of gracefully deleting all records.
This step is performed as part of the full uninstallation of a module.
"""
ids = self.search(cr, uid, [('module', 'in', modules_to_remove)])
if uid != 1 and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"):
raise except_orm(_('Permission Denied'), (_('Administrator access is required to uninstall a module')))
context = dict(context or {})
context[MODULE_UNINSTALL_FLAG] = True # enable model/field deletion
ids_set = set(ids)
wkf_todo = []
to_unlink = []
ids.sort()
ids.reverse()
for data in self.browse(cr, uid, ids, context):
model = data.model
res_id = data.res_id
pair_to_unlink = (model, res_id)
if pair_to_unlink not in to_unlink:
to_unlink.append(pair_to_unlink)
if model == 'workflow.activity':
# Special treatment for workflow activities: temporarily revert their
# incoming transition and trigger an update to force all workflow items
# to move out before deleting them
cr.execute('select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)', (res_id,))
wkf_todo.extend(cr.fetchall())
cr.execute("update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id,res_id))
self.invalidate_cache(cr, uid, context=context)
for model,res_id in wkf_todo:
try:
openerp.workflow.trg_write(uid, model, res_id, cr)
except Exception:
_logger.info('Unable to force processing of workflow for item %s@%s in order to leave activity to be deleted', res_id, model, exc_info=True)
def unlink_if_refcount(to_unlink):
for model, res_id in to_unlink:
external_ids = self.search(cr, uid, [('model', '=', model),('res_id', '=', res_id)])
if set(external_ids)-ids_set:
# if other modules have defined this record, we must not delete it
continue
if model == 'ir.model.fields':
# Don't remove the LOG_ACCESS_COLUMNS unless _log_access
# has been turned off on the model.
field = self.pool[model].browse(cr, uid, [res_id], context=context)[0]
if not field.exists():
_logger.info('Deleting orphan external_ids %s', external_ids)
self.unlink(cr, uid, external_ids)
continue
if field.name in openerp.models.LOG_ACCESS_COLUMNS and self.pool[field.model]._log_access:
continue
if field.name == 'id':
continue
_logger.info('Deleting %s@%s', res_id, model)
try:
cr.execute('SAVEPOINT record_unlink_save')
self.pool[model].unlink(cr, uid, [res_id], context=context)
except Exception:
_logger.info('Unable to delete %s@%s', res_id, model, exc_info=True)
cr.execute('ROLLBACK TO SAVEPOINT record_unlink_save')
else:
cr.execute('RELEASE SAVEPOINT record_unlink_save')
        # Remove non-model records first, then constraints, then model fields, and finish with models
unlink_if_refcount((model, res_id) for model, res_id in to_unlink
if model not in ('ir.model','ir.model.fields','ir.model.constraint'))
unlink_if_refcount((model, res_id) for model, res_id in to_unlink
if model == 'ir.model.constraint')
ir_module_module = self.pool['ir.module.module']
ir_model_constraint = self.pool['ir.model.constraint']
modules_to_remove_ids = ir_module_module.search(cr, uid, [('name', 'in', modules_to_remove)], context=context)
constraint_ids = ir_model_constraint.search(cr, uid, [('module', 'in', modules_to_remove_ids)], context=context)
ir_model_constraint._module_data_uninstall(cr, uid, constraint_ids, context)
unlink_if_refcount((model, res_id) for model, res_id in to_unlink
if model == 'ir.model.fields')
ir_model_relation = self.pool['ir.model.relation']
relation_ids = ir_model_relation.search(cr, uid, [('module', 'in', modules_to_remove_ids)])
ir_model_relation._module_data_uninstall(cr, uid, relation_ids, context)
unlink_if_refcount((model, res_id) for model, res_id in to_unlink
if model == 'ir.model')
cr.commit()
self.unlink(cr, uid, ids, context)
def _process_end(self, cr, uid, modules):
""" Clear records removed from updated module data.
This method is called at the end of the module loading process.
        It is meant to remove records that are no longer present in the
        updated data. Such records are recognised as the ones with an xml id
        and a module in ir_model_data and noupdate set to false, but not
present in self.loads.
"""
if not modules or config.get('import_partial'):
return True
bad_imd_ids = []
context = {MODULE_UNINSTALL_FLAG: True}
cr.execute("""SELECT id,name,model,res_id,module FROM ir_model_data
WHERE module IN %s AND res_id IS NOT NULL AND noupdate=%s ORDER BY id DESC
""", (tuple(modules), False))
for (id, name, model, res_id, module) in cr.fetchall():
if (module, name) not in self.loads:
if model in self.pool:
_logger.info('Deleting %s@%s (%s.%s)', res_id, model, module, name)
if self.pool[model].exists(cr, uid, [res_id], context=context):
self.pool[model].unlink(cr, uid, [res_id], context=context)
else:
bad_imd_ids.append(id)
if bad_imd_ids:
self.unlink(cr, uid, bad_imd_ids, context=context)
self.loads.clear()
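# A minimal standalone sketch (plain Python, not the OpenERP API) of the
# deletion ordering used by _module_data_uninstall above: ordinary records go
# first, then constraints, then fields, and models last, so nothing is dropped
# while something else still references it. The phase ranks are illustrative
# assumptions, not framework identifiers.
def _uninstall_order(to_unlink):
    rank = {'ir.model.constraint': 1, 'ir.model.fields': 2, 'ir.model': 3}
    return sorted(to_unlink, key=lambda pair: rank.get(pair[0], 0))
# _uninstall_order([('ir.model', 7), ('res.partner', 1), ('ir.model.fields', 4)])
# -> [('res.partner', 1), ('ir.model.fields', 4), ('ir.model', 7)]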
class wizard_model_menu(osv.osv_memory):
_name = 'wizard.ir.model.menu.create'
_columns = {
'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
'name': fields.char('Menu Name', required=True),
}
def menu_create(self, cr, uid, ids, context=None):
if not context:
context = {}
model_pool = self.pool.get('ir.model')
for menu in self.browse(cr, uid, ids, context):
model = model_pool.browse(cr, uid, context.get('model_id'), context=context)
val = {
'name': menu.name,
'res_model': model.model,
'view_type': 'form',
'view_mode': 'tree,form'
}
action_id = self.pool.get('ir.actions.act_window').create(cr, uid, val)
self.pool.get('ir.ui.menu').create(cr, uid, {
'name': menu.name,
'parent_id': menu.menu_id.id,
'action': 'ir.actions.act_window,%d' % (action_id,),
'icon': 'STOCK_INDENT'
}, context)
return {'type':'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
job/exscript
|
tests/Exscript/util/interactTest.py
|
6
|
2739
|
import sys, unittest, re, os.path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..', 'src'))
from tempfile import NamedTemporaryFile
import Exscript.util.interact
from Exscript.util.interact import InputHistory
class InputHistoryTest(unittest.TestCase):
CORRELATE = InputHistory
def setUp(self):
t = NamedTemporaryFile()
self.history = InputHistory(t.name)
def testConstructor(self):
t = NamedTemporaryFile()
h = InputHistory()
h = InputHistory(t.name)
h = InputHistory(t.name, 'foo')
h.set('aaa', 'bbb')
self.assertEqual(open(t.name).read(), '[foo]\naaa = bbb\n\n')
def testGet(self):
self.assertEqual(self.history.get('bar'), None)
self.assertEqual(self.history.get('bar', None), None)
self.assertEqual(self.history.get('bar', '...'), '...')
self.history.set('bar', 'myvalue')
self.assertEqual(self.history.get('bar'), 'myvalue')
self.assertEqual(self.history.get('bar', '...'), 'myvalue')
self.assertEqual(self.history.get('bar', None), 'myvalue')
def testSet(self):
self.testGet()
self.history.set('bar', 'myvalue2')
self.assertEqual(self.history.get('bar'), 'myvalue2')
self.assertEqual(self.history.get('bar', '...'), 'myvalue2')
self.assertEqual(self.history.get('bar', None), 'myvalue2')
self.history.set('bar', None)
self.assertEqual(self.history.get('bar'), 'myvalue2')
self.assertEqual(self.history.get('bar', '...'), 'myvalue2')
self.assertEqual(self.history.get('bar', None), 'myvalue2')
class interactTest(unittest.TestCase):
CORRELATE = Exscript.util.interact
def testPrompt(self):
from Exscript.util.interact import prompt
# Can't really be tested, as it is interactive.
def testGetFilename(self):
from Exscript.util.interact import get_filename
# Can't really be tested, as it is interactive.
def testGetUser(self):
from Exscript.util.interact import get_user
# Can't really be tested, as it is interactive.
def testGetLogin(self):
from Exscript.util.interact import get_login
# Can't really be tested, as it is interactive.
def testReadLogin(self):
from Exscript.util.interact import read_login
# Can't really be tested, as it is interactive.
def suite():
loader = unittest.TestLoader()
thesuite = unittest.TestSuite()
thesuite.addTest(loader.loadTestsFromTestCase(InputHistoryTest))
thesuite.addTest(loader.loadTestsFromTestCase(interactTest))
return thesuite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity = 2).run(suite())
|
gpl-2.0
|
ntt-sic/cinder
|
cinder/tests/backup/fake_swift_client.py
|
5
|
4331
|
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
import json
import os
import socket
import zlib
from cinder.openstack.common import log as logging
from swiftclient import client as swift
LOG = logging.getLogger(__name__)
class FakeSwiftClient(object):
"""Logs calls instead of executing."""
def __init__(self, *args, **kwargs):
pass
@classmethod
def Connection(self, *args, **kargs):
LOG.debug("fake FakeSwiftClient Connection")
return FakeSwiftConnection()
class FakeSwiftConnection(object):
"""Logging calls instead of executing"""
def __init__(self, *args, **kwargs):
pass
def head_container(self, container):
LOG.debug("fake head_container(%s)" % container)
if container == 'missing_container':
raise swift.ClientException('fake exception',
http_status=httplib.NOT_FOUND)
elif container == 'unauthorized_container':
raise swift.ClientException('fake exception',
http_status=httplib.UNAUTHORIZED)
elif container == 'socket_error_on_head':
raise socket.error(111, 'ECONNREFUSED')
pass
def put_container(self, container):
LOG.debug("fake put_container(%s)" % container)
pass
def get_container(self, container, **kwargs):
LOG.debug("fake get_container(%s)" % container)
fake_header = None
fake_body = [{'name': 'backup_001'},
{'name': 'backup_002'},
{'name': 'backup_003'}]
return fake_header, fake_body
def head_object(self, container, name):
LOG.debug("fake put_container(%s, %s)" % (container, name))
return {'etag': 'fake-md5-sum'}
def get_object(self, container, name):
LOG.debug("fake get_object(%s, %s)" % (container, name))
if container == 'socket_error_on_get':
raise socket.error(111, 'ECONNREFUSED')
if 'metadata' in name:
fake_object_header = None
metadata = {}
if container == 'unsupported_version':
metadata['version'] = '9.9.9'
else:
metadata['version'] = '1.0.0'
metadata['backup_id'] = 123
metadata['volume_id'] = 123
metadata['backup_name'] = 'fake backup'
metadata['backup_description'] = 'fake backup description'
metadata['created_at'] = '2013-02-19 11:20:54,805'
metadata['objects'] = [{
'backup_001': {'compression': 'zlib', 'length': 10},
'backup_002': {'compression': 'zlib', 'length': 10},
'backup_003': {'compression': 'zlib', 'length': 10}
}]
metadata_json = json.dumps(metadata, sort_keys=True, indent=2)
fake_object_body = metadata_json
return (fake_object_header, fake_object_body)
fake_header = None
fake_object_body = os.urandom(1024 * 1024)
return (fake_header, zlib.compress(fake_object_body))
def put_object(self, container, name, reader, content_length=None,
etag=None, chunk_size=None, content_type=None,
headers=None, query_string=None):
LOG.debug("fake put_object(%s, %s)" % (container, name))
if container == 'socket_error_on_put':
raise socket.error(111, 'ECONNREFUSED')
return 'fake-md5-sum'
def delete_object(self, container, name):
LOG.debug("fake delete_object(%s, %s)" % (container, name))
if container == 'socket_error_on_delete':
raise socket.error(111, 'ECONNREFUSED')
pass
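# A minimal usage sketch: error behaviour is keyed purely on container names,
# so tests can exercise failure paths without a real Swift endpoint. (Real
# tests construct the backup driver instead; this block is illustrative only.)
if __name__ == '__main__':
    conn = FakeSwiftClient.Connection()
    header, body = conn.get_container('backups')
    assert [o['name'] for o in body] == ['backup_001', 'backup_002', 'backup_003']
    assert conn.put_object('backups', 'backup_001', None) == 'fake-md5-sum'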
|
apache-2.0
|
sfstpala/Victory-Chat
|
markdown/extensions/html_tidy.py
|
2
|
2089
|
#!/usr/bin/env python
"""
HTML Tidy Extension for Python-Markdown
=======================================
Runs [HTML Tidy][] on the output of Python-Markdown using the [uTidylib][]
Python wrapper. Both libtidy and uTidylib must be installed on your system.
Note that any Tidy [options][] can be passed in as extension configs. So,
for example, to output HTML rather than XHTML, set ``output_xhtml=0``. To
indent the output, set ``indent=auto`` and to have Tidy wrap the output in
``<html>`` and ``<body>`` tags, set ``show_body_only=0``.
[HTML Tidy]: http://tidy.sourceforge.net/
[uTidylib]: http://utidylib.berlios.de/
[options]: http://tidy.sourceforge.net/docs/quickref.html
Copyright (c)2008 [Waylan Limberg](http://achinghead.com)
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
Dependencies:
* [Python2.3+](http://python.org)
* [Markdown 2.0+](http://www.freewisdom.org/projects/python-markdown/)
* [HTML Tidy](http://utidylib.berlios.de/)
* [uTidylib](http://utidylib.berlios.de/)
"""
import markdown
import tidy
class TidyExtension(markdown.Extension):
def __init__(self, configs):
# Set defaults to match typical markdown behavior.
self.config = dict(output_xhtml=1,
show_body_only=1,
)
        # Merge in user-defined configs, overriding any present if necessary.
for c in configs:
self.config[c[0]] = c[1]
def extendMarkdown(self, md, md_globals):
# Save options to markdown instance
md.tidy_options = self.config
# Add TidyProcessor to postprocessors
md.postprocessors['tidy'] = TidyProcessor(md)
class TidyProcessor(markdown.postprocessors.Postprocessor):
def run(self, text):
# Pass text to Tidy. As Tidy does not accept unicode we need to encode
# it and decode its return value.
return str(tidy.parseString(text.encode('utf-8'),
**self.markdown.tidy_options))
def makeExtension(configs=None):
return TidyExtension(configs=configs)
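# A minimal usage sketch, assuming libtidy and uTidylib are installed and a
# Markdown 2.x-era API that accepts extension instances: Tidy options such as
# `indent` are passed straight through as extension configs.
if __name__ == '__main__':
    md = markdown.Markdown(extensions=[makeExtension(configs=[('indent', 'auto')])])
    print(md.convert('Some *tidied* output.'))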
|
isc
|
lmprice/ansible
|
lib/ansible/plugins/lookup/aws_service_ip_ranges.py
|
102
|
3425
|
# (c) 2016 James Turner <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: aws_service_ip_ranges
author:
- James Turner <[email protected]>
version_added: "2.5"
requirements:
- must have public internet connectivity
short_description: Look up the IP ranges for services provided in AWS such as EC2 and S3.
description:
- AWS publishes IP ranges used on the public internet by EC2, S3, CloudFront, CodeBuild, Route53, and Route53 Health Checking.
- This module produces a list of all the ranges (by default) or can narrow down the list to the specified region or service.
options:
service:
    description: 'The service to filter ranges by. Options: EC2, S3, CLOUDFRONT, CODEBUILD, ROUTE53, ROUTE53_HEALTHCHECKS'
region:
description: 'The AWS region to narrow the ranges to. Examples: us-east-1, eu-west-2, ap-southeast-1'
"""
EXAMPLES = """
vars:
ec2_ranges: "{{ lookup('aws_service_ip_ranges', region='ap-southeast-2', service='EC2', wantlist=True) }}"
tasks:
- name: "use list return option and iterate as a loop"
debug: msg="{% for cidr in ec2_ranges %}{{ cidr }} {% endfor %}"
# "52.62.0.0/15 52.64.0.0/17 52.64.128.0/17 52.65.0.0/16 52.95.241.0/24 52.95.255.16/28 54.66.0.0/16 "
- name: "Pull S3 IP ranges, and print the default return style"
debug: msg="{{ lookup('aws_service_ip_ranges', region='us-east-1', service='S3') }}"
# "52.92.16.0/20,52.216.0.0/15,54.231.0.0/17"
"""
RETURN = """
_raw:
description: comma-separated list of CIDR ranges
"""
import json
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
from ansible.module_utils._text import to_native
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
try:
resp = open_url('https://ip-ranges.amazonaws.com/ip-ranges.json')
amazon_response = json.load(resp)['prefixes']
except getattr(json.decoder, 'JSONDecodeError', ValueError) as e:
# on Python 3+, json.decoder.JSONDecodeError is raised for bad
# JSON. On 2.x it's a ValueError
raise AnsibleError("Could not decode AWS IP ranges: %s" % to_native(e))
except HTTPError as e:
raise AnsibleError("Received HTTP error while pulling IP ranges: %s" % to_native(e))
except SSLValidationError as e:
raise AnsibleError("Error validating the server's certificate for: %s" % to_native(e))
except URLError as e:
raise AnsibleError("Failed look up IP range service: %s" % to_native(e))
except ConnectionError as e:
raise AnsibleError("Error connecting to IP range service: %s" % to_native(e))
if 'region' in kwargs:
region = kwargs['region']
amazon_response = (item for item in amazon_response if item['region'] == region)
if 'service' in kwargs:
service = str.upper(kwargs['service'])
amazon_response = (item for item in amazon_response if item['service'] == service)
return [item['ip_prefix'] for item in amazon_response]
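# A standalone sketch of the filtering performed above, run against a tiny
# hand-written prefix list instead of the live ip-ranges.json document.
if __name__ == '__main__':
    prefixes = [
        {'ip_prefix': '52.92.16.0/20', 'region': 'us-east-1', 'service': 'S3'},
        {'ip_prefix': '52.62.0.0/15', 'region': 'ap-southeast-2', 'service': 'EC2'},
    ]
    s3_east = [p['ip_prefix'] for p in prefixes
               if p['region'] == 'us-east-1' and p['service'] == 'S3']
    assert s3_east == ['52.92.16.0/20']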
|
gpl-3.0
|
liuyxpp/blohg
|
docs/conf.py
|
2
|
7298
|
# -*- coding: utf-8 -*-
#
# blohg documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 26 23:45:47 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'blohg'
copyright = u'2010-2013, Rafael G. Martins'
cwd = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(cwd, '..', 'blohg'))
from version import version as release
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = release.split('+')[0]
# The full version, including alpha/beta/rc tags.
#release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'blohgdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'blohg.tex', u'blohg Documentation',
u'Rafael G. Martins', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'blohg', u'blohg Documentation',
[u'Rafael G. Martins'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {}
highlight_language = 'none'
|
gpl-2.0
|
codeforamerica/skillcamp
|
ENV/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/euckrprober.py
|
2931
|
1675
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKRSMModel
class EUCKRProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(EUCKRSMModel)
self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
self.reset()
def get_charset_name(self):
return "EUC-KR"
|
mit
|
aallai/pyobfsproxy
|
obfsproxy/transports/obfs3_dh.py
|
1
|
2685
|
import binascii
import obfsproxy.common.rand as rand
def int_to_bytes(lvalue, width):
fmt = '%%.%dx' % (2*width)
return binascii.unhexlify(fmt % (lvalue & ((1L<<8*width)-1)))
class UniformDH:
"""
This is a class that implements a DH handshake that uses public
keys that are indistinguishable from 192-byte random strings.
The idea (and even the implementation) was suggested by Ian
Goldberg in:
https://lists.torproject.org/pipermail/tor-dev/2012-December/004245.html
https://lists.torproject.org/pipermail/tor-dev/2012-December/004248.html
Attributes:
mod, the modulus of our DH group.
g, the generator of our DH group.
group_len, the size of the group in bytes.
priv_str, a byte string representing our DH private key.
priv, our DH private key as an integer.
pub_str, a byte string representing our DH public key.
pub, our DH public key as an integer.
shared_secret, our DH shared secret.
"""
# 1536-bit MODP Group from RFC3526
mod = int(
"""FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
670C354E 4ABC9804 F1746C08 CA237327 FFFFFFFF FFFFFFFF""".replace(' ','').replace('\n','').replace('\t',''), 16)
g = 2
group_len = 192 # bytes (1536-bits)
def __init__(self):
# Generate private key
self.priv_str = rand.random_bytes(self.group_len)
self.priv = int(binascii.hexlify(self.priv_str), 16)
# Make the private key even
flip = self.priv % 2
self.priv -= flip
# Generate public key
self.pub = pow(self.g, self.priv, self.mod)
if flip == 1:
self.pub = self.mod - self.pub
self.pub_str = int_to_bytes(self.pub, self.group_len)
self.shared_secret = None
def get_public(self):
return self.pub_str
def get_secret(self, their_pub_str):
"""
Given the public key of the other party as a string of bytes,
calculate our shared secret.
This might raise a ValueError since 'their_pub_str' is
attacker controlled.
"""
their_pub = int(binascii.hexlify(their_pub_str), 16)
self.shared_secret = pow(their_pub, self.priv, self.mod)
return int_to_bytes(self.shared_secret, self.group_len)
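# A minimal round-trip sketch: two UniformDH instances exchange public strings
# and must derive the same 192-byte shared secret.
if __name__ == '__main__':
    alice, bob = UniformDH(), UniformDH()
    assert alice.get_secret(bob.get_public()) == bob.get_secret(alice.get_public())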
|
bsd-3-clause
|
chrisfranzen/django
|
django/contrib/auth/tokens.py
|
296
|
2631
|
from datetime import date
from django.conf import settings
from django.utils.http import int_to_base36, base36_to_int
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils import six
class PasswordResetTokenGenerator(object):
"""
Strategy object used to generate and check tokens for the password
reset mechanism.
"""
def make_token(self, user):
"""
Returns a token that can be used once to do a password reset
for the given user.
"""
return self._make_token_with_timestamp(user, self._num_days(self._today()))
def check_token(self, user, token):
"""
Check that a password reset token is correct for a given user.
"""
# Parse the token
try:
ts_b36, hash = token.split("-")
except ValueError:
return False
try:
ts = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the timestamp/uid has not been tampered with
if not constant_time_compare(self._make_token_with_timestamp(user, ts), token):
return False
# Check the timestamp is within limit
if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS:
return False
return True
def _make_token_with_timestamp(self, user, timestamp):
# timestamp is number of days since 2001-1-1. Converted to
# base 36, this gives us a 3 digit string until about 2121
ts_b36 = int_to_base36(timestamp)
# By hashing on the internal state of the user and using state
# that is sure to change (the password salt will change as soon as
# the password is set, at least for current Django auth, and
# last_login will also change), we produce a hash that will be
# invalid as soon as it is used.
# We limit the hash to 20 chars to keep URL short
key_salt = "django.contrib.auth.tokens.PasswordResetTokenGenerator"
# Ensure results are consistent across DB backends
login_timestamp = user.last_login.replace(microsecond=0, tzinfo=None)
value = (six.text_type(user.pk) + user.password +
six.text_type(login_timestamp) + six.text_type(timestamp))
hash = salted_hmac(key_salt, value).hexdigest()[::2]
return "%s-%s" % (ts_b36, hash)
def _num_days(self, dt):
return (dt - date(2001, 1, 1)).days
def _today(self):
# Used for mocking in tests
return date.today()
default_token_generator = PasswordResetTokenGenerator()
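# A minimal test-style sketch (not production code): configure just enough
# Django settings, build a stub user carrying the attributes the generator
# hashes on, and round-trip a token. The stub fields below are assumptions
# chosen to satisfy _make_token_with_timestamp, not a real user model.
if __name__ == '__main__':
    from datetime import datetime
    settings.configure(SECRET_KEY='not-a-real-key', PASSWORD_RESET_TIMEOUT_DAYS=1)
    class StubUser(object):
        pk = 1
        password = 'hashed-password'
        last_login = datetime(2013, 1, 1)
    token = default_token_generator.make_token(StubUser())
    assert default_token_generator.check_token(StubUser(), token)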
|
bsd-3-clause
|
abhikumar22/MYBLOG
|
blg/Lib/site-packages/social_core/backends/base.py
|
1
|
10025
|
import time
from requests import request, ConnectionError
from ..utils import SSLHttpAdapter, module_member, parse_qs, user_agent
from ..exceptions import AuthFailed
class BaseAuth(object):
"""A authentication backend that authenticates the user based on
the provider response"""
name = '' # provider name, it's stored in database
supports_inactive_user = False # Django auth
ID_KEY = None
EXTRA_DATA = None
GET_ALL_EXTRA_DATA = False
REQUIRES_EMAIL_VALIDATION = False
SEND_USER_AGENT = False
SSL_PROTOCOL = None
def __init__(self, strategy, redirect_uri=None):
self.strategy = strategy
self.redirect_uri = redirect_uri
self.data = self.strategy.request_data()
self.redirect_uri = self.strategy.absolute_uri(
self.redirect_uri
)
def setting(self, name, default=None):
"""Return setting value from strategy"""
return self.strategy.setting(name, default=default, backend=self)
def start(self):
if self.uses_redirect():
return self.strategy.redirect(self.auth_url())
else:
return self.strategy.html(self.auth_html())
def complete(self, *args, **kwargs):
return self.auth_complete(*args, **kwargs)
def auth_url(self):
"""Must return redirect URL to auth provider"""
raise NotImplementedError('Implement in subclass')
def auth_html(self):
"""Must return login HTML content returned by provider"""
raise NotImplementedError('Implement in subclass')
def auth_complete(self, *args, **kwargs):
"""Completes loging process, must return user instance"""
raise NotImplementedError('Implement in subclass')
def process_error(self, data):
"""Process data for errors, raise exception if needed.
Call this method on any override of auth_complete."""
pass
def authenticate(self, *args, **kwargs):
"""Authenticate user using social credentials
Authentication is made if this is the correct backend, backend
verification is made by kwargs inspection for current backend
name presence.
"""
# Validate backend and arguments. Require that the Social Auth
# response be passed in as a keyword argument, to make sure we
# don't match the username/password calling conventions of
# authenticate.
if 'backend' not in kwargs or kwargs['backend'].name != self.name or \
'strategy' not in kwargs or 'response' not in kwargs:
return None
self.strategy = kwargs.get('strategy') or self.strategy
self.redirect_uri = kwargs.get('redirect_uri') or self.redirect_uri
self.data = self.strategy.request_data()
kwargs.setdefault('is_new', False)
pipeline = self.strategy.get_pipeline(self)
args, kwargs = self.strategy.clean_authenticate_args(*args, **kwargs)
return self.pipeline(pipeline, *args, **kwargs)
def pipeline(self, pipeline, pipeline_index=0, *args, **kwargs):
out = self.run_pipeline(pipeline, pipeline_index, *args, **kwargs)
if not isinstance(out, dict):
return out
user = out.get('user')
if user:
user.social_user = out.get('social')
user.is_new = out.get('is_new')
return user
def disconnect(self, *args, **kwargs):
pipeline = self.strategy.get_disconnect_pipeline(self)
kwargs['name'] = self.name
kwargs['user_storage'] = self.strategy.storage.user
return self.run_pipeline(pipeline, *args, **kwargs)
def run_pipeline(self, pipeline, pipeline_index=0, *args, **kwargs):
out = kwargs.copy()
out.setdefault('strategy', self.strategy)
out.setdefault('backend', out.pop(self.name, None) or self)
out.setdefault('request', self.strategy.request_data())
out.setdefault('details', {})
if not isinstance(pipeline_index, int) or \
pipeline_index < 0 or \
pipeline_index >= len(pipeline):
pipeline_index = 0
for idx, name in enumerate(pipeline[pipeline_index:]):
out['pipeline_index'] = pipeline_index + idx
func = module_member(name)
result = func(*args, **out) or {}
if not isinstance(result, dict):
return result
out.update(result)
return out
def extra_data(self, user, uid, response, details=None, *args, **kwargs):
"""Return default extra data to store in extra_data field"""
data = {
            # store the last time authentication took place
'auth_time': int(time.time())
}
extra_data_entries = []
if self.GET_ALL_EXTRA_DATA or self.setting('GET_ALL_EXTRA_DATA', False):
extra_data_entries = response.keys()
else:
extra_data_entries = (self.EXTRA_DATA or []) + self.setting('EXTRA_DATA', [])
for entry in extra_data_entries:
if not isinstance(entry, (list, tuple)):
entry = (entry,)
size = len(entry)
if size >= 1 and size <= 3:
if size == 3:
name, alias, discard = entry
elif size == 2:
(name, alias), discard = entry, False
elif size == 1:
name = alias = entry[0]
discard = False
value = response.get(name) or details.get(name)
if discard and not value:
continue
data[alias] = value
return data
def auth_allowed(self, response, details):
"""Return True if the user should be allowed to authenticate, by
default check if email is whitelisted (if there's a whitelist)"""
emails = self.setting('WHITELISTED_EMAILS', [])
domains = self.setting('WHITELISTED_DOMAINS', [])
email = details.get('email')
allowed = True
if email and (emails or domains):
domain = email.split('@', 1)[1]
allowed = email in emails or domain in domains
return allowed
def get_user_id(self, details, response):
"""Return a unique ID for the current user, by default from server
response."""
return response.get(self.ID_KEY)
def get_user_details(self, response):
"""Must return user details in a know internal struct:
{'username': <username if any>,
'email': <user email if any>,
'fullname': <user full name if any>,
'first_name': <user first name if any>,
'last_name': <user last name if any>}
"""
raise NotImplementedError('Implement in subclass')
def get_user_names(self, fullname='', first_name='', last_name=''):
# Avoid None values
fullname = fullname or ''
first_name = first_name or ''
last_name = last_name or ''
if fullname and not (first_name or last_name):
try:
first_name, last_name = fullname.split(' ', 1)
except ValueError:
first_name = first_name or fullname or ''
last_name = last_name or ''
fullname = fullname or ' '.join((first_name, last_name))
return fullname.strip(), first_name.strip(), last_name.strip()
def get_user(self, user_id):
"""
Return user with given ID from the User model used by this backend.
This is called by django.contrib.auth.middleware.
"""
return self.strategy.get_user(user_id)
def continue_pipeline(self, partial):
"""Continue previous halted pipeline"""
return self.strategy.authenticate(self,
pipeline_index=partial.next_step,
*partial.args,
**partial.kwargs)
def auth_extra_arguments(self):
"""Return extra arguments needed on auth process. The defaults can be
overridden by GET parameters."""
extra_arguments = self.setting('AUTH_EXTRA_ARGUMENTS', {}).copy()
extra_arguments.update((key, self.data[key]) for key in extra_arguments
if key in self.data)
return extra_arguments
def uses_redirect(self):
"""Return True if this provider uses redirect url method,
otherwise return false."""
return True
def request(self, url, method='GET', *args, **kwargs):
kwargs.setdefault('headers', {})
if self.setting('VERIFY_SSL') is not None:
kwargs.setdefault('verify', self.setting('VERIFY_SSL'))
kwargs.setdefault('timeout', self.setting('REQUESTS_TIMEOUT') or
self.setting('URLOPEN_TIMEOUT'))
if self.SEND_USER_AGENT and 'User-Agent' not in kwargs['headers']:
kwargs['headers']['User-Agent'] = self.setting('USER_AGENT') or \
user_agent()
try:
if self.SSL_PROTOCOL:
session = SSLHttpAdapter.ssl_adapter_session(self.SSL_PROTOCOL)
response = session.request(method, url, *args, **kwargs)
else:
response = request(method, url, *args, **kwargs)
except ConnectionError as err:
raise AuthFailed(self, str(err))
response.raise_for_status()
return response
def get_json(self, url, *args, **kwargs):
return self.request(url, *args, **kwargs).json()
def get_querystring(self, url, *args, **kwargs):
return parse_qs(self.request(url, *args, **kwargs).text)
def get_key_and_secret(self):
"""Return tuple with Consumer Key and Consumer Secret for current
service provider. Must return (key, secret), order *must* be respected.
"""
return self.setting('KEY'), self.setting('SECRET')
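# A standalone sketch (not part of the backend API) of how extra_data() above
# normalizes EXTRA_DATA entries: a bare name, a (name, alias) pair, and a
# (name, alias, discard) triple all collapse to the same three values.
def _normalize_extra_data_entry(entry):
    if not isinstance(entry, (list, tuple)):
        entry = (entry,)
    if len(entry) == 3:
        return entry[0], entry[1], entry[2]
    if len(entry) == 2:
        return entry[0], entry[1], False
    return entry[0], entry[0], False
# _normalize_extra_data_entry('id') -> ('id', 'id', False)
# _normalize_extra_data_entry(('expires', 'expires_in')) -> ('expires', 'expires_in', False)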
|
gpl-3.0
|
Reflexe/doc_to_pdf
|
Windows/program/python-core-3.5.0/lib/crypt.py
|
104
|
1879
|
"""Wrapper to the POSIX crypt library call and associated functionality."""
import _crypt
import string as _string
from random import SystemRandom as _SystemRandom
from collections import namedtuple as _namedtuple
_saltchars = _string.ascii_letters + _string.digits + './'
_sr = _SystemRandom()
class _Method(_namedtuple('_Method', 'name ident salt_chars total_size')):
"""Class representing a salt method per the Modular Crypt Format or the
legacy 2-character crypt method."""
def __repr__(self):
return '<crypt.METHOD_{}>'.format(self.name)
def mksalt(method=None):
"""Generate a salt for the specified method.
If not specified, the strongest available method will be used.
"""
if method is None:
method = methods[0]
s = '${}$'.format(method.ident) if method.ident else ''
s += ''.join(_sr.choice(_saltchars) for char in range(method.salt_chars))
return s
def crypt(word, salt=None):
"""Return a string representing the one-way hash of a password, with a salt
prepended.
If ``salt`` is not specified or is ``None``, the strongest
available method will be selected and a salt generated. Otherwise,
``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as
returned by ``crypt.mksalt()``.
"""
if salt is None or isinstance(salt, _Method):
salt = mksalt(salt)
return _crypt.crypt(word, salt)
# available salting/crypto methods
METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
METHOD_MD5 = _Method('MD5', '1', 8, 34)
METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
methods = []
for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5):
_result = crypt('', _method)
if _result and len(_result) == _method.total_size:
methods.append(_method)
methods.append(METHOD_CRYPT)
del _result, _method
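# A minimal usage sketch on a POSIX system with SHA-512 crypt support:
# verification re-hashes the password using the stored hash as the salt.
if __name__ == '__main__':
    hashed = crypt('s3kr3t', mksalt(METHOD_SHA512))
    assert crypt('s3kr3t', hashed) == hashed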
|
mpl-2.0
|
brisad/grec
|
setup.py
|
1
|
2031
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import sys
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
user_options = [('tox-args=', 'a', 'Arguments to pass to tox')]
def initialize_options(self):
TestCommand.initialize_options(self)
self.tox_args = ""
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import tox
import shlex
errno = tox.cmdline(args=shlex.split(self.tox_args))
sys.exit(errno)
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
requirements = [
'termcolor'
]
test_requirements = [
'pytest', 'tox'
]
setup(
name='grec',
version='0.2.0',
description='Colorize terminal text with regular expressions.',
long_description=readme + '\n\n' + history,
author='Michael Brennan',
author_email='[email protected]',
url='https://github.com/brisad/grec',
packages=[
'grec',
],
scripts=['scripts/grec'],
package_dir={'grec':
'grec'},
include_package_data=True,
install_requires=requirements,
license="GPL",
zip_safe=False,
keywords='grec',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
test_suite='tests',
tests_require=test_requirements,
cmdclass={'test': Tox}
)
|
gpl-3.0
|
resmo/ansible
|
test/units/module_utils/common/parameters/test_list_no_log_values.py
|
22
|
1187
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.parameters import list_no_log_values
@pytest.fixture
def params():
return {
'secret': 'undercookwovennativity',
'other_secret': 'cautious-slate-makeshift',
'state': 'present',
'value': 5,
}
def test_list_no_log_values(params):
argument_spec = {
'secret': {'type': 'str', 'no_log': True},
'other_secret': {'type': 'str', 'no_log': True},
'state': {'type': 'str'},
'value': {'type': 'int'},
}
result = set(('undercookwovennativity', 'cautious-slate-makeshift'))
assert result == list_no_log_values(argument_spec, params)
def test_list_no_log_values_no_secrets(params):
argument_spec = {
'other_secret': {'type': 'str', 'no_log': False},
'state': {'type': 'str'},
'value': {'type': 'int'},
}
result = set()
assert result == list_no_log_values(argument_spec, params)
|
gpl-3.0
|
Exa-Networks/exaproxy
|
lib/exaproxy/icap/parser.py
|
1
|
5875
|
#!/usr/bin/env python
# encoding: utf-8
from .request import ICAPRequestFactory
from .response import ICAPResponseFactory
from .header import ICAPResponseHeaderFactory
class ICAPParser (object):
ICAPResponseHeaderFactory = ICAPResponseHeaderFactory
ICAPRequestFactory = ICAPRequestFactory
ICAPResponseFactory = ICAPResponseFactory
VERSIONS = ('ICAP/1.0',)
METHODS = ('REQMOD', 'OPTIONS')
HEADERS = ('cache-control', 'connection', 'date', 'trailer', 'upgrade', 'via',
'authorization','allow','from','host','referer','user-agent', 'preview',
'encapsulated','proxy-authenticate','proxy-authorization', 'istag')
def __init__ (self, configuration):
self.configuration = configuration
self.header_factory = self.ICAPResponseHeaderFactory(configuration)
self.request_factory = self.ICAPRequestFactory(configuration)
self.response_factory = self.ICAPResponseFactory(configuration)
def parseRequestLine (self, request_line):
request_parts = request_line.split() if request_line else []
if len(request_parts) == 3:
method, url, version = request_parts
method = method.upper()
version = version.upper()
else:
method, url, version = None, None, None
return method, url, version
def parseResponseLine (self, response_line):
response_parts = response_line.split(' ', 2) if response_line else []
if len(response_parts) == 3:
version, code, status = response_parts
if code.isdigit():
code = int(code)
else:
version, code, status = None, None, None
else:
version, code, status = None, None, None
return version, code, status
def readHeaders (self, request_lines):
headers = {}
for line in request_lines:
if not line:
break
if ':' not in line:
headers = None
break
key, value = line.split(':', 1)
key = key.lower().strip()
value = value.strip()
if key in self.HEADERS or key.startswith('x-'):
headers[key] = value
if key == 'pragma' and ':' in value:
pkey, pvalue = value.split(':', 1)
pkey = pkey.lower().strip()
pvalue = pvalue.strip()
headers.setdefault(key, {})[pkey] = pvalue
return headers
def parseRequest (self, icap_string, http_string):
request_lines = (p for ss in icap_string.split('\r\n') for p in ss.split('\n'))
try:
request_line = request_lines.next()
except StopIteration:
request_line = None
method, url, version = self.parseRequestLine(request_line)
if method in self.METHODS and version in self.VERSIONS:
headers = self.readHeaders(request_lines)
site_name = url.rsplit(',',1)[-1] if ',' in url else 'default'
headers['x-customer-name'] = site_name
else:
headers = None
offsets = self.getOffsets(headers) if headers is not None else []
length, complete = self.getBodyLength(offsets)
if set(('res-hdr', 'res-body')).intersection(dict(offsets)):
headers = None
return self.request_factory.create(method, url, version, headers, icap_string, http_string, offsets, length, complete) if headers else None
def getOffsets (self, headers):
encapsulated_line = headers.get('encapsulated', '')
parts = (p.strip() for p in encapsulated_line.split(',') if '=' in p)
pairs = (p.split('=',1) for p in parts)
offsets = ((k,int(v)) for (k,v) in pairs if v.isdigit())
return sorted(offsets, lambda (_,a), (__,b): 1 if a >= b else -1)
def getBodyLength (self, offsets):
final, offset = offsets[-1] if offsets else ('null-body', 0)
return offset, final == 'null-body'
def splitResponseParts (self, offsets, body_string):
final, offset = offsets[-1] if offsets else (None, None)
if final != 'null-body':
offsets = offsets + [('null-body', len(body_string))]
names = [name for name,offset in offsets]
positions = [offset for name,offset in offsets]
blocks = ((positions[i], positions[i+1]) for i in xrange(len(positions)-1))
strings = (body_string[start:end] for start,end in blocks)
return dict(zip(names, strings))
def parseResponseHeader (self, header_string):
response_lines = (p for ss in header_string.split('\r\n') for p in ss.split('\n'))
try:
response_line = response_lines.next()
except StopIteration:
response_line = None
version, code, status = self.parseResponseLine(response_line)
if version in self.VERSIONS:
headers = self.readHeaders(response_lines)
headers['server'] = 'EXA Proxy 1.0'
else:
headers = {}
offsets = self.getOffsets(headers) if headers is not None else []
length, complete = self.getBodyLength(offsets)
return self.header_factory.create(version, code, status, headers, header_string, offsets, length, complete)
def continueResponse (self, response_header, body_string):
version, code, status = response_header.info
headers = response_header.headers
header_string = response_header.header_string
# split the body string into components
parts = self.splitResponseParts(response_header.offsets, body_string)
response_string = parts.get('res-hdr', '')
request_string = parts.get('req-hdr', '')
if request_string.startswith('CONNECT'):
intercept_string, new_request_string = self.splitResponse(request_string)
if headers.get('x-intercept', '') != 'active' and not new_request_string:
intercept_string = None
else:
request_string = new_request_string
else:
intercept_string = None
body_string = parts.get('res-body', None) if response_string else parts.get('req-body', None)
return self.response_factory.create(version, code, status, headers, header_string, request_string, response_string, body_string, intercept_string)
def splitResponse (self, response_string):
for delimiter in ('\n\n', '\r\n\r\n'):
if delimiter in response_string:
header_string, subheader_string = response_string.split(delimiter, 1)
break
else:
header_string, subheader_string = response_string, ''
return header_string, subheader_string
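# A standalone sketch of the Encapsulated-header bookkeeping performed by
# getOffsets() and getBodyLength() above, without needing a parser instance:
# offsets are sorted by position, and the body is complete when the final
# entry is 'null-body'.
if __name__ == '__main__':
    encapsulated = 'req-hdr=0, null-body=412'
    pairs = (p.strip().split('=', 1) for p in encapsulated.split(',') if '=' in p)
    offsets = sorted(((k, int(v)) for k, v in pairs if v.isdigit()),
                     key=lambda kv: kv[1])
    final, offset = offsets[-1]
    assert (offset, final == 'null-body') == (412, True)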
|
bsd-2-clause
|
alvaroaleman/ansible
|
lib/ansible/modules/network/nxos/nxos_ntp_auth.py
|
12
|
17744
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: nxos_ntp_auth
version_added: "2.2"
short_description: Manages NTP authentication.
description:
- Manages NTP authentication.
extends_documentation_fragment: nxos
author:
- Jason Edelman (@jedelman8)
notes:
- If C(state=absent), the module will attempt to remove the given key configuration.
If a matching key configuration isn't found on the device, the module will fail.
- If C(state=absent) and C(authentication=on), authentication will be turned off.
- If C(state=absent) and C(authentication=off), authentication will be turned on.
options:
key_id:
description:
- Authentication key identifier (numeric).
required: true
md5string:
description:
- MD5 String.
required: true
default: null
auth_type:
description:
- Whether the given md5string is in cleartext or
has been encrypted. If in cleartext, the device
will encrypt it before storing it.
required: false
default: text
choices: ['text', 'encrypt']
trusted_key:
description:
- Whether the given key is required to be supplied by a time source
for the device to synchronize to the time source.
required: false
default: false
choices: ['true', 'false']
authentication:
description:
- Turns NTP authentication on or off.
required: false
default: null
choices: ['on', 'off']
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# Basic NTP authentication configuration
- nxos_ntp_auth:
key_id: 32
md5string: hello
auth_type: text
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"auth_type": "text", "authentication": "off",
"key_id": "32", "md5string": "helloWorld",
"trusted_key": "true"}
existing:
description:
- k/v pairs of existing ntp authentication
type: dict
sample: {"authentication": "off", "trusted_key": "false"}
end_state:
description: k/v pairs of ntp authentication after module execution
returned: always
type: dict
sample: {"authentication": "off", "key_id": "32",
"md5string": "kapqgWjwdg", "trusted_key": "true"}
state:
description: state as sent in from the playbook
returned: always
type: string
sample: "present"
updates:
description: command sent to the device
returned: always
type: list
sample: ["ntp authentication-key 32 md5 helloWorld 0", "ntp trusted-key 32"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import json
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
elif val is not None:
return [val]
else:
return list()
class CustomNetworkConfig(NetworkConfig):
def expand_section(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj.children:
if child in S:
continue
self.expand_section(child, S)
return S
def get_object(self, path):
for item in self.items:
if item.text == path[-1]:
parents = [p.text for p in item.parents]
if parents == path[:-1]:
return item
def to_block(self, section):
return '\n'.join([item.raw for item in section])
def get_section(self, path):
try:
section = self.get_section_objects(path)
return self.to_block(section)
except ValueError:
return list()
def get_section_objects(self, path):
if not isinstance(path, list):
path = [path]
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self.expand_section(obj)
def add(self, lines, parents=None):
"""Adds one or lines of configuration
"""
ancestors = list()
offset = 0
obj = None
## global config command
if not parents:
for line in to_list(lines):
item = ConfigLine(line)
item.raw = line
if item not in self.items:
self.items.append(item)
else:
for index, p in enumerate(parents):
try:
i = index + 1
obj = self.get_section_objects(parents[:i])[0]
ancestors.append(obj)
except ValueError:
# add parent to config
offset = index * self.indent
obj = ConfigLine(p)
obj.raw = p.rjust(len(p) + offset)
if ancestors:
obj.parents = list(ancestors)
ancestors[-1].children.append(obj)
self.items.append(obj)
ancestors.append(obj)
# add child objects
for line in to_list(lines):
# check if child already exists
for child in ancestors[-1].children:
if child.text == line:
break
else:
offset = len(parents) * self.indent
item = ConfigLine(line)
item.raw = line.rjust(len(line) + offset)
item.parents = ancestors
ancestors[-1].children.append(item)
self.items.append(item)
def get_network_module(**kwargs):
try:
return get_module(**kwargs)
except NameError:
return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
config = module.params['config']
if not config:
try:
config = module.get_config()
except AttributeError:
defaults = module.params['include_defaults']
config = module.config.get_config(include_defaults=defaults)
return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
config = get_config(module)
commands = candidate.difference(config)
commands = [str(c).strip() for c in commands]
save_config = module.params['save']
result = dict(changed=False)
if commands:
if not module.check_mode:
try:
module.configure(commands)
except AttributeError:
module.config(commands)
if save_config:
try:
module.config.save_config()
except AttributeError:
module.execute(['copy running-config startup-config'])
result['changed'] = True
result['updates'] = commands
return result
# END OF COMMON CODE
def execute_config_command(commands, module):
try:
module.configure(commands)
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending CLI commands',
error=str(clie), commands=commands)
except AttributeError:
try:
commands.insert(0, 'configure')
module.cli.add_commands(commands, output='config')
module.cli.run_commands()
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending CLI commands',
error=str(clie), commands=commands)
def get_cli_body_ssh(command, response, module):
"""Get response for when transport=cli. This is kind of a hack and mainly
needed because these modules were originally written for NX-API. And
not every command supports "| json" when using cli/ssh. As such, we assume
if | json returns an XML string, it is a valid command, but that the
resource doesn't exist yet. Instead, the output will be a raw string
when issuing commands containing 'show run'.
"""
if 'xml' in response[0]:
body = []
elif 'show run' in command:
body = response
else:
try:
body = [json.loads(response[0])]
except ValueError:
module.fail_json(msg='Command does not support JSON output',
command=command)
return body
def execute_show(cmds, module, command_type=None):
command_type_map = {
'cli_show': 'json',
'cli_show_ascii': 'text'
}
try:
if command_type:
response = module.execute(cmds, command_type=command_type)
else:
response = module.execute(cmds)
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending {0}'.format(cmds),
error=str(clie))
except AttributeError:
try:
if command_type:
command_type = command_type_map.get(command_type)
module.cli.add_commands(cmds, output=command_type)
response = module.cli.run_commands()
else:
module.cli.add_commands(cmds, raw=True)
response = module.cli.run_commands()
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending {0}'.format(cmds),
error=str(clie))
return response
def execute_show_command(command, module, command_type='cli_show'):
if module.params['transport'] == 'cli':
if 'show run' not in command:
command += ' | json'
cmds = [command]
response = execute_show(cmds, module)
body = get_cli_body_ssh(command, response, module)
elif module.params['transport'] == 'nxapi':
cmds = [command]
body = execute_show(cmds, module, command_type=command_type)
return body
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_ntp_auth(module):
command = 'show ntp authentication-status'
body = execute_show_command(command, module)[0]
ntp_auth_str = body['authentication']
if 'enabled' in ntp_auth_str:
ntp_auth = True
else:
ntp_auth = False
return ntp_auth
def get_ntp_trusted_key(module):
trusted_key_list = []
command = 'show run | inc ntp.trusted-key'
trusted_key_str = execute_show_command(
command, module, command_type='cli_show_ascii')[0]
if trusted_key_str:
trusted_keys = trusted_key_str.splitlines()
else:
trusted_keys = []
for line in trusted_keys:
if line:
trusted_key_list.append(str(line.split()[2]))
return trusted_key_list
def get_ntp_auth_key(key_id, module):
authentication_key = {}
command = 'show run | inc ntp.authentication-key.{0}'.format(key_id)
auth_regex = (".*ntp\sauthentication-key\s(?P<key_id>\d+)\s"
"md5\s(?P<md5string>\S+).*")
body = execute_show_command(command, module, command_type='cli_show_ascii')
try:
match_authentication = re.match(auth_regex, body[0], re.DOTALL)
group_authentication = match_authentication.groupdict()
key_id = group_authentication["key_id"]
md5string = group_authentication['md5string']
authentication_key['key_id'] = key_id
authentication_key['md5string'] = md5string
except (AttributeError, TypeError):
authentication_key = {}
return authentication_key
def get_ntp_auth_info(key_id, module):
auth_info = get_ntp_auth_key(key_id, module)
trusted_key_list = get_ntp_trusted_key(module)
auth_power = get_ntp_auth(module)
if key_id in trusted_key_list:
auth_info['trusted_key'] = 'true'
else:
auth_info['trusted_key'] = 'false'
if auth_power:
auth_info['authentication'] = 'on'
else:
auth_info['authentication'] = 'off'
return auth_info
def auth_type_to_num(auth_type):
    if auth_type == 'encrypt':
return '7'
else:
return '0'
def set_ntp_auth_key(key_id, md5string, auth_type, trusted_key, authentication):
ntp_auth_cmds = []
auth_type_num = auth_type_to_num(auth_type)
ntp_auth_cmds.append(
'ntp authentication-key {0} md5 {1} {2}'.format(
key_id, md5string, auth_type_num))
if trusted_key == 'true':
ntp_auth_cmds.append(
'ntp trusted-key {0}'.format(key_id))
elif trusted_key == 'false':
ntp_auth_cmds.append(
'no ntp trusted-key {0}'.format(key_id))
if authentication == 'on':
ntp_auth_cmds.append(
'ntp authenticate')
elif authentication == 'off':
ntp_auth_cmds.append(
'no ntp authenticate')
return ntp_auth_cmds
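# Illustrative output (hypothetical values):
#   set_ntp_auth_key('32', 'secret', 'text', 'true', 'on') returns
#   ['ntp authentication-key 32 md5 secret 0',
#    'ntp trusted-key 32',
#    'ntp authenticate']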
def remove_ntp_auth_key(key_id, md5string, auth_type, trusted_key, authentication):
auth_remove_cmds = []
auth_type_num = auth_type_to_num(auth_type)
auth_remove_cmds.append(
'no ntp authentication-key {0} md5 {1} {2}'.format(
key_id, md5string, auth_type_num))
if authentication == 'on':
auth_remove_cmds.append(
'no ntp authenticate')
elif authentication == 'off':
auth_remove_cmds.append(
'ntp authenticate')
return auth_remove_cmds
def main():
argument_spec = dict(
key_id=dict(required=True, type='str'),
md5string=dict(required=True, type='str'),
auth_type=dict(choices=['text', 'encrypt'], default='text'),
trusted_key=dict(choices=['true', 'false'], default='false'),
authentication=dict(choices=['on', 'off']),
state=dict(choices=['absent', 'present'], default='present'),
)
module = get_network_module(argument_spec=argument_spec,
supports_check_mode=True)
key_id = module.params['key_id']
md5string = module.params['md5string']
auth_type = module.params['auth_type']
trusted_key = module.params['trusted_key']
authentication = module.params['authentication']
state = module.params['state']
args = dict(key_id=key_id, md5string=md5string,
auth_type=auth_type, trusted_key=trusted_key,
authentication=authentication)
changed = False
proposed = dict((k, v) for k, v in args.items() if v is not None)
existing = get_ntp_auth_info(key_id, module)
end_state = existing
delta = dict(set(proposed.items()).difference(existing.items()))
commands = []
if state == 'present':
if delta:
command = set_ntp_auth_key(
key_id, md5string, auth_type, trusted_key, delta.get('authentication'))
if command:
commands.append(command)
elif state == 'absent':
if existing:
auth_toggle = None
if authentication == existing.get('authentication'):
auth_toggle = authentication
command = remove_ntp_auth_key(
key_id, md5string, auth_type, trusted_key, auth_toggle)
if command:
commands.append(command)
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
try:
execute_config_command(cmds, module)
except ShellError:
clie = get_exception()
module.fail_json(msg=str(clie) + ": " + cmds)
end_state = get_ntp_auth_info(key_id, module)
delta = dict(set(end_state.items()).difference(existing.items()))
if delta or (len(existing) != len(end_state)):
changed = True
if 'configure' in cmds:
cmds.pop(0)
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['updates'] = cmds
results['changed'] = changed
results['end_state'] = end_state
module.exit_json(**results)
if __name__ == '__main__':
main()
|
gpl-3.0
|
hi2srihari/crab
|
scikits/crab/metrics/sampling.py
|
10
|
3357
|
"""Utilities for sampling techniques"""
# Author: Marcel Caraciolo <[email protected]>
# License: BSD Style.
import numpy as np
from ..utils import check_random_state
from math import ceil
class SplitSampling(object):
""" Random Split Sampling the dataset into two sets.
Parameters
----------
n : int
Total number of elements in the dataset.
evaluation_fraction : float (default 0.7)
Should be between 0.0 and 1.0 and represent the proportion of
the dataset to include in the training set.
indices : boolean, optional (default False)
Return split with integer indices or boolean mask.
Integer indices are useful when dealing with sparse matrices
that cannot be indexed by boolean masks.
random_state : int or RandomState
Pseudo-random number generator state used for random sampling.
"""
def __init__(self, n, evaluation_fraction=0.7, indices=False,
random_state=None):
self.n = n
self.evaluation_fraction = evaluation_fraction
self.random_state = random_state
self.indices = indices
def split(self, evaluation_fraction=None, indices=False,
random_state=None, permutation=True):
"""
        Randomly split the dataset into two sets.
Parameters
----------
evaluation_fraction : float (default None)
Should be between 0.0 and 1.0 and represent the proportion of
            the dataset to include in the training set. If evaluation_fraction
            is None, the value passed to the constructor is used.
indices : boolean, optional (default False)
Return split with integer indices or boolean mask.
Integer indices are useful when dealing with sparse matrices
that cannot be indexed by boolean masks.
random_state : int or RandomState
Pseudo-random number generator state used for random sampling.
permutation: boolean, optional (default True)
            Set to False to deactivate the random permutation (useful for
            testing).
"""
if evaluation_fraction is not None:
self.evaluation_fraction = evaluation_fraction
if random_state is not None:
self.random_state = random_state
self.indices = indices
rng = self.random_state = check_random_state(self.random_state)
n_train = ceil(self.evaluation_fraction * self.n)
#random partition
permutation = rng.permutation(self.n) if permutation \
else np.arange(self.n)
ind_train = permutation[-n_train:]
ind_ignore = permutation[:-n_train]
if self.indices:
return ind_train, ind_ignore
else:
train_mask = np.zeros(self.n, dtype=np.bool)
train_mask[ind_train] = True
test_mask = np.zeros(self.n, dtype=np.bool)
test_mask[ind_ignore] = True
return train_mask, test_mask
def __repr__(self):
return ('%s(%d, evaluation_fraction=%s, indices=%s, '
'random_state=%d)' % (
self.__class__.__name__,
self.n,
str(self.evaluation_fraction),
self.indices,
self.random_state,
))
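# Minimal usage sketch (illustrative values, not part of the original module):
#
#   sampler = SplitSampling(10, evaluation_fraction=0.7, random_state=0)
#   train_mask, ignore_mask = sampler.split()
#   # train_mask marks ceil(0.7 * 10) = 7 elements; ignore_mask the other 3.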
|
bsd-3-clause
|
valmynd/MediaFetcher
|
src/plugins/youtube_dl/youtube_dl/extractor/twitter.py
|
1
|
18792
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
determine_ext,
dict_get,
ExtractorError,
float_or_none,
int_or_none,
remove_end,
try_get,
xpath_text,
)
from .periscope import PeriscopeIE
class TwitterBaseIE(InfoExtractor):
def _extract_formats_from_vmap_url(self, vmap_url, video_id):
vmap_data = self._download_xml(vmap_url, video_id)
video_url = xpath_text(vmap_data, './/MediaFile').strip()
if determine_ext(video_url) == 'm3u8':
return self._extract_m3u8_formats(
video_url, video_id, ext='mp4', m3u8_id='hls',
entry_protocol='m3u8_native')
return [{
'url': video_url,
}]
@staticmethod
def _search_dimensions_in_video_url(a_format, video_url):
m = re.search(r'/(?P<width>\d+)x(?P<height>\d+)/', video_url)
if m:
a_format.update({
'width': int(m.group('width')),
'height': int(m.group('height')),
})
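    # e.g. (illustrative) a video_url containing '/640x360/' sets
    # a_format['width'] = 640 and a_format['height'] = 360.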
class TwitterCardIE(TwitterBaseIE):
IE_NAME = 'twitter:card'
_VALID_URL = r'https?://(?:www\.)?twitter\.com/i/(?P<path>cards/tfw/v1|videos(?:/tweet)?)/(?P<id>\d+)'
_TESTS = [
{
'url': 'https://twitter.com/i/cards/tfw/v1/560070183650213889',
# MD5 checksums are different in different places
'info_dict': {
'id': '560070183650213889',
'ext': 'mp4',
'title': 'Twitter web player',
'thumbnail': r're:^https?://.*\.jpg$',
'duration': 30.033,
},
},
{
'url': 'https://twitter.com/i/cards/tfw/v1/623160978427936768',
'md5': '7ee2a553b63d1bccba97fbed97d9e1c8',
'info_dict': {
'id': '623160978427936768',
'ext': 'mp4',
'title': 'Twitter web player',
'thumbnail': r're:^https?://.*$',
},
},
{
'url': 'https://twitter.com/i/cards/tfw/v1/654001591733886977',
'md5': 'b6d9683dd3f48e340ded81c0e917ad46',
'info_dict': {
'id': 'dq4Oj5quskI',
'ext': 'mp4',
'title': 'Ubuntu 11.10 Overview',
'description': 'md5:a831e97fa384863d6e26ce48d1c43376',
'upload_date': '20111013',
'uploader': 'OMG! Ubuntu!',
'uploader_id': 'omgubuntu',
},
'add_ie': ['Youtube'],
},
{
'url': 'https://twitter.com/i/cards/tfw/v1/665289828897005568',
'md5': '6dabeaca9e68cbb71c99c322a4b42a11',
'info_dict': {
'id': 'iBb2x00UVlv',
'ext': 'mp4',
'upload_date': '20151113',
'uploader_id': '1189339351084113920',
'uploader': 'ArsenalTerje',
'title': 'Vine by ArsenalTerje',
'timestamp': 1447451307,
},
'add_ie': ['Vine'],
}, {
'url': 'https://twitter.com/i/videos/tweet/705235433198714880',
'md5': '884812a2adc8aaf6fe52b15ccbfa3b88',
'info_dict': {
'id': '705235433198714880',
'ext': 'mp4',
'title': 'Twitter web player',
'thumbnail': r're:^https?://.*',
},
}, {
'url': 'https://twitter.com/i/videos/752274308186120192',
'only_matching': True,
},
]
_API_BASE = 'https://api.twitter.com/1.1'
def _parse_media_info(self, media_info, video_id):
formats = []
for media_variant in media_info.get('variants', []):
media_url = media_variant['url']
if media_url.endswith('.m3u8'):
formats.extend(self._extract_m3u8_formats(media_url, video_id, ext='mp4', m3u8_id='hls'))
elif media_url.endswith('.mpd'):
formats.extend(self._extract_mpd_formats(media_url, video_id, mpd_id='dash'))
else:
tbr = int_or_none(dict_get(media_variant, ('bitRate', 'bitrate')), scale=1000)
a_format = {
'url': media_url,
'format_id': 'http-%d' % tbr if tbr else 'http',
'tbr': tbr,
}
# Reported bitRate may be zero
if not a_format['tbr']:
del a_format['tbr']
self._search_dimensions_in_video_url(a_format, media_url)
formats.append(a_format)
return formats
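    # Illustrative (assumed) media_info shape consumed above:
    #   {'variants': [{'url': 'https://.../video.m3u8'},
    #                 {'url': 'https://.../video.mp4', 'bitrate': 832000}]}
    # The second variant yields format_id 'http-832' with tbr == 832.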
def _extract_mobile_formats(self, username, video_id):
webpage = self._download_webpage(
'https://mobile.twitter.com/%s/status/%s' % (username, video_id),
video_id, 'Downloading mobile webpage',
headers={
# A recent mobile UA is necessary for `gt` cookie
'User-Agent': 'Mozilla/5.0 (Android 6.0.1; Mobile; rv:54.0) Gecko/54.0 Firefox/54.0',
})
main_script_url = self._html_search_regex(
r'<script[^>]+src="([^"]+main\.[^"]+)"', webpage, 'main script URL')
main_script = self._download_webpage(
main_script_url, video_id, 'Downloading main script')
bearer_token = self._search_regex(
r'BEARER_TOKEN\s*:\s*"([^"]+)"',
main_script, 'bearer token')
# https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-show-id
api_data = self._download_json(
'%s/statuses/show/%s.json' % (self._API_BASE, video_id),
video_id, 'Downloading API data',
headers={
'Authorization': 'Bearer ' + bearer_token,
})
media_info = try_get(api_data, lambda o: o['extended_entities']['media'][0]['video_info']) or {}
return self._parse_media_info(media_info, video_id)
def _real_extract(self, url):
path, video_id = re.search(self._VALID_URL, url).groups()
config = None
formats = []
duration = None
urls = [url]
if path.startswith('cards/'):
urls.append('https://twitter.com/i/videos/' + video_id)
for u in urls:
webpage = self._download_webpage(
u, video_id, headers={'Referer': 'https://twitter.com/'})
iframe_url = self._html_search_regex(
r'<iframe[^>]+src="((?:https?:)?//(?:www\.youtube\.com/embed/[^"]+|(?:www\.)?vine\.co/v/\w+/card))"',
webpage, 'video iframe', default=None)
if iframe_url:
return self.url_result(iframe_url)
config = self._parse_json(self._html_search_regex(
r'data-(?:player-)?config="([^"]+)"', webpage,
'data player config', default='{}'),
video_id)
if config.get('source_type') == 'vine':
return self.url_result(config['player_url'], 'Vine')
periscope_url = PeriscopeIE._extract_url(webpage)
if periscope_url:
return self.url_result(periscope_url, PeriscopeIE.ie_key())
video_url = config.get('video_url') or config.get('playlist', [{}])[0].get('source')
if video_url:
if determine_ext(video_url) == 'm3u8':
formats.extend(self._extract_m3u8_formats(video_url, video_id, ext='mp4', m3u8_id='hls'))
else:
f = {
'url': video_url,
}
self._search_dimensions_in_video_url(f, video_url)
formats.append(f)
vmap_url = config.get('vmapUrl') or config.get('vmap_url')
if vmap_url:
formats.extend(
self._extract_formats_from_vmap_url(vmap_url, video_id))
media_info = None
for entity in config.get('status', {}).get('entities', []):
if 'mediaInfo' in entity:
media_info = entity['mediaInfo']
if media_info:
formats.extend(self._parse_media_info(media_info, video_id))
duration = float_or_none(media_info.get('duration', {}).get('nanos'), scale=1e9)
username = config.get('user', {}).get('screen_name')
if username:
formats.extend(self._extract_mobile_formats(username, video_id))
if formats:
title = self._search_regex(r'<title>([^<]+)</title>', webpage, 'title')
thumbnail = config.get('posterImageUrl') or config.get('image_src')
duration = float_or_none(config.get('duration'), scale=1000) or duration
break
if not formats:
headers = {
'Authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw',
'Referer': url,
}
ct0 = self._get_cookies(url).get('ct0')
if ct0:
headers['csrf_token'] = ct0.value
guest_token = self._download_json(
'%s/guest/activate.json' % self._API_BASE, video_id,
'Downloading guest token', data=b'',
headers=headers)['guest_token']
headers['x-guest-token'] = guest_token
self._set_cookie('api.twitter.com', 'gt', guest_token)
config = self._download_json(
'%s/videos/tweet/config/%s.json' % (self._API_BASE, video_id),
video_id, headers=headers)
track = config['track']
vmap_url = track.get('vmapUrl')
if vmap_url:
formats = self._extract_formats_from_vmap_url(vmap_url, video_id)
else:
playback_url = track['playbackUrl']
if determine_ext(playback_url) == 'm3u8':
formats = self._extract_m3u8_formats(
playback_url, video_id, 'mp4',
entry_protocol='m3u8_native', m3u8_id='hls')
else:
formats = [{
'url': playback_url,
}]
title = 'Twitter web player'
thumbnail = config.get('posterImage')
duration = float_or_none(track.get('durationMs'), scale=1000)
self._remove_duplicate_formats(formats)
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'duration': duration,
'formats': formats,
}
class TwitterIE(InfoExtractor):
IE_NAME = 'twitter'
_VALID_URL = r'https?://(?:www\.|m\.|mobile\.)?twitter\.com/(?:i/web|(?P<user_id>[^/]+))/status/(?P<id>\d+)'
_TEMPLATE_URL = 'https://twitter.com/%s/status/%s'
_TEMPLATE_STATUSES_URL = 'https://twitter.com/statuses/%s'
_TESTS = [{
'url': 'https://twitter.com/freethenipple/status/643211948184596480',
'info_dict': {
'id': '643211948184596480',
'ext': 'mp4',
'title': 'FREE THE NIPPLE - FTN supporters on Hollywood Blvd today!',
'thumbnail': r're:^https?://.*\.jpg',
'description': 'FREE THE NIPPLE on Twitter: "FTN supporters on Hollywood Blvd today! http://t.co/c7jHH749xJ"',
'uploader': 'FREE THE NIPPLE',
'uploader_id': 'freethenipple',
'duration': 12.922,
},
}, {
'url': 'https://twitter.com/giphz/status/657991469417025536/photo/1',
'md5': 'f36dcd5fb92bf7057f155e7d927eeb42',
'info_dict': {
'id': '657991469417025536',
'ext': 'mp4',
'title': 'Gifs - tu vai cai tu vai cai tu nao eh capaz disso tu vai cai',
'description': 'Gifs on Twitter: "tu vai cai tu vai cai tu nao eh capaz disso tu vai cai https://t.co/tM46VHFlO5"',
'thumbnail': r're:^https?://.*\.png',
'uploader': 'Gifs',
'uploader_id': 'giphz',
},
'expected_warnings': ['height', 'width'],
'skip': 'Account suspended',
}, {
'url': 'https://twitter.com/starwars/status/665052190608723968',
'info_dict': {
'id': '665052190608723968',
'ext': 'mp4',
'title': 'Star Wars - A new beginning is coming December 18. Watch the official 60 second #TV spot for #StarWars: #TheForceAwakens.',
'description': 'Star Wars on Twitter: "A new beginning is coming December 18. Watch the official 60 second #TV spot for #StarWars: #TheForceAwakens."',
'uploader_id': 'starwars',
'uploader': 'Star Wars',
},
}, {
'url': 'https://twitter.com/BTNBrentYarina/status/705235433198714880',
'info_dict': {
'id': '705235433198714880',
'ext': 'mp4',
'title': 'Brent Yarina - Khalil Iverson\'s missed highlight dunk. And made highlight dunk. In one highlight.',
'description': 'Brent Yarina on Twitter: "Khalil Iverson\'s missed highlight dunk. And made highlight dunk. In one highlight."',
'uploader_id': 'BTNBrentYarina',
'uploader': 'Brent Yarina',
},
'params': {
# The same video as https://twitter.com/i/videos/tweet/705235433198714880
# Test case of TwitterCardIE
'skip_download': True,
},
}, {
'url': 'https://twitter.com/jaydingeer/status/700207533655363584',
'info_dict': {
'id': '700207533655363584',
'ext': 'mp4',
'title': 'JG - BEAT PROD: @suhmeduh #Damndaniel',
'description': 'JG on Twitter: "BEAT PROD: @suhmeduh https://t.co/HBrQ4AfpvZ #Damndaniel https://t.co/byBooq2ejZ"',
'thumbnail': r're:^https?://.*\.jpg',
'uploader': 'JG',
'uploader_id': 'jaydingeer',
'duration': 30.0,
},
}, {
'url': 'https://twitter.com/Filmdrunk/status/713801302971588609',
'md5': '89a15ed345d13b86e9a5a5e051fa308a',
'info_dict': {
'id': 'MIOxnrUteUd',
'ext': 'mp4',
'title': 'Vince Mancini - Vine of the day',
'description': 'Vince Mancini on Twitter: "Vine of the day https://t.co/xmTvRdqxWf"',
'uploader': 'Vince Mancini',
'uploader_id': 'Filmdrunk',
'timestamp': 1402826626,
'upload_date': '20140615',
},
'add_ie': ['Vine'],
}, {
'url': 'https://twitter.com/captainamerica/status/719944021058060289',
'info_dict': {
'id': '719944021058060289',
'ext': 'mp4',
'title': 'Captain America - @King0fNerd Are you sure you made the right choice? Find out in theaters.',
'description': 'Captain America on Twitter: "@King0fNerd Are you sure you made the right choice? Find out in theaters. https://t.co/GpgYi9xMJI"',
'uploader_id': 'captainamerica',
'uploader': 'Captain America',
'duration': 3.17,
},
}, {
'url': 'https://twitter.com/OPP_HSD/status/779210622571536384',
'info_dict': {
'id': '1zqKVVlkqLaKB',
'ext': 'mp4',
'title': 'Sgt Kerry Schmidt - LIVE on #Periscope: Road rage, mischief, assault, rollover and fire in one occurrence',
'description': 'Sgt Kerry Schmidt on Twitter: "LIVE on #Periscope: Road rage, mischief, assault, rollover and fire in one occurrence https://t.co/EKrVgIXF3s"',
'upload_date': '20160923',
'uploader_id': 'OPP_HSD',
'uploader': 'Sgt Kerry Schmidt',
'timestamp': 1474613214,
},
'add_ie': ['Periscope'],
}, {
# has mp4 formats via mobile API
'url': 'https://twitter.com/news_al3alm/status/852138619213144067',
'info_dict': {
'id': '852138619213144067',
'ext': 'mp4',
'title': 'عالم الأخبار - كلمة تاريخية بجلسة الجناسي التاريخية.. النائب خالد مؤنس العتيبي للمعارضين : اتقوا الله .. الظلم ظلمات يوم القيامة',
'description': 'عالم الأخبار on Twitter: "كلمة تاريخية بجلسة الجناسي التاريخية.. النائب خالد مؤنس العتيبي للمعارضين : اتقوا الله .. الظلم ظلمات يوم القيامة https://t.co/xg6OhpyKfN"',
'uploader': 'عالم الأخبار',
'uploader_id': 'news_al3alm',
'duration': 277.4,
},
}, {
'url': 'https://twitter.com/i/web/status/910031516746514432',
'info_dict': {
'id': '910031516746514432',
'ext': 'mp4',
'title': 'Préfet de Guadeloupe - [Direct] #Maria Le centre se trouve actuellement au sud de Basse-Terre. Restez confinés. Réfugiez-vous dans la pièce la + sûre.',
'thumbnail': r're:^https?://.*\.jpg',
'description': 'Préfet de Guadeloupe on Twitter: "[Direct] #Maria Le centre se trouve actuellement au sud de Basse-Terre. Restez confinés. Réfugiez-vous dans la pièce la + sûre. https://t.co/mwx01Rs4lo"',
'uploader': 'Préfet de Guadeloupe',
'uploader_id': 'Prefet971',
'duration': 47.48,
},
'params': {
'skip_download': True, # requires ffmpeg
},
}, {
# card via api.twitter.com/1.1/videos/tweet/config
'url': 'https://twitter.com/LisPower1/status/1001551623938805763',
'info_dict': {
'id': '1001551623938805763',
'ext': 'mp4',
'title': 're:.*?Shep is on a roll today.*?',
'thumbnail': r're:^https?://.*\.jpg',
'description': 'md5:63b036c228772523ae1924d5f8e5ed6b',
'uploader': 'Lis Power',
'uploader_id': 'LisPower1',
'duration': 111.278,
},
'params': {
'skip_download': True, # requires ffmpeg
},
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
user_id = mobj.group('user_id')
twid = mobj.group('id')
webpage, urlh = self._download_webpage_handle(
self._TEMPLATE_STATUSES_URL % twid, twid)
if 'twitter.com/account/suspended' in urlh.geturl():
raise ExtractorError('Account suspended by Twitter.', expected=True)
if user_id is None:
mobj = re.match(self._VALID_URL, urlh.geturl())
user_id = mobj.group('user_id')
username = remove_end(self._og_search_title(webpage), ' on Twitter')
title = description = self._og_search_description(webpage).strip('').replace('\n', ' ').strip('“”')
# strip 'https -_t.co_BJYgOjSeGA' junk from filenames
title = re.sub(r'\s+(https?://[^ ]+)', '', title)
info = {
'uploader_id': user_id,
'uploader': username,
'webpage_url': url,
'description': '%s on Twitter: "%s"' % (username, description),
'title': username + ' - ' + title,
}
mobj = re.search(r'''(?x)
<video[^>]+class="animated-gif"(?P<more_info>[^>]+)>\s*
<source[^>]+video-src="(?P<url>[^"]+)"
''', webpage)
if mobj:
more_info = mobj.group('more_info')
height = int_or_none(self._search_regex(
r'data-height="(\d+)"', more_info, 'height', fatal=False))
width = int_or_none(self._search_regex(
r'data-width="(\d+)"', more_info, 'width', fatal=False))
thumbnail = self._search_regex(
r'poster="([^"]+)"', more_info, 'poster', fatal=False)
info.update({
'id': twid,
'url': mobj.group('url'),
'height': height,
'width': width,
'thumbnail': thumbnail,
})
return info
twitter_card_url = None
if 'class="PlayableMedia' in webpage:
twitter_card_url = '%s//twitter.com/i/videos/tweet/%s' % (self.http_scheme(), twid)
else:
twitter_card_iframe_url = self._search_regex(
r'data-full-card-iframe-url=([\'"])(?P<url>(?:(?!\1).)+)\1',
webpage, 'Twitter card iframe URL', default=None, group='url')
if twitter_card_iframe_url:
twitter_card_url = compat_urlparse.urljoin(url, twitter_card_iframe_url)
if twitter_card_url:
info.update({
'_type': 'url_transparent',
'ie_key': 'TwitterCard',
'url': twitter_card_url,
})
return info
raise ExtractorError('There\'s no video in this tweet.')
class TwitterAmplifyIE(TwitterBaseIE):
IE_NAME = 'twitter:amplify'
_VALID_URL = r'https?://amp\.twimg\.com/v/(?P<id>[0-9a-f\-]{36})'
_TEST = {
'url': 'https://amp.twimg.com/v/0ba0c3c7-0af3-4c0a-bed5-7efd1ffa2951',
'md5': '7df102d0b9fd7066b86f3159f8e81bf6',
'info_dict': {
'id': '0ba0c3c7-0af3-4c0a-bed5-7efd1ffa2951',
'ext': 'mp4',
'title': 'Twitter Video',
'thumbnail': 're:^https?://.*',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
vmap_url = self._html_search_meta(
'twitter:amplify:vmap', webpage, 'vmap url')
formats = self._extract_formats_from_vmap_url(vmap_url, video_id)
thumbnails = []
thumbnail = self._html_search_meta(
'twitter:image:src', webpage, 'thumbnail', fatal=False)
def _find_dimension(target):
w = int_or_none(self._html_search_meta(
'twitter:%s:width' % target, webpage, fatal=False))
h = int_or_none(self._html_search_meta(
'twitter:%s:height' % target, webpage, fatal=False))
return w, h
if thumbnail:
thumbnail_w, thumbnail_h = _find_dimension('image')
thumbnails.append({
'url': thumbnail,
'width': thumbnail_w,
'height': thumbnail_h,
})
video_w, video_h = _find_dimension('player')
formats[0].update({
'width': video_w,
'height': video_h,
})
return {
'id': video_id,
'title': 'Twitter Video',
'formats': formats,
'thumbnails': thumbnails,
}
|
gpl-3.0
|
Lujeni/ansible
|
lib/ansible/modules/cloud/cloudstack/cs_network_acl_rule.py
|
13
|
14343
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_network_acl_rule
short_description: Manages network access control list (ACL) rules on Apache CloudStack based clouds.
description:
- Add, update and remove network ACL rules.
version_added: '2.4'
author: René Moser (@resmo)
options:
network_acl:
description:
- Name of the network ACL.
type: str
required: true
aliases: [ acl ]
cidrs:
description:
- CIDRs of the rule.
type: list
default: [ 0.0.0.0/0 ]
aliases: [ cidr ]
rule_position:
description:
- The position of the network ACL rule.
type: int
required: true
aliases: [ number ]
protocol:
description:
      - Protocol of the rule.
choices: [ tcp, udp, icmp, all, by_number ]
type: str
default: tcp
protocol_number:
description:
      - Protocol number from 1 to 256, required if I(protocol=by_number).
type: int
start_port:
description:
- Start port for this rule.
- Considered if I(protocol=tcp) or I(protocol=udp).
type: int
aliases: [ port ]
end_port:
description:
- End port for this rule.
- Considered if I(protocol=tcp) or I(protocol=udp).
      - If not specified, equals I(start_port).
type: int
icmp_type:
description:
- Type of the icmp message being sent.
- Considered if I(protocol=icmp).
type: int
icmp_code:
description:
- Error code for this icmp message.
- Considered if I(protocol=icmp).
type: int
vpc:
description:
- VPC the network ACL is related to.
type: str
required: true
traffic_type:
description:
- Traffic type of the rule.
type: str
choices: [ ingress, egress ]
default: ingress
aliases: [ type ]
action_policy:
description:
- Action policy of the rule.
type: str
choices: [ allow, deny ]
default: allow
aliases: [ action ]
tags:
description:
- List of tags. Tags are a list of dictionaries having keys I(key) and I(value).
- "If you want to delete all tags, set a empty list e.g. I(tags: [])."
type: list
aliases: [ tag ]
domain:
description:
- Domain the VPC is related to.
type: str
account:
description:
- Account the VPC is related to.
type: str
project:
description:
- Name of the project the VPC is related to.
type: str
zone:
description:
      - Name of the zone the VPC is related to.
- If not set, default zone is used.
type: str
state:
description:
- State of the network ACL rule.
type: str
default: present
choices: [ present, absent ]
poll_async:
description:
- Poll async jobs until job has finished.
type: bool
default: yes
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: create a network ACL rule, allow port 80 ingress
cs_network_acl_rule:
network_acl: web
rule_position: 1
vpc: my vpc
traffic_type: ingress
action_policy: allow
port: 80
cidr: 0.0.0.0/0
delegate_to: localhost
- name: create a network ACL rule, deny port range 8000-9000 ingress for 10.20.0.0/16 and 10.22.0.0/16
cs_network_acl_rule:
network_acl: web
rule_position: 1
vpc: my vpc
traffic_type: ingress
action_policy: deny
start_port: 8000
end_port: 9000
cidrs:
- 10.20.0.0/16
- 10.22.0.0/16
delegate_to: localhost
- name: remove a network ACL rule
cs_network_acl_rule:
network_acl: web
rule_position: 1
vpc: my vpc
state: absent
delegate_to: localhost
'''
RETURN = '''
---
network_acl:
description: Name of the network ACL.
returned: success
type: str
sample: customer acl
cidr:
description: CIDR of the network ACL rule.
returned: success
type: str
sample: 0.0.0.0/0
cidrs:
description: CIDRs of the network ACL rule.
returned: success
type: list
sample: [ 0.0.0.0/0 ]
version_added: '2.9'
rule_position:
description: Position of the network ACL rule.
returned: success
type: int
sample: 1
action_policy:
description: Action policy of the network ACL rule.
returned: success
type: str
sample: deny
traffic_type:
description: Traffic type of the network ACL rule.
returned: success
type: str
sample: ingress
protocol:
description: Protocol of the network ACL rule.
returned: success
type: str
sample: tcp
protocol_number:
description: Protocol number in case protocol is by number.
returned: success
type: int
sample: 8
start_port:
description: Start port of the network ACL rule.
returned: success
type: int
sample: 80
end_port:
description: End port of the network ACL rule.
returned: success
type: int
sample: 80
icmp_code:
description: ICMP code of the network ACL rule.
returned: success
type: int
sample: 8
icmp_type:
description: ICMP type of the network ACL rule.
returned: success
type: int
sample: 0
state:
description: State of the network ACL rule.
returned: success
type: str
sample: Active
vpc:
description: VPC of the network ACL.
returned: success
type: str
sample: customer vpc
tags:
description: List of resource tags associated with the network ACL rule.
returned: success
type: list
sample: '[ { "key": "foo", "value": "bar" } ]'
domain:
description: Domain the network ACL rule is related to.
returned: success
type: str
sample: example domain
account:
description: Account the network ACL rule is related to.
returned: success
type: str
sample: example account
project:
description: Name of project the network ACL rule is related to.
returned: success
type: str
sample: Production
zone:
description: Zone the VPC is related to.
returned: success
type: str
sample: ch-gva-2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackNetworkAclRule(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackNetworkAclRule, self).__init__(module)
self.returns = {
'cidrlist': 'cidr',
'action': 'action_policy',
'protocol': 'protocol',
'icmpcode': 'icmp_code',
'icmptype': 'icmp_type',
'number': 'rule_position',
'traffictype': 'traffic_type',
}
        # these values will be cast to int
self.returns_to_int = {
'startport': 'start_port',
'endport': 'end_port',
}
def get_network_acl_rule(self):
args = {
'aclid': self.get_network_acl(key='id'),
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
}
network_acl_rules = self.query_api('listNetworkACLs', **args)
for acl_rule in network_acl_rules.get('networkacl', []):
if acl_rule['number'] == self.module.params.get('rule_position'):
return acl_rule
return None
def present_network_acl_rule(self):
network_acl_rule = self.get_network_acl_rule()
protocol = self.module.params.get('protocol')
start_port = self.module.params.get('start_port')
end_port = self.get_or_fallback('end_port', 'start_port')
icmp_type = self.module.params.get('icmp_type')
icmp_code = self.module.params.get('icmp_code')
if protocol in ['tcp', 'udp'] and (start_port is None or end_port is None):
self.module.fail_json(msg="protocol is %s but the following are missing: start_port, end_port" % protocol)
elif protocol == 'icmp' and (icmp_type is None or icmp_code is None):
self.module.fail_json(msg="protocol is icmp but the following are missing: icmp_type, icmp_code")
elif protocol == 'by_number' and self.module.params.get('protocol_number') is None:
self.module.fail_json(msg="protocol is by_number but the following are missing: protocol_number")
if not network_acl_rule:
network_acl_rule = self._create_network_acl_rule(network_acl_rule)
else:
network_acl_rule = self._update_network_acl_rule(network_acl_rule)
if network_acl_rule:
network_acl_rule = self.ensure_tags(resource=network_acl_rule, resource_type='NetworkACL')
return network_acl_rule
def absent_network_acl_rule(self):
network_acl_rule = self.get_network_acl_rule()
if network_acl_rule:
self.result['changed'] = True
args = {
'id': network_acl_rule['id'],
}
if not self.module.check_mode:
res = self.query_api('deleteNetworkACL', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
self.poll_job(res, 'networkacl')
return network_acl_rule
def _create_network_acl_rule(self, network_acl_rule):
self.result['changed'] = True
protocol = self.module.params.get('protocol')
args = {
'aclid': self.get_network_acl(key='id'),
'action': self.module.params.get('action_policy'),
'protocol': protocol if protocol != 'by_number' else self.module.params.get('protocol_number'),
'startport': self.module.params.get('start_port'),
'endport': self.get_or_fallback('end_port', 'start_port'),
'number': self.module.params.get('rule_position'),
'icmpcode': self.module.params.get('icmp_code'),
'icmptype': self.module.params.get('icmp_type'),
'traffictype': self.module.params.get('traffic_type'),
'cidrlist': self.module.params.get('cidrs'),
}
if not self.module.check_mode:
res = self.query_api('createNetworkACL', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
network_acl_rule = self.poll_job(res, 'networkacl')
return network_acl_rule
def _update_network_acl_rule(self, network_acl_rule):
protocol = self.module.params.get('protocol')
args = {
'id': network_acl_rule['id'],
'action': self.module.params.get('action_policy'),
'protocol': protocol if protocol != 'by_number' else str(self.module.params.get('protocol_number')),
'startport': self.module.params.get('start_port'),
'endport': self.get_or_fallback('end_port', 'start_port'),
'icmpcode': self.module.params.get('icmp_code'),
'icmptype': self.module.params.get('icmp_type'),
'traffictype': self.module.params.get('traffic_type'),
'cidrlist': ",".join(self.module.params.get('cidrs')),
}
if self.has_changed(args, network_acl_rule):
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('updateNetworkACLItem', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
network_acl_rule = self.poll_job(res, 'networkacl')
return network_acl_rule
def get_result(self, network_acl_rule):
super(AnsibleCloudStackNetworkAclRule, self).get_result(network_acl_rule)
if network_acl_rule:
if 'cidrlist' in network_acl_rule:
                self.result['cidrs'] = network_acl_rule['cidrlist'].split(',')  # split() never returns an empty list here
if network_acl_rule['protocol'] not in ['tcp', 'udp', 'icmp', 'all']:
self.result['protocol_number'] = int(network_acl_rule['protocol'])
self.result['protocol'] = 'by_number'
self.result['action_policy'] = self.result['action_policy'].lower()
self.result['traffic_type'] = self.result['traffic_type'].lower()
return self.result
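    # e.g. (illustrative) an API rule with protocol '47' is reported back as
    # protocol='by_number' with protocol_number=47.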
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
network_acl=dict(required=True, aliases=['acl']),
rule_position=dict(required=True, type='int', aliases=['number']),
vpc=dict(required=True),
cidrs=dict(type='list', default=['0.0.0.0/0'], aliases=['cidr']),
protocol=dict(choices=['tcp', 'udp', 'icmp', 'all', 'by_number'], default='tcp'),
protocol_number=dict(type='int'),
traffic_type=dict(choices=['ingress', 'egress'], aliases=['type'], default='ingress'),
action_policy=dict(choices=['allow', 'deny'], aliases=['action'], default='allow'),
icmp_type=dict(type='int'),
icmp_code=dict(type='int'),
start_port=dict(type='int', aliases=['port']),
end_port=dict(type='int'),
state=dict(choices=['present', 'absent'], default='present'),
zone=dict(),
domain=dict(),
account=dict(),
project=dict(),
tags=dict(type='list', aliases=['tag']),
poll_async=dict(type='bool', default=True),
))
required_together = cs_required_together()
required_together.extend([
['icmp_type', 'icmp_code'],
])
module = AnsibleModule(
argument_spec=argument_spec,
        required_together=required_together,
mutually_exclusive=(
['icmp_type', 'start_port'],
['icmp_type', 'end_port'],
),
supports_check_mode=True
)
acs_network_acl_rule = AnsibleCloudStackNetworkAclRule(module)
state = module.params.get('state')
if state == 'absent':
network_acl_rule = acs_network_acl_rule.absent_network_acl_rule()
else:
network_acl_rule = acs_network_acl_rule.present_network_acl_rule()
result = acs_network_acl_rule.get_result(network_acl_rule)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
ApuliaSoftware/odoo
|
addons/account/wizard/account_move_line_unreconcile_select.py
|
385
|
1864
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_move_line_unreconcile_select(osv.osv_memory):
_name = "account.move.line.unreconcile.select"
_description = "Unreconciliation"
_columns ={
'account_id': fields.many2one('account.account','Account',required=True),
}
def action_open_window(self, cr, uid, ids, context=None):
data = self.read(cr, uid, ids, context=context)[0]
return {
'domain': "[('account_id','=',%d),('reconcile_id','<>',False),('state','<>','draft')]" % data['account_id'],
'name': 'Unreconciliation',
'view_type': 'form',
'view_mode': 'tree,form',
'view_id': False,
'res_model': 'account.move.line',
'type': 'ir.actions.act_window'
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Fusion-Rom/android_external_chromium_org
|
tools/telemetry/telemetry/core/backends/chrome/inspector_console.py
|
27
|
1896
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class InspectorConsole(object):
def __init__(self, inspector_backend):
self._inspector_backend = inspector_backend
self._inspector_backend.RegisterDomain(
'Console',
self._OnNotification,
self._OnClose)
self._message_output_stream = None
self._last_message = None
self._console_enabled = False
def _OnNotification(self, msg):
if msg['method'] == 'Console.messageAdded':
if msg['params']['message']['url'] == 'chrome://newtab/':
return
self._last_message = 'At %s:%i: %s' % (
msg['params']['message']['url'],
msg['params']['message']['line'],
msg['params']['message']['text'])
if self._message_output_stream:
self._message_output_stream.write(
'%s\n' % self._last_message)
elif msg['method'] == 'Console.messageRepeatCountUpdated':
if self._message_output_stream:
self._message_output_stream.write(
'%s\n' % self._last_message)
def _OnClose(self):
pass
# False positive in PyLint 0.25.1: http://www.logilab.org/89092
@property
def message_output_stream(self): # pylint: disable=E0202
return self._message_output_stream
@message_output_stream.setter
def message_output_stream(self, stream): # pylint: disable=E0202
self._message_output_stream = stream
self._UpdateConsoleEnabledState()
def _UpdateConsoleEnabledState(self):
    enabled = self._message_output_stream is not None
if enabled == self._console_enabled:
return
if enabled:
method_name = 'enable'
else:
method_name = 'disable'
self._inspector_backend.SyncRequest({
'method': 'Console.%s' % method_name
})
self._console_enabled = enabled
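# Illustrative notification handled by _OnNotification (assumed shape):
#   {'method': 'Console.messageAdded',
#    'params': {'message': {'url': 'http://example.com/app.js',
#                           'line': 12, 'text': 'boom'}}}
# writes the line: 'At http://example.com/app.js:12: boom'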
|
bsd-3-clause
|
vabs22/zulip
|
zerver/migrations/0053_emailchangestatus.py
|
19
|
1109
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-23 05:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('zerver', '0052_auto_fix_realmalias_realm_nullable'),
]
operations = [
migrations.CreateModel(
name='EmailChangeStatus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('new_email', models.EmailField(max_length=254)),
('old_email', models.EmailField(max_length=254)),
('updated_at', models.DateTimeField(auto_now=True)),
('status', models.IntegerField(default=0)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
apache-2.0
|
junmin-zhu/chromium-rivertrail
|
chrome/test/pyautolib/chromeos/suid_actions.py
|
70
|
5373
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helper script to perform actions as a super-user on ChromeOS.
Needs to be run with superuser privileges, typically using the
suid_python binary.
Usage:
sudo python suid_actions.py --action=CleanFlimflamDirs
"""
import optparse
import os
import shutil
import subprocess
import sys
import time
sys.path.append('/usr/local') # to import autotest libs.
from autotest.cros import constants
from autotest.cros import cryptohome
TEMP_BACKCHANNEL_FILE = '/tmp/pyauto_network_backchannel_file'
class SuidAction(object):
"""Helper to perform some super-user actions on ChromeOS."""
def _ParseArgs(self):
parser = optparse.OptionParser()
parser.add_option(
'-a', '--action', help='Action to perform.')
self._options = parser.parse_args()[0]
if not self._options.action:
raise RuntimeError('No action specified.')
def Run(self):
self._ParseArgs()
assert os.geteuid() == 0, 'Needs superuser privileges.'
handler = getattr(self, self._options.action)
assert handler and callable(handler), \
'No handler for %s' % self._options.action
handler()
return 0
## Actions ##
def CleanFlimflamDirs(self):
"""Clean the contents of all connection manager (shill/flimflam) profiles.
"""
flimflam_dirs = ['/home/chronos/user/flimflam',
'/home/chronos/user/shill',
'/var/cache/flimflam',
'/var/cache/shill']
    # The stop/start flimflam command should stop/start shill respectively if
# enabled.
os.system('stop flimflam')
try:
for flimflam_dir in flimflam_dirs:
if not os.path.exists(flimflam_dir):
continue
for item in os.listdir(flimflam_dir):
path = os.path.join(flimflam_dir, item)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
finally:
os.system('start flimflam')
# TODO(stanleyw): crosbug.com/29421 This method should wait until
# flimflam/shill is fully initialized and accessible via DBus again.
# Otherwise, there is a race conditions and subsequent accesses to
# flimflam/shill may fail. Until this is fixed, waiting for the
# resolv.conf file to be created is better than nothing.
begin = time.time()
while not os.path.exists(constants.RESOLV_CONF_FILE):
if time.time() - begin > 10:
raise RuntimeError('Timeout while waiting for flimflam/shill start.')
time.sleep(.25)
def RemoveAllCryptohomeVaults(self):
"""Remove any existing cryptohome vaults."""
cryptohome.remove_all_vaults()
def _GetEthInterfaces(self):
"""Returns a list of the eth* interfaces detected by the device."""
# Assumes ethernet interfaces all have "eth" in the name.
import pyudev
return sorted([iface.sys_name for iface in
pyudev.Context().list_devices(subsystem='net')
if 'eth' in iface.sys_name])
def _Renameif(self, old_iface, new_iface, mac_address):
"""Renames the interface with mac_address from old_iface to new_iface.
Args:
old_iface: The name of the interface you want to change.
new_iface: The name of the interface you want to change to.
mac_address: The mac address of the interface being changed.
"""
subprocess.call(['stop', 'flimflam'])
subprocess.call(['ifconfig', old_iface, 'down'])
subprocess.call(['nameif', new_iface, mac_address])
subprocess.call(['ifconfig', new_iface, 'up'])
subprocess.call(['start', 'flimflam'])
# Check and make sure interfaces have been renamed
eth_ifaces = self._GetEthInterfaces()
if new_iface not in eth_ifaces:
raise RuntimeError('Interface %s was not renamed to %s' %
(old_iface, new_iface))
elif old_iface in eth_ifaces:
raise RuntimeError('Old iface %s is still present' % old_iface)
def SetupBackchannel(self):
"""Renames the connected ethernet interface to eth_test for offline mode
testing. Does nothing if no connected interface is found.
"""
    # Find the connected ethernet interface; return early if none is found.
for iface in self._GetEthInterfaces():
with open('/sys/class/net/%s/operstate' % iface, 'r') as fp:
if 'up' in fp.read():
eth_iface = iface
break
else:
return
# Write backup file to be used by TeardownBackchannel to restore the
# interface names.
with open(TEMP_BACKCHANNEL_FILE, 'w') as fpw:
with open('/sys/class/net/%s/address' % eth_iface) as fp:
mac_address = fp.read().strip()
fpw.write('%s, %s' % (eth_iface, mac_address))
self._Renameif(eth_iface, 'eth_test', mac_address)
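    # Illustrative backup file content written above (hypothetical MAC):
    #   'eth0, aa:bb:cc:dd:ee:ff'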
def TeardownBackchannel(self):
"""Restores the eth interface names if SetupBackchannel was called."""
if not os.path.isfile(TEMP_BACKCHANNEL_FILE):
return
with open(TEMP_BACKCHANNEL_FILE, 'r') as fp:
      # strip() removes the space written after the comma by SetupBackchannel
      eth_iface, mac_address = [x.strip() for x in fp.read().split(',')]
self._Renameif('eth_test', eth_iface, mac_address)
os.remove(TEMP_BACKCHANNEL_FILE)
if __name__ == '__main__':
sys.exit(SuidAction().Run())
|
bsd-3-clause
|
liangxia/origin
|
vendor/k8s.io/kubernetes/hack/boilerplate/boilerplate_test.py
|
629
|
1362
|
#!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boilerplate
import unittest
import StringIO
import os
import sys
class TestBoilerplate(unittest.TestCase):
"""
Note: run this test from the hack/boilerplate directory.
$ python -m unittest boilerplate_test
"""
def test_boilerplate(self):
os.chdir("test/")
class Args(object):
def __init__(self):
self.filenames = []
self.rootdir = "."
self.boilerplate_dir = "../"
self.verbose = True
# capture stdout
old_stdout = sys.stdout
sys.stdout = StringIO.StringIO()
boilerplate.args = Args()
ret = boilerplate.main()
output = sorted(sys.stdout.getvalue().split())
sys.stdout = old_stdout
self.assertEquals(
output, ['././fail.go', '././fail.py'])
|
apache-2.0
|
pascalchevrel/bedrock
|
bedrock/pocketfeed/api.py
|
4
|
1683
|
import datetime
import re
import requests
from sentry_sdk import capture_exception
from django.conf import settings
from django.utils.timezone import make_aware, utc
def get_articles_data(count=8):
payload = {
'consumer_key': settings.POCKET_CONSUMER_KEY,
'access_token': settings.POCKET_ACCESS_TOKEN,
'count': count,
'detailType': 'complete',
}
try:
resp = requests.post(settings.POCKET_API_URL, json=payload, timeout=5)
resp.raise_for_status()
return resp.json()
except Exception:
capture_exception()
return None
def complete_articles_data(articles):
for _, article in articles:
# id from API should be moved to pocket_id to not conflict w/DB's id
article['pocket_id'] = article['id']
# convert time_shared from unix timestamp to datetime
article['time_shared'] = make_aware(datetime.datetime.fromtimestamp(int(article['time_shared'])), utc)
# remove data points we don't need
del article['comment']
del article['excerpt']
del article['id']
del article['quote']
check_article_image(article)
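# Illustrative (assumed) per-article transformation performed above:
#   {'id': 123, 'time_shared': '1550000000', 'comment': ..., 'excerpt': ..., ...}
#     -> {'pocket_id': 123, 'time_shared': <aware datetime>, ...}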
def check_article_image(article):
"""Determine if external image is available"""
# sanity check to make sure image provided by API actually exists and is https
if article['image_src'] and re.match(r'^https://', article['image_src'], flags=re.I):
try:
resp = requests.get(article['image_src'])
resp.raise_for_status()
except Exception:
capture_exception()
article['image_src'] = None
else:
article['image_src'] = None
|
mpl-2.0
|
philanthropy-u/edx-platform
|
openedx/core/djangoapps/embargo/test_utils.py
|
13
|
2864
|
"""Utilities for writing unit tests that involve course embargos. """
import contextlib
import mock
from django.core.cache import cache
from django.urls import reverse
import pygeoip
from .models import Country, CountryAccessRule, RestrictedCourse
@contextlib.contextmanager
def restrict_course(course_key, access_point="enrollment", disable_access_check=False):
"""Simulate that a course is restricted.
This does two things:
1) Configures country access rules so that the course is restricted.
2) Mocks the GeoIP call so the user appears to be coming
from a country that's blocked from the course.
This is useful for tests that need to verify
that restricted users won't be able to access
particular views.
Arguments:
course_key (CourseKey): The location of the course to block.
Keyword Arguments:
access_point (str): Either "courseware" or "enrollment"
Yields:
str: A URL to the page in the embargo app that explains
why the user was blocked.
Example Usage:
>>> with restrict_course(course_key) as redirect_url:
>>> # The client will appear to be coming from
>>> # an IP address that is blocked.
>>> resp = self.client.get(url)
>>> self.assertRedirects(resp, redirect_url)
"""
# Clear the cache to ensure that previous tests don't interfere
# with this test.
cache.clear()
with mock.patch.object(pygeoip.GeoIP, 'country_code_by_addr') as mock_ip:
# Remove all existing rules for the course
CountryAccessRule.objects.all().delete()
# Create the country object
# Ordinarily, we'd create models for every country,
# but that would slow down the test suite.
country, __ = Country.objects.get_or_create(country='IR')
# Create a model for the restricted course
restricted_course, __ = RestrictedCourse.objects.get_or_create(course_key=course_key)
restricted_course.enroll_msg_key = 'default'
restricted_course.access_msg_key = 'default'
restricted_course.disable_access_check = disable_access_check
restricted_course.save()
# Ensure that there is a blacklist rule for the country
CountryAccessRule.objects.get_or_create(
restricted_course=restricted_course,
country=country,
rule_type='blacklist'
)
# Simulate that the user is coming from the blacklisted country
mock_ip.return_value = 'IR'
# Yield the redirect url so the tests don't need to know
# the embargo messaging URL structure.
redirect_url = reverse(
'embargo:blocked_message',
kwargs={
'access_point': access_point,
'message_key': 'default'
}
)
yield redirect_url
|
agpl-3.0
|
jswope00/GAI
|
lms/djangoapps/verify_student/migrations/0002_auto__add_field_softwaresecurephotoverification_window.py
|
53
|
6710
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SoftwareSecurePhotoVerification.window'
db.add_column('verify_student_softwaresecurephotoverification', 'window',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['reverification.MidcourseReverificationWindow'], null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'SoftwareSecurePhotoVerification.window'
db.delete_column('verify_student_softwaresecurephotoverification', 'window_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'reverification.midcoursereverificationwindow': {
'Meta': {'object_name': 'MidcourseReverificationWindow'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
},
'verify_student.softwaresecurephotoverification': {
'Meta': {'ordering': "['-created_at']", 'object_name': 'SoftwareSecurePhotoVerification'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'error_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'error_msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'face_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_key': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
'receipt_id': ('django.db.models.fields.CharField', [], {'default': "'<function uuid4 at 0x21d4398>'", 'max_length': '255', 'db_index': 'True'}),
'reviewing_service': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'reviewing_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'photo_verifications_reviewed'", 'null': 'True', 'to': "orm['auth.User']"}),
'status': ('model_utils.fields.StatusField', [], {'default': "'created'", 'max_length': '100', u'no_check_for_status': 'True'}),
'status_changed': ('model_utils.fields.MonitorField', [], {'default': 'datetime.datetime.now', u'monitor': "u'status'"}),
'submitted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'window': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['reverification.MidcourseReverificationWindow']", 'null': 'True'})
}
}
complete_apps = ['verify_student']
|
agpl-3.0
|
musically-ut/statsmodels
|
statsmodels/graphics/tests/test_regressionplots.py
|
20
|
9978
|
import numpy as np
import statsmodels.api as sm
from numpy.testing import dec
from statsmodels.graphics.regressionplots import (plot_fit, plot_ccpr,
plot_partregress, plot_regress_exog, abline_plot,
plot_partregress_grid, plot_ccpr_grid, add_lowess,
plot_added_variable, plot_partial_residuals,
plot_ceres_residuals)
from pandas import Series, DataFrame
try:
import matplotlib.pyplot as plt #makes plt available for test functions
have_matplotlib = True
except ImportError:
have_matplotlib = False
pdf_output = False
if pdf_output:
from matplotlib.backends.backend_pdf import PdfPages
pdf = PdfPages("test_regressionplots.pdf")
else:
pdf = None
def close_or_save(pdf, fig):
if pdf_output:
pdf.savefig(fig)
plt.close(fig)
@dec.skipif(not have_matplotlib)
def teardown_module():
plt.close('all')
if pdf_output:
pdf.close()
class TestPlot(object):
def __init__(self):
self.setup() #temp: for testing without nose
def setup(self):
nsample = 100
sig = 0.5
x1 = np.linspace(0, 20, nsample)
x2 = 5 + 3* np.random.randn(nsample)
X = np.c_[x1, x2, np.sin(0.5*x1), (x2-5)**2, np.ones(nsample)]
beta = [0.5, 0.5, 1, -0.04, 5.]
y_true = np.dot(X, beta)
y = y_true + sig * np.random.normal(size=nsample)
exog0 = sm.add_constant(np.c_[x1, x2], prepend=False)
res = sm.OLS(y, exog0).fit()
self.res = res
@dec.skipif(not have_matplotlib)
def test_plot_fit(self):
res = self.res
fig = plot_fit(res, 0, y_true=None)
x0 = res.model.exog[:, 0]
yf = res.fittedvalues
y = res.model.endog
px1, px2 = fig.axes[0].get_lines()[0].get_data()
np.testing.assert_equal(x0, px1)
np.testing.assert_equal(y, px2)
px1, px2 = fig.axes[0].get_lines()[1].get_data()
np.testing.assert_equal(x0, px1)
np.testing.assert_equal(yf, px2)
close_or_save(pdf, fig)
@dec.skipif(not have_matplotlib)
def test_plot_oth(self):
#just test that they run
res = self.res
plot_fit(res, 0, y_true=None)
plot_partregress_grid(res, exog_idx=[0,1])
plot_regress_exog(res, exog_idx=0)
plot_ccpr(res, exog_idx=0)
plot_ccpr_grid(res, exog_idx=[0])
fig = plot_ccpr_grid(res, exog_idx=[0,1])
for ax in fig.axes:
add_lowess(ax)
close_or_save(pdf, fig)
class TestPlotPandas(TestPlot):
def setup(self):
nsample = 100
sig = 0.5
x1 = np.linspace(0, 20, nsample)
x2 = 5 + 3* np.random.randn(nsample)
X = np.c_[x1, x2, np.sin(0.5*x1), (x2-5)**2, np.ones(nsample)]
beta = [0.5, 0.5, 1, -0.04, 5.]
y_true = np.dot(X, beta)
y = y_true + sig * np.random.normal(size=nsample)
exog0 = sm.add_constant(np.c_[x1, x2], prepend=False)
exog0 = DataFrame(exog0, columns=["const", "var1", "var2"])
y = Series(y, name="outcome")
res = sm.OLS(y, exog0).fit()
self.res = res
data = DataFrame(exog0, columns=["const", "var1", "var2"])
data['y'] = y
self.data = data
class TestPlotFormula(TestPlotPandas):
@dec.skipif(not have_matplotlib)
def test_one_column_exog(self):
from statsmodels.formula.api import ols
res = ols("y~var1-1", data=self.data).fit()
plot_regress_exog(res, "var1")
res = ols("y~var1", data=self.data).fit()
plot_regress_exog(res, "var1")
class TestABLine(object):
@classmethod
def setupClass(cls):
np.random.seed(12345)
X = sm.add_constant(np.random.normal(0, 20, size=30))
y = np.dot(X, [25, 3.5]) + np.random.normal(0, 30, size=30)
mod = sm.OLS(y,X).fit()
cls.X = X
cls.y = y
cls.mod = mod
@dec.skipif(not have_matplotlib)
def test_abline_model(self):
fig = abline_plot(model_results=self.mod)
ax = fig.axes[0]
ax.scatter(self.X[:,1], self.y)
close_or_save(pdf, fig)
@dec.skipif(not have_matplotlib)
def test_abline_model_ax(self):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(self.X[:,1], self.y)
fig = abline_plot(model_results=self.mod, ax=ax)
close_or_save(pdf, fig)
@dec.skipif(not have_matplotlib)
def test_abline_ab(self):
mod = self.mod
intercept, slope = mod.params
fig = abline_plot(intercept=intercept, slope=slope)
close_or_save(pdf, fig)
@dec.skipif(not have_matplotlib)
def test_abline_ab_ax(self):
mod = self.mod
intercept, slope = mod.params
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(self.X[:,1], self.y)
fig = abline_plot(intercept=intercept, slope=slope, ax=ax)
close_or_save(pdf, fig)
class TestABLinePandas(TestABLine):
@classmethod
def setupClass(cls):
np.random.seed(12345)
X = sm.add_constant(np.random.normal(0, 20, size=30))
y = np.dot(X, [25, 3.5]) + np.random.normal(0, 30, size=30)
cls.X = X
cls.y = y
X = DataFrame(X, columns=["const", "someX"])
y = Series(y, name="outcome")
mod = sm.OLS(y,X).fit()
cls.mod = mod
class TestAddedVariablePlot(object):
@dec.skipif(not have_matplotlib)
def test_added_variable_poisson(self):
np.random.seed(3446)
n = 100
p = 3
exog = np.random.normal(size=(n, p))
lin_pred = 4 + exog[:, 0] + 0.2*exog[:, 1]**2
expval = np.exp(lin_pred)
endog = np.random.poisson(expval)
model = sm.GLM(endog, exog, family=sm.families.Poisson())
results = model.fit()
for focus_col in 0, 1, 2:
for use_glm_weights in False, True:
for resid_type in "resid_deviance", "resid_response":
weight_str = ["Unweighted", "Weighted"][use_glm_weights]
# Run directly and called as a results method.
for j in 0,1:
if j == 0:
fig = plot_added_variable(results, focus_col,
use_glm_weights=use_glm_weights,
resid_type=resid_type)
ti = "Added variable plot"
else:
fig = results.plot_added_variable(focus_col,
use_glm_weights=use_glm_weights,
resid_type=resid_type)
ti = "Added variable plot (called as method)"
ax = fig.get_axes()[0]
add_lowess(ax)
ax.set_position([0.1, 0.1, 0.8, 0.7])
effect_str = ["Linear effect, slope=1",
"Quadratic effect", "No effect"][focus_col]
ti += "\nPoisson regression\n"
ti += effect_str + "\n"
ti += weight_str + "\n"
ti += "Using '%s' residuals" % resid_type
ax.set_title(ti)
close_or_save(pdf, fig)
class TestPartialResidualPlot(object):
@dec.skipif(not have_matplotlib)
def test_partial_residual_poisson(self):
np.random.seed(3446)
n = 100
p = 3
exog = np.random.normal(size=(n, p))
exog[:, 0] = 1
lin_pred = 4 + exog[:, 1] + 0.2*exog[:, 2]**2
expval = np.exp(lin_pred)
endog = np.random.poisson(expval)
model = sm.GLM(endog, exog, family=sm.families.Poisson())
results = model.fit()
for focus_col in 1, 2:
for j in 0,1:
if j == 0:
fig = plot_partial_residuals(results, focus_col)
else:
fig = results.plot_partial_residuals(focus_col)
ax = fig.get_axes()[0]
add_lowess(ax)
ax.set_position([0.1, 0.1, 0.8, 0.77])
effect_str = ["Intercept", "Linear effect, slope=1",
"Quadratic effect"][focus_col]
ti = "Partial residual plot"
if j == 1:
ti += " (called as method)"
ax.set_title(ti + "\nPoisson regression\n" +
effect_str)
close_or_save(pdf, fig)
class TestCERESPlot(object):
@dec.skipif(not have_matplotlib)
def test_ceres_poisson(self):
np.random.seed(3446)
n = 100
p = 3
exog = np.random.normal(size=(n, p))
exog[:, 0] = 1
lin_pred = 4 + exog[:, 1] + 0.2*exog[:, 2]**2
expval = np.exp(lin_pred)
endog = np.random.poisson(expval)
model = sm.GLM(endog, exog, family=sm.families.Poisson())
results = model.fit()
for focus_col in 1, 2:
for j in 0, 1:
if j == 0:
fig = plot_ceres_residuals(results, focus_col)
else:
fig = results.plot_ceres_residuals(focus_col)
ax = fig.get_axes()[0]
add_lowess(ax)
ax.set_position([0.1, 0.1, 0.8, 0.77])
effect_str = ["Intercept", "Linear effect, slope=1",
"Quadratic effect"][focus_col]
ti = "CERES plot"
if j == 1:
ti += " (called as method)"
ax.set_title(ti + "\nPoisson regression\n" +
effect_str)
close_or_save(pdf, fig)
if __name__ == "__main__":
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb'], exit=False)
|
bsd-3-clause
|
HyperBaton/ansible
|
lib/ansible/modules/network/onyx/onyx_ospf.py
|
21
|
8189
|
#!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_ospf
version_added: "2.5"
author: "Samer Deeb (@samerd)"
short_description: Manage OSPF protocol on Mellanox ONYX network devices
description:
- This module provides declarative management and configuration of OSPF
protocol on Mellanox ONYX network devices.
notes:
- Tested on ONYX 3.6.4000
options:
ospf:
description:
- "OSPF instance number 1-65535"
required: true
router_id:
description:
- OSPF router ID. Required if I(state=present).
interfaces:
description:
- List of interfaces and areas. Required if I(state=present).
suboptions:
name:
description:
- Interface name.
required: true
area:
description:
- OSPF area.
required: true
state:
description:
- OSPF state.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: add ospf router to interface
onyx_ospf:
ospf: 2
router_id: 192.168.8.2
interfaces:
      - name: Eth1/1
        area: 0.0.0.0
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- router ospf 2
- router-id 192.168.8.2
- exit
- interface ethernet 1/1 ip ospf area 0.0.0.0
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
from ansible.module_utils.network.onyx.onyx import show_cmd
class OnyxOspfModule(BaseOnyxModule):
OSPF_IF_REGEX = re.compile(
r'^(Loopback\d+|Eth\d+\/\d+|Vlan\d+|Po\d+)\s+(\S+).*')
OSPF_ROUTER_REGEX = re.compile(r'^Routing Process (\d+).*ID\s+(\S+).*')
@classmethod
def _get_element_spec(cls):
interface_spec = dict(
name=dict(required=True),
area=dict(required=True),
)
element_spec = dict(
ospf=dict(type='int', required=True),
router_id=dict(),
interfaces=dict(type='list', elements='dict',
options=interface_spec),
state=dict(choices=['present', 'absent'], default='present'),
)
return element_spec
def init_module(self):
""" Ansible module initialization
"""
element_spec = self._get_element_spec()
argument_spec = dict()
argument_spec.update(element_spec)
self._module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
def validate_ospf(self, value):
if value and not 1 <= int(value) <= 65535:
self._module.fail_json(msg='ospf id must be between 1 and 65535')
def get_required_config(self):
module_params = self._module.params
self._required_config = dict(
ospf=module_params['ospf'],
router_id=module_params['router_id'],
state=module_params['state'],
)
interfaces = module_params['interfaces'] or list()
req_interfaces = self._required_config['interfaces'] = dict()
for interface_data in interfaces:
req_interfaces[interface_data['name']] = interface_data['area']
self.validate_param_values(self._required_config)
def _update_ospf_data(self, ospf_data):
match = self.OSPF_ROUTER_REGEX.match(ospf_data)
if match:
ospf_id = int(match.group(1))
router_id = match.group(2)
self._current_config['ospf'] = ospf_id
self._current_config['router_id'] = router_id
def _update_ospf_interfaces(self, ospf_interfaces):
interfaces = self._current_config['interfaces'] = dict()
lines = ospf_interfaces.split('\n')
for line in lines:
line = line.strip()
match = self.OSPF_IF_REGEX.match(line)
if match:
name = match.group(1)
area = match.group(2)
for prefix in ("Vlan", "Loopback"):
if name.startswith(prefix):
name = name.replace(prefix, prefix + ' ')
interfaces[name] = area
def _get_ospf_config(self, ospf_id):
cmd = 'show ip ospf %s | include Process' % ospf_id
return show_cmd(self._module, cmd, json_fmt=False, fail_on_error=False)
def _get_ospf_interfaces_config(self, ospf_id):
cmd = 'show ip ospf interface %s brief' % ospf_id
return show_cmd(self._module, cmd, json_fmt=False, fail_on_error=False)
def load_current_config(self):
# called in base class in run function
ospf_id = self._required_config['ospf']
self._current_config = dict()
ospf_data = self._get_ospf_config(ospf_id)
if ospf_data:
self._update_ospf_data(ospf_data)
ospf_interfaces = self._get_ospf_interfaces_config(ospf_id)
if ospf_interfaces:
self._update_ospf_interfaces(ospf_interfaces)
def _generate_no_ospf_commands(self):
req_ospf_id = self._required_config['ospf']
curr_ospf_id = self._current_config.get('ospf')
if curr_ospf_id == req_ospf_id:
cmd = 'no router ospf %s' % req_ospf_id
self._commands.append(cmd)
def _get_interface_command_name(self, if_name):
if if_name.startswith('Eth'):
return if_name.replace("Eth", "ethernet ")
if if_name.startswith('Po'):
return if_name.replace("Po", "port-channel ")
if if_name.startswith('Vlan'):
return if_name.replace("Vlan", "vlan")
if if_name.startswith('Loopback'):
return if_name.replace("Loopback", "loopback")
self._module.fail_json(
msg='invalid interface name: %s' % if_name)
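    # For illustration (names as matched by OSPF_IF_REGEX above): 'Eth1/1'
    # maps to 'ethernet 1/1' and 'Po10' to 'port-channel 10' (the replacement
    # string supplies the space), while 'Vlan 10' and 'Loopback 1' only
    # lowercase the prefix, since _update_ospf_interfaces already inserted
    # the space when parsing the device output.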
def _get_interface_area_cmd(self, if_name, area):
interface_prefix = self._get_interface_command_name(if_name)
if area:
area_cmd = 'ip ospf area %s' % area
else:
area_cmd = 'no ip ospf area'
cmd = 'interface %s %s' % (interface_prefix, area_cmd)
return cmd
def _generate_ospf_commands(self):
req_router_id = self._required_config['router_id']
req_ospf_id = self._required_config['ospf']
curr_router_id = self._current_config.get('router_id')
curr_ospf_id = self._current_config.get('ospf')
if curr_ospf_id != req_ospf_id or req_router_id != curr_router_id:
cmd = 'router ospf %s' % req_ospf_id
self._commands.append(cmd)
if req_router_id != curr_router_id:
if req_router_id:
cmd = 'router-id %s' % req_router_id
else:
cmd = 'no router-id'
self._commands.append(cmd)
self._commands.append('exit')
req_interfaces = self._required_config['interfaces']
curr_interfaces = self._current_config.get('interfaces', dict())
for if_name, area in iteritems(req_interfaces):
curr_area = curr_interfaces.get(if_name)
if curr_area != area:
cmd = self._get_interface_area_cmd(if_name, area)
self._commands.append(cmd)
for if_name in curr_interfaces:
if if_name not in req_interfaces:
cmd = self._get_interface_area_cmd(if_name, None)
self._commands.append(cmd)
def generate_commands(self):
req_state = self._required_config['state']
if req_state == 'absent':
return self._generate_no_ospf_commands()
return self._generate_ospf_commands()
def main():
""" main entry point for module execution
"""
OnyxOspfModule.main()
if __name__ == '__main__':
main()
|
gpl-3.0
|
vladikoff/fxa-mochitest
|
tests/mozbase/mozdevice/sut_tests/dmunit.py
|
5
|
1720
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import types
import unittest
import mozlog
from mozdevice import devicemanager
from mozdevice import devicemanagerSUT
ip = ''
port = 0
heartbeat_port = 0
log_level = mozlog.ERROR
class DeviceManagerTestCase(unittest.TestCase):
"""DeviceManager tests should subclass this.
"""
"""Set to False in your derived class if this test
should not be run on the Python agent.
"""
runs_on_test_device = True
def _setUp(self):
""" Override this if you want set-up code in your test."""
return
def setUp(self):
self.dm = devicemanagerSUT.DeviceManagerSUT(host=ip, port=port,
logLevel=log_level)
self.dmerror = devicemanager.DMError
self._setUp()
class DeviceManagerTestLoader(unittest.TestLoader):
def __init__(self, isTestDevice=False):
self.isTestDevice = isTestDevice
def loadTestsFromModuleName(self, module_name):
"""Loads tests from modules unless the SUT is a test device and
the test case has runs_on_test_device set to False
"""
tests = []
module = __import__(module_name)
for name in dir(module):
obj = getattr(module, name)
if (isinstance(obj, (type, types.ClassType)) and
issubclass(obj, unittest.TestCase)) and \
(not self.isTestDevice or obj.runs_on_test_device):
tests.append(self.loadTestsFromTestCase(obj))
return self.suiteClass(tests)
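# A minimal usage sketch (the module name 'test_example' is hypothetical):
# build a loader targeting a test device, collect its cases, and run them.
#
#   loader = DeviceManagerTestLoader(isTestDevice=True)
#   suite = loader.loadTestsFromModuleName('test_example')
#   unittest.TextTestRunner(verbosity=2).run(suite)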
|
mpl-2.0
|
glyph/E-Max
|
epywrap.py
|
1
|
13586
|
# Copyright (C) 2012
# See LICENSE.txt for details.
"""
Epytext (and general Python docstring) wrapper
==============================================
Utility for wrapping docstrings in Python; specifically, docstrings in U{Epytext
<http://epydoc.sourceforge.net/manual-epytext.html>} format, or those that are
close enough.
The wrapping herein generally adheres to all the conventions set forth by the
Twisted project U{http://twistedmatrix.com/}.
Currently (obviously) the only supported editor is U{Sublime Text 2
<http://www.sublimetext.com/>} but a sufficiently enterprising individual could
either use this file as a script (no dependencies!) by piping the contents of
the docstring to it, or call L{wrapPythonDocstring} and preserve point position.
"""
from __future__ import unicode_literals
import re
from uuid import uuid4
__all__ = [
"wrapPythonDocstring"
]
def isUnderline(expr):
return bool(re.match("[=]+$", expr) or re.match("[-]+$", expr))
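# For example, given the regexes above: isUnderline("====") and
# isUnderline("----") are True; isUnderline("") and isUnderline("=-=") are
# False.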
def startslist(x):
return (x == '-' or (x.endswith(".") and x[:-1].isdigit()))
class RegularParagraph(object):
otherIndent = ""
def __init__(self, pointTracker, fixedIndent="", hangIndent="",
followIndent=""):
self.words = []
self.fixedIndent = fixedIndent
self.hangIndent = hangIndent
self.followIndent = followIndent
self.more = None
self.pointTracker = pointTracker
self._unwrappedLines = 0
self._headingType = None
self._headingPoints = []
def matchesTag(self, other):
return False
def __nonzero__(self):
return bool(self.words)
def all(self):
while self is not None:
#print self.__class__.__name__
if self:
yield self
self = self.more
def setIsHeading(self, headingType):
self._headingType = headingType
def isHeading(self):
return bool(self._headingType)
def add(self, line):
clean = self.pointTracker.peek(line)
stripped = clean.strip()
if stripped:
self._unwrappedLines += 1
active = self
firstword = list(self.pointTracker.filterWords(line.split()))[0]
if stripped.startswith("@"):
fp = FieldParagraph(pointTracker=self.pointTracker)
fp.words.extend(line.split())
active = self.more = fp
elif isUnderline(stripped) and self._unwrappedLines == 2:
# This paragraph is actually a section heading.
active.setIsHeading(stripped[0])
self._headingPoints = self.pointTracker.extractPoints(line)
# FIXME: should respect leading indentation.
active = self.nextRegular()
elif startslist(firstword):
# Aesthetically I prefer a 2-space indent here, but the
# convention in the codebase seems to be 4 spaces.
LIST_INDENT = 4
# FIXME: this also needs to respect leading indentation so it
# can properly represent nested lists.
hangIndent = self.pointTracker.lengthOf(firstword) + 1
fi = self.fixedIndent
if not (self.words and startslist(self.words[0])):
fi += (" " * LIST_INDENT)
fp = RegularParagraph(
pointTracker=self.pointTracker,
fixedIndent=fi,
hangIndent=" " * hangIndent,
followIndent=self.followIndent,
)
fp.words.extend(line.split())
active = self.more = fp
else:
self.words.extend(line.split())
if stripped.endswith("::"):
active.more = PreFormattedParagraph(
active,
indentBegins=len(clean) - len(clean.lstrip())
)
active = active.more
return active
else:
rawstrip = line.strip()
if rawstrip:
self.words.append(rawstrip)
if len(list(self.pointTracker.filterWords(self.words))):
return self.nextRegular()
return self
def wrap(self, output, indentation, width):
if not self.words:
return
thisLine = self.firstIndent(indentation)
first = True
prevWord = ''
for word in self.words:
if not self.pointTracker.isWord(word):
thisLine += word
continue
if ((prevWord.endswith(".") or prevWord.endswith("?") or
prevWord.endswith("!")) and not prevWord[:-1].isdigit()):
words = prevWord.split(".")[:-1]
if ( len(words) > 1 and
[self.pointTracker.lengthOf(x) for x in words] ==
[1] * len(words) ):
# acronym
spaces = 1
else:
spaces = 2
else:
spaces = 1
prevWord = word
if ( self.pointTracker.lengthOf(thisLine) +
self.pointTracker.lengthOf(word) + spaces <= width ):
if first:
first = not first
else:
thisLine += (" " * spaces)
thisLine += word
else:
output.write(self.pointTracker.scan(thisLine, output.tell()))
output.write("\n")
thisLine = self.restIndent(indentation) + word
output.write(self.pointTracker.scan(thisLine, output.tell()))
output.write("\n")
if self.isHeading():
indentText = self.firstIndent(indentation)
lineSize = self.pointTracker.lengthOf(thisLine) - len(indentText)
output.write(self.pointTracker.scan(
indentText + ''.join(self._headingPoints) +
(self._headingType * lineSize), output.tell()
))
output.write("\n")
def firstIndent(self, indentation):
return indentation + self.fixedIndent
def restIndent(self, indentation):
return (indentation + self.fixedIndent + self.hangIndent +
self.otherIndent)
def genRegular(self):
return RegularParagraph(pointTracker=self.pointTracker,
fixedIndent=self.nextIndent(),
followIndent=self.nextIndent())
def nextRegular(self):
self.more = self.genRegular()
return self.more
def nextIndent(self):
return self.followIndent
class FieldParagraph(RegularParagraph):
otherIndent = " "
def nextIndent(self):
return " "
def matchesTag(self, other):
if isinstance(other, FieldParagraph):
myWords = list(self.pointTracker.filterWords(self.words))
theirWords = list(self.pointTracker.filterWords(other.words))
if ( set([myWords[0], theirWords[0]]) ==
set(["@return:", "@rtype:"]) ):
# matching @return and @rtype fields.
return True
elif len(myWords) > 1 and len(theirWords) > 1:
# matching @param and @type fields.
return myWords[1] == theirWords[1]
return False
else:
return False
class PreFormattedParagraph(object):
def __init__(self, before, indentBegins):
self.lines = []
self.before = before
pointTracker = before.pointTracker
fixedIndent = (before.fixedIndent + before.hangIndent +
before.otherIndent)
self.indentBegins = indentBegins
self.fixedIndent = fixedIndent
self.more = None
self.pointTracker = pointTracker
def matchesTag(self, other):
return False
def add(self, line):
actualLine = self.pointTracker.peek(line)
if actualLine.strip():
if len(actualLine) - len(actualLine.lstrip()) <= self.indentBegins:
next = self.more = self.before.genRegular()
return next.add(line)
self.lines.append(line.rstrip())
else:
self.lines.append(line.strip())
return self
def fixIndentation(self):
while self.lines and not self.lines[0].strip():
self.lines.pop(0)
while self.lines and not self.lines[-1].strip():
self.lines.pop()
if not self.lines:
return
cleanLines = map(self.pointTracker.peek, self.lines)
commonLeadingIndent = min([len(x) - len(x.lstrip()) for x in cleanLines
if x.strip()])
newLines = []
for actualLine, line in zip(cleanLines, self.lines):
if actualLine != line and line[:commonLeadingIndent].strip():
# There's a marker, and it's in the leading whitespace.
# Explicitly reposition the marker at the beginning of the fixed
# indentation.
line = (self.pointTracker.marker +
actualLine[commonLeadingIndent:])
else:
line = line.rstrip()[commonLeadingIndent:]
newLines.append(line)
self.lines = newLines
def wrap(self, output, indentation, width):
# OK, now we know about all the lines we're going to know about.
self.fixIndentation()
for line in self.lines:
if self.pointTracker.peek(line):
output.write(indentation + " " + self.fixedIndent)
output.write(self.pointTracker.scan(line, output.tell()))
output.write("\n")
class PointTracker(object):
"""
Object for keeping track of where the insertion points are.
"""
def __init__(self, point):
self.point = point
self.marker = "{" + unicode(uuid4()) + "}"
self.outPoints = []
def annotate(self, text):
"""
Add point references to a block of text.
"""
return text[:self.point] + self.marker + text[self.point:]
def filterWords(self, words):
for word in words:
if self.isWord(word):
yield self.peek(word)
def isWord(self, text):
"""
Is the given word actually a word, or just an artifact of the
point-tracking process? If it's just the point marker by itself, then
no, it isn't, and don't insert additional whitespace after it.
"""
return not (text == self.marker)
def lengthOf(self, word):
"""
How long would this word be if it didn't have any point-markers in it?
"""
return len(self.peek(word))
def peek(self, word):
"""
What would this word look like if it didn't have any point-markers in
it?
"""
return word.replace(self.marker, "")
def extractPoints(self, text):
"""
Return a C{list} of all point markers contained in the text.
"""
if self.marker in text:
return [self.marker]
return []
def scan(self, text, offset):
"""
Scan some text for point markers, remember them, and remove them.
"""
idx = text.find(self.marker)
if idx == -1:
return text
self.outPoints.append(idx + offset)
return self.peek(text)
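# A short sketch of the PointTracker round trip (values illustrative):
# annotate() embeds the unique marker at the cursor offset, peek() and
# lengthOf() measure text as if the marker were absent, and scan() records
# the marker's final offset while stripping it from the emitted line.
#
#   pt = PointTracker(5)
#   marked = pt.annotate("hello world")    # marker lands after "hello"
#   assert pt.peek(marked) == "hello world"
#   pt.scan(marked, 0)
#   assert pt.outPoints == [5]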
def wrapPythonDocstring(docstring, output, indentation=" ",
width=79, point=0):
"""
Wrap a given Python docstring.
@param docstring: the docstring itself (just the stuff between the quotes).
@type docstring: unicode
@param output: The unicode output file to write the wrapped docstring to.
@type output: L{file}-like (C{write} takes unicode.)
@param indentation: a string (consisting only of spaces) indicating the
amount of space to shift by. Don't adjust this. It's always 4 spaces.
PEP8 says so.
@type indentation: L{unicode}
@param width: The maximum number of characters allowed in a wrapped line.
@type width: L{int}
@param point: The location of the cursor in the text, as an offset from the
beginning of the docstring. If this function is being used from within
a graphical editor, this parameter can be used (in addition to the
return value of this function) to reposition the cursor at the relative
position which the user will expect.
@return: The new location of the cursor.
"""
# TODO: multiple points; usable, for example, for start and end of a
# currently active selection.
pt = PointTracker(point)
start = paragraph = RegularParagraph(pt)
docstring = pt.annotate(docstring)
for line in docstring.split("\n"):
paragraph = paragraph.add(line)
prevp = None
for paragraph in start.all():
if not paragraph.matchesTag(prevp):
output.write("\n")
prevp = paragraph
paragraph.wrap(output, indentation, width)
output.write(indentation)
return pt.outPoints[0]
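# Example usage (a sketch; mirrors the __main__ block below):
#
#   from cStringIO import StringIO
#   out = StringIO()
#   newPoint = wrapPythonDocstring(
#       u"One very long docstring line that should be wrapped at 79 columns.",
#       out, indentation=u"    ", width=79, point=0)
#   # out.getvalue() now holds the wrapped text; newPoint is the cursor's
#   # new offset within it.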
if __name__ == '__main__':
import sys
from cStringIO import StringIO
io = StringIO()
indata = sys.stdin.read()
firstline = [line for line in indata.split("\n") if line][0]
wrapPythonDocstring(indata, io,
indentation=" " * (len(firstline) - len(firstline.lstrip())))
sys.stdout.write(io.getvalue())
sys.stdout.flush()
|
mit
|
owlabs/incubator-airflow
|
tests/contrib/operators/test_dataflow_operator.py
|
1
|
10028
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from airflow.contrib.operators.dataflow_operator import \
DataFlowPythonOperator, DataFlowJavaOperator, \
DataflowTemplateOperator, GoogleCloudBucketHelper
from airflow.version import version
from tests.compat import mock
TASK_ID = 'test-dataflow-operator'
JOB_NAME = 'test-dataflow-pipeline'
TEMPLATE = 'gs://dataflow-templates/wordcount/template_file'
PARAMETERS = {
'inputFile': 'gs://dataflow-samples/shakespeare/kinglear.txt',
'output': 'gs://test/output/my_output'
}
PY_FILE = 'gs://my-bucket/my-object.py'
JAR_FILE = 'example/test.jar'
JOB_CLASS = 'com.test.NotMain'
PY_OPTIONS = ['-m']
DEFAULT_OPTIONS_PYTHON = DEFAULT_OPTIONS_JAVA = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
}
DEFAULT_OPTIONS_TEMPLATE = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
'tempLocation': 'gs://test/temp',
'zone': 'us-central1-f'
}
ADDITIONAL_OPTIONS = {
'output': 'gs://test/output',
'labels': {'foo': 'bar'}
}
TEST_VERSION = 'v{}'.format(version.replace('.', '-').replace('+', '-'))
EXPECTED_ADDITIONAL_OPTIONS = {
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}
}
POLL_SLEEP = 30
GCS_HOOK_STRING = 'airflow.contrib.operators.dataflow_operator.{}'
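# For reference: GCS_HOOK_STRING.format('GoogleCloudBucketHelper') expands to
# 'airflow.contrib.operators.dataflow_operator.GoogleCloudBucketHelper',
# the dotted path targeted by the @mock.patch decorators below.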
class DataFlowPythonOperatorTest(unittest.TestCase):
def setUp(self):
self.dataflow = DataFlowPythonOperator(
task_id=TASK_ID,
py_file=PY_FILE,
job_name=JOB_NAME,
py_options=PY_OPTIONS,
dataflow_default_options=DEFAULT_OPTIONS_PYTHON,
options=ADDITIONAL_OPTIONS,
poll_sleep=POLL_SLEEP)
def test_init(self):
"""Test DataFlowPythonOperator instance is properly initialized."""
self.assertEqual(self.dataflow.task_id, TASK_ID)
self.assertEqual(self.dataflow.job_name, JOB_NAME)
self.assertEqual(self.dataflow.py_file, PY_FILE)
self.assertEqual(self.dataflow.py_options, PY_OPTIONS)
self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
self.assertEqual(self.dataflow.dataflow_default_options,
DEFAULT_OPTIONS_PYTHON)
self.assertEqual(self.dataflow.options,
EXPECTED_ADDITIONAL_OPTIONS)
@mock.patch('airflow.contrib.operators.dataflow_operator.DataFlowHook')
@mock.patch(GCS_HOOK_STRING.format('GoogleCloudBucketHelper'))
def test_exec(self, gcs_hook, dataflow_mock):
"""Test DataFlowHook is created and the right args are passed to
        start_python_dataflow.
"""
start_python_hook = dataflow_mock.return_value.start_python_dataflow
gcs_download_hook = gcs_hook.return_value.google_cloud_to_local
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
expected_options = {
'project': 'test',
'staging_location': 'gs://test/staging',
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}
}
gcs_download_hook.assert_called_once_with(PY_FILE)
start_python_hook.assert_called_once_with(JOB_NAME, expected_options, mock.ANY,
PY_OPTIONS)
self.assertTrue(self.dataflow.py_file.startswith('/tmp/dataflow'))
class DataFlowJavaOperatorTest(unittest.TestCase):
def setUp(self):
self.dataflow = DataFlowJavaOperator(
task_id=TASK_ID,
jar=JAR_FILE,
job_name=JOB_NAME,
job_class=JOB_CLASS,
dataflow_default_options=DEFAULT_OPTIONS_JAVA,
options=ADDITIONAL_OPTIONS,
poll_sleep=POLL_SLEEP)
def test_init(self):
"""Test DataflowTemplateOperator instance is properly initialized."""
self.assertEqual(self.dataflow.task_id, TASK_ID)
self.assertEqual(self.dataflow.job_name, JOB_NAME)
self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
self.assertEqual(self.dataflow.dataflow_default_options,
DEFAULT_OPTIONS_JAVA)
self.assertEqual(self.dataflow.job_class, JOB_CLASS)
self.assertEqual(self.dataflow.jar, JAR_FILE)
self.assertEqual(self.dataflow.options,
EXPECTED_ADDITIONAL_OPTIONS)
@mock.patch('airflow.contrib.operators.dataflow_operator.DataFlowHook')
@mock.patch(GCS_HOOK_STRING.format('GoogleCloudBucketHelper'))
def test_exec(self, gcs_hook, dataflow_mock):
"""Test DataFlowHook is created and the right args are passed to
        start_java_dataflow.
"""
start_java_hook = dataflow_mock.return_value.start_java_dataflow
gcs_download_hook = gcs_hook.return_value.google_cloud_to_local
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
gcs_download_hook.assert_called_once_with(JAR_FILE)
start_java_hook.assert_called_once_with(JOB_NAME, mock.ANY,
mock.ANY, JOB_CLASS)
class DataFlowTemplateOperatorTest(unittest.TestCase):
def setUp(self):
self.dataflow = DataflowTemplateOperator(
task_id=TASK_ID,
template=TEMPLATE,
job_name=JOB_NAME,
parameters=PARAMETERS,
dataflow_default_options=DEFAULT_OPTIONS_TEMPLATE,
poll_sleep=POLL_SLEEP)
def test_init(self):
"""Test DataflowTemplateOperator instance is properly initialized."""
self.assertEqual(self.dataflow.task_id, TASK_ID)
self.assertEqual(self.dataflow.job_name, JOB_NAME)
self.assertEqual(self.dataflow.template, TEMPLATE)
self.assertEqual(self.dataflow.parameters, PARAMETERS)
self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
self.assertEqual(self.dataflow.dataflow_default_options,
DEFAULT_OPTIONS_TEMPLATE)
@mock.patch('airflow.contrib.operators.dataflow_operator.DataFlowHook')
def test_exec(self, dataflow_mock):
"""Test DataFlowHook is created and the right args are passed to
        start_template_dataflow.
"""
start_template_hook = dataflow_mock.return_value.start_template_dataflow
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
expected_options = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
'tempLocation': 'gs://test/temp',
'zone': 'us-central1-f'
}
start_template_hook.assert_called_once_with(JOB_NAME, expected_options,
PARAMETERS, TEMPLATE)
class GoogleCloudBucketHelperTest(unittest.TestCase):
@mock.patch(
'airflow.contrib.operators.dataflow_operator.GoogleCloudBucketHelper.__init__'
)
def test_invalid_object_path(self, mock_parent_init):
# This is just the path of a bucket hence invalid filename
file_name = 'gs://test-bucket'
mock_parent_init.return_value = None
gcs_bucket_helper = GoogleCloudBucketHelper()
gcs_bucket_helper._gcs_hook = mock.Mock()
with self.assertRaises(Exception) as context:
gcs_bucket_helper.google_cloud_to_local(file_name)
self.assertEqual(
'Invalid Google Cloud Storage (GCS) object path: {}'.format(file_name),
str(context.exception))
@mock.patch(
'airflow.contrib.operators.dataflow_operator.GoogleCloudBucketHelper.__init__'
)
def test_valid_object(self, mock_parent_init):
file_name = 'gs://test-bucket/path/to/obj.jar'
mock_parent_init.return_value = None
gcs_bucket_helper = GoogleCloudBucketHelper()
gcs_bucket_helper._gcs_hook = mock.Mock()
def _mock_download(bucket, object, filename=None):
text_file_contents = 'text file contents'
with open(filename, 'w') as text_file:
text_file.write(text_file_contents)
return text_file_contents
gcs_bucket_helper._gcs_hook.download.side_effect = _mock_download
local_file = gcs_bucket_helper.google_cloud_to_local(file_name)
self.assertIn('obj.jar', local_file)
@mock.patch(
'airflow.contrib.operators.dataflow_operator.GoogleCloudBucketHelper.__init__'
)
def test_empty_object(self, mock_parent_init):
file_name = 'gs://test-bucket/path/to/obj.jar'
mock_parent_init.return_value = None
gcs_bucket_helper = GoogleCloudBucketHelper()
gcs_bucket_helper._gcs_hook = mock.Mock()
def _mock_download(bucket, object, filename=None):
text_file_contents = ''
with open(filename, 'w') as text_file:
text_file.write(text_file_contents)
return text_file_contents
gcs_bucket_helper._gcs_hook.download.side_effect = _mock_download
with self.assertRaises(Exception) as context:
gcs_bucket_helper.google_cloud_to_local(file_name)
self.assertEqual(
'Failed to download Google Cloud Storage (GCS) object: {}'.format(file_name),
str(context.exception))
|
apache-2.0
|
kasioumis/invenio
|
invenio/legacy/bibcirculation/utils.py
|
13
|
30901
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibCirculation Utils: Auxiliary methods of BibCirculation """
__revision__ = "$Id$"
import datetime
import random
import re
import time
from invenio.legacy.bibrecord import get_fieldvalues
from invenio.utils.url import make_invenio_opener
from invenio.legacy.search_engine import get_field_tags
from invenio.legacy.bibsched.bibtask import task_low_level_submission
from invenio.utils.text import encode_for_xml
from invenio.base.i18n import gettext_set_language
from invenio.config import CFG_SITE_URL, CFG_TMPDIR, CFG_SITE_LANG
import invenio.legacy.bibcirculation.db_layer as db
from invenio.legacy.bibcirculation.config import \
CFG_BIBCIRCULATION_WORKING_DAYS, \
CFG_BIBCIRCULATION_HOLIDAYS, \
CFG_CERN_SITE, \
CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, \
CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF, \
CFG_BIBCIRCULATION_ITEM_STATUS_IN_PROCESS, \
CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING, \
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING, \
CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN, \
CFG_BIBCIRCULATION_LOAN_STATUS_EXPIRED, \
CFG_BIBCIRCULATION_LOAN_STATUS_RETURNED
DICC_REGEXP = re.compile(r"^\{('[^']*': ?('[^']*'|\"[^\"]+\"|[0-9]*|None)(, ?'[^']*': ?('[^']*'|\"[^\"]+\"|[0-9]*|None))*)?\}$")
BIBCIRCULATION_OPENER = make_invenio_opener('BibCirculation')
def search_user(column, string):
if string is not None:
string = string.strip()
if CFG_CERN_SITE == 1:
if column == 'name':
result = db.search_borrower_by_name(string)
else:
if column == 'email':
try:
result = db.search_borrower_by_email(string)
except:
result = ()
else:
try:
result = db.search_borrower_by_ccid(string)
except:
result = ()
if result == ():
from invenio.legacy.bibcirculation.cern_ldap \
import get_user_info_from_ldap
ldap_info = 'busy'
while ldap_info == 'busy':
time.sleep(1)
if column == 'id' or column == 'ccid':
ldap_info = get_user_info_from_ldap(ccid=string)
elif column == 'email':
ldap_info = get_user_info_from_ldap(email=string)
else:
ldap_info = get_user_info_from_ldap(nickname=string)
if len(ldap_info) == 0:
result = ()
else:
try:
name = ldap_info['displayName'][0]
except KeyError:
name = ""
try:
email = ldap_info['mail'][0]
except KeyError:
email = ""
try:
phone = ldap_info['telephoneNumber'][0]
except KeyError:
phone = ""
try:
address = ldap_info['physicalDeliveryOfficeName'][0]
except KeyError:
address = ""
try:
mailbox = ldap_info['postOfficeBox'][0]
except KeyError:
mailbox = ""
try:
ccid = ldap_info['employeeID'][0]
except KeyError:
ccid = ""
try:
db.new_borrower(ccid, name, email, phone,
address, mailbox, '')
except:
pass
result = db.search_borrower_by_ccid(int(ccid))
else:
if column == 'name':
result = db.search_borrower_by_name(string)
elif column == 'email':
result = db.search_borrower_by_email(string)
else:
result = db.search_borrower_by_id(string)
return result
def update_user_info_from_ldap(user_id):
from invenio.legacy.bibcirculation.cern_ldap import get_user_info_from_ldap
ccid = db.get_borrower_ccid(user_id)
ldap_info = get_user_info_from_ldap(ccid=ccid)
if not ldap_info:
result = ()
else:
try:
name = ldap_info['displayName'][0]
except KeyError:
name = ""
try:
email = ldap_info['mail'][0]
except KeyError:
email = ""
try:
phone = ldap_info['telephoneNumber'][0]
except KeyError:
phone = ""
try:
address = ldap_info['physicalDeliveryOfficeName'][0]
except KeyError:
address = ""
try:
mailbox = ldap_info['postOfficeBox'][0]
except KeyError:
mailbox = ""
db.update_borrower(user_id, name, email, phone, address, mailbox)
result = db.search_borrower_by_ccid(int(ccid))
return result
def get_book_cover(isbn):
"""
Retrieve book cover using Amazon web services.
@param isbn: book's isbn
@type isbn: string
@return book cover
"""
from xml.dom import minidom
    # Connect to AWS (call disabled; kept for reference):
    # cover_xml = BIBCIRCULATION_OPENER.open('http://ecs.amazonaws.com/onca/xml' \
    #            '?Service=AWSECommerceService&AWSAccessKeyId=' \
    #            + CFG_BIBCIRCULATION_AMAZON_ACCESS_KEY + \
    #            '&Operation=ItemSearch&Condition=All&' \
    #            'ResponseGroup=Images&SearchIndex=Books&' \
    #            'Keywords=' + isbn)
    cover_xml = ""
# parse XML
try:
xml_img = minidom.parse(cover_xml)
retrieve_book_cover = xml_img.getElementsByTagName('MediumImage')
book_cover = retrieve_book_cover.item(0).firstChild.firstChild.data
except:
book_cover = "%s/img/book_cover_placeholder.gif" % (CFG_SITE_URL)
return book_cover
def book_information_from_MARC(recid):
"""
Retrieve book's information from MARC
@param recid: identify the record. Primary key of bibrec.
@type recid: int
@return tuple with title, year, author, isbn and editor.
"""
    # FIXME: do the same as book_title_from_MARC
book_title = book_title_from_MARC(recid)
book_year = ''.join(get_fieldvalues(recid, "260__c"))
author_tags = ['100__a', '700__a', '721__a']
book_author = ''
for tag in author_tags:
l = get_fieldvalues(recid, tag)
for c in l:
book_author += c + '; '
book_author = book_author[:-2]
l = get_fieldvalues(recid, "020__a")
book_isbn = ''
for isbn in l:
book_isbn += isbn + ', '
book_isbn = book_isbn[:-2]
book_editor = ', '.join(get_fieldvalues(recid, "260__a") + \
get_fieldvalues(recid, "260__b"))
return (book_title, book_year, book_author, book_isbn, book_editor)
def book_title_from_MARC(recid):
"""
Retrieve book's title from MARC
@param recid: identify the record. Primary key of bibrec.
@type recid: int
@return book's title
"""
title_tags = get_field_tags('title')
book_title = ''
i = 0
while book_title == '' and i < len(title_tags):
l = get_fieldvalues(recid, title_tags[i])
for candidate in l:
book_title = book_title + candidate + ': '
i += 1
book_title = book_title[:-2]
return book_title
def update_status_if_expired(loan_id):
"""
Update the loan's status if status is 'expired'.
@param loan_id: identify the loan. Primary key of crcLOAN.
@type loan_id: int
"""
loan_status = db.get_loan_status(loan_id)
if loan_status == CFG_BIBCIRCULATION_LOAN_STATUS_EXPIRED:
db.update_loan_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, loan_id)
return
def get_next_day(date_string):
"""
Get the next day
@param date_string: date
@type date_string: string
    @return next day
"""
# add 1 day
more_1_day = datetime.timedelta(days=1)
# convert date_string to datetime format
tmp_date = time.strptime(date_string, '%Y-%m-%d')
# calculate the new date (next day)
next_day = datetime.datetime(tmp_date[0], tmp_date[1], tmp_date[2]) \
+ more_1_day
return next_day
def generate_new_due_date(days):
"""
Generate a new due date (today + X days = new due date).
@param days: number of days
@type days: string
@return new due date
"""
today = datetime.date.today()
more_X_days = datetime.timedelta(days=days)
tmp_date = today + more_X_days
week_day = tmp_date.strftime('%A')
due_date = tmp_date.strftime('%Y-%m-%d')
due_date_validated = False
while not due_date_validated:
if week_day in CFG_BIBCIRCULATION_WORKING_DAYS \
and due_date not in CFG_BIBCIRCULATION_HOLIDAYS:
due_date_validated = True
else:
next_day = get_next_day(due_date)
due_date = next_day.strftime('%Y-%m-%d')
week_day = next_day.strftime('%A')
return due_date
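# Illustration (dates hypothetical): with weekends excluded from
# CFG_BIBCIRCULATION_WORKING_DAYS, a tentative due date that falls on a
# Saturday rolls forward one day at a time until it lands on the next
# working, non-holiday date:
#   generate_new_due_date(30)  # e.g. '2013-07-06' (Sat) -> '2013-07-08' (Mon)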
def renew_loan_for_X_days(barcode):
"""
Renew a loan based on its loan period
@param barcode: identify the item. Primary key of crcITEM.
@type barcode: string
@return new due date
"""
loan_period = db.get_loan_period(barcode)
if loan_period == '4 weeks':
due_date = generate_new_due_date(30)
else:
due_date = generate_new_due_date(7)
return due_date
def make_copy_available(request_id):
"""
    Change the status of a copy to
    CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF when
    a hold request is cancelled.
@param request_id: identify the request: Primary key of crcLOANREQUEST
@type request_id: int
"""
barcode_requested = db.get_requested_barcode(request_id)
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF, barcode_requested)
update_requests_statuses(barcode_requested)
def print_new_loan_information(req, ln=CFG_SITE_LANG):
"""
Create a printable format with the information of the last
loan who has been registered on the table crcLOAN.
"""
_ = gettext_set_language(ln)
# get the last loan from crcLOAN
(recid, borrower_id, due_date) = db.get_last_loan()
# get book's information
(book_title, book_year, book_author,
book_isbn, book_editor) = book_information_from_MARC(recid)
# get borrower's data/information (name, address, email)
(borrower_name, borrower_address,
borrower_mailbox, borrower_email) = db.get_borrower_data(borrower_id)
# Generate printable format
req.content_type = "text/html"
req.send_http_header()
out = """<table style='width:95%; margin:auto; max-width: 600px;'>"""
out += """
<tr>
<td><img src="%s/img/CERN_CDS_logo.png"></td>
</tr>
</table><br />""" % (CFG_SITE_URL)
out += """<table style='color: #79d; font-size: 82%; width:95%;
margin:auto; max-width: 400px;'>"""
out += """ <tr>
<td align="center">
<h2><strong>%s</strong></h2>
</td>
</tr>""" % (_("Loan information"))
out += """ <tr>
<td align="center"><strong>%s</strong></td>
</tr>""" % (_("This book has been sent to you:"))
out += """</table><br />"""
out += """<table style='color: #79d; font-size: 82%; width:95%;
margin:auto; max-width: 400px;'>"""
out += """ <tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
""" % (_("Title"), book_title,
_("Author"), book_author,
_("Editor"), book_editor,
_("ISBN"), book_isbn,
_("Year"), book_year)
out += """</table><br />"""
out += """<table style='color: #79d; font-size: 82%; width:95%;
margin:auto; max-width: 400px;'>"""
out += """ <tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td>
<td style='color: black;'>%s</td>
</tr>
""" % (_("Name"), borrower_name,
_("Mailbox"), borrower_mailbox,
_("Address"), borrower_address,
_("Email"), borrower_email)
out += """</table>
<br />"""
out += """<table style='color: #79d; font-size: 82%; width:95%;
margin:auto; max-width: 400px;'>"""
out += """ <tr>
<td align="center"><h2><strong>%s: %s</strong></h2></td>
</tr>""" % (_("Due date"), due_date)
out += """</table>"""
out += """<table style='color: #79d; font-size: 82%; width:95%;
margin:auto; max-width: 800px;'>
<tr>
<td>
<input type="button" onClick='window.print()'
value='Print' style='color: #fff;
background: #36c; font-weight: bold;'>
</td>
</tr>
</table>
"""
req.write("<html>")
req.write(out)
req.write("</html>")
return "\n"
def print_pending_hold_requests_information(req, ln):
"""
Create a printable format with all the information about all
pending hold requests.
"""
_ = gettext_set_language(ln)
requests = db.get_pdf_request_data(CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING)
req.content_type = "text/html"
req.send_http_header()
out = """<table style='width:100%; margin:auto; max-width: 1024px;'>"""
out += """
<tr>
<td><img src="%s/img/CERN_CDS_logo.png"></td>
</tr>
</table><br />""" % (CFG_SITE_URL)
out += """<table style='color: #79d; font-size: 82%;
width:95%; margin:auto; max-width: 1024px;'>"""
out += """ <tr>
<td align="center"><h2><strong>%s</strong></h2></td>
</tr>""" % (_("List of pending hold requests"))
out += """ <tr>
<td align="center"><strong>%s</strong></td>
</tr>""" % (time.ctime())
out += """</table><br/>"""
out += """<table style='color: #79d; font-size: 82%;
width:95%; margin:auto; max-width: 1024px;'>"""
out += """<tr>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
</tr>
""" % (_("Borrower"),
_("Item"),
_("Library"),
_("Location"),
_("From"),
_("To"),
_("Request date"))
for (recid, borrower_name, library_name, location,
date_from, date_to, request_date) in requests:
out += """<tr style='color: black;'>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
</tr>
""" % (borrower_name, book_title_from_MARC(recid),
library_name, location, date_from, date_to,
request_date)
out += """</table>
<br />
<br />
<table style='color: #79d; font-size: 82%;
width:95%; margin:auto; max-width: 1024px;'>
<tr>
<td>
<input type=button value='Back' onClick="history.go(-1)"
style='color: #fff; background: #36c;
font-weight: bold;'>
<input type="button" onClick='window.print()'
value='Print' style='color: #fff;
background: #36c; font-weight: bold;'>
</td>
</tr>
</table>"""
req.write("<html>")
req.write(out)
req.write("</html>")
return "\n"
def get_item_info_for_search_result(recid):
"""
Get the item's info from MARC in order to create a
search result with more details
@param recid: identify the record. Primary key of bibrec.
@type recid: int
    @return book's information (author, editor and number of copies)
"""
book_author = ' '.join(get_fieldvalues(recid, "100__a") + \
get_fieldvalues(recid, "100__u"))
book_editor = ' , '.join(get_fieldvalues(recid, "260__a") + \
get_fieldvalues(recid, "260__b") + \
get_fieldvalues(recid, "260__c"))
book_copies = ' '.join(get_fieldvalues(recid, "964__a"))
book_infos = (book_author, book_editor, book_copies)
return book_infos
def update_request_data(request_id):
"""
Update the status of a given request.
@param request_id: identify the request: Primary key of crcLOANREQUEST
@type request_id: int
"""
barcode = db.get_request_barcode(request_id)
is_on_loan = db.is_item_on_loan(barcode)
if is_on_loan is not None:
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN, barcode)
else:
db.update_item_status(CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF, barcode)
update_requests_statuses(barcode)
return True
def compare_dates(date):
"""
Compare given date with today
@param date: given date
@type date: string
@return boolean
"""
if date < time.strftime("%Y-%m-%d"):
return False
else:
return True
def validate_date_format(date):
"""
Verify the date format
@param date: given date
@type date: string
@return boolean
"""
try:
if time.strptime(date, "%Y-%m-%d"):
if compare_dates(date):
return True
else:
return False
except ValueError:
return False
def create_ill_record(book_info):
"""
Create a new ILL record
@param book_info: book's information
@type book_info: tuple
@return MARC record
"""
(title, author, place, publisher, year, edition, isbn) = book_info
ill_record = """
<record>
<datafield tag="020" ind1=" " ind2=" ">
<subfield code="a">%(isbn)s</subfield>
</datafield>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">%(author)s</subfield>
</datafield>
<datafield tag="245" ind1=" " ind2=" ">
<subfield code="a">%(title)s</subfield>
</datafield>
<datafield tag="250" ind1=" " ind2=" ">
<subfield code="a">%(edition)s</subfield>
</datafield>
<datafield tag="260" ind1=" " ind2=" ">
<subfield code="a">%(place)s</subfield>
<subfield code="b">%(publisher)s</subfield>
<subfield code="c">%(year)s</subfield>
</datafield>
<datafield tag="980" ind1=" " ind2=" ">
<subfield code="a">ILLBOOK</subfield>
</datafield>
</record>
""" % {'isbn': encode_for_xml(isbn),
'author': encode_for_xml(author),
'title': encode_for_xml(title),
'edition': encode_for_xml(edition),
'place': encode_for_xml(place),
'publisher': encode_for_xml(publisher),
'year': encode_for_xml(year)}
file_path = '%s/%s_%s.xml' % (CFG_TMPDIR, 'bibcirculation_ill_book',
time.strftime("%Y%m%d_%H%M%S"))
xml_file = open(file_path, 'w')
xml_file.write(ill_record)
xml_file.close()
# Pass XML file to BibUpload.
task_low_level_submission('bibupload', 'bibcirculation',
'-P', '5', '-i', file_path)
return ill_record
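# Note: the MARCXML above is queued for asynchronous insertion through
# bibsched ('-i' is BibUpload's insert mode, '-P 5' the task priority); the
# function returns the XML string itself, not the recid of the new record.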
def wash_recid_from_ILL_request(ill_request_id):
"""
    Get dictionary and wash recid values.
    @param ill_request_id: identify the ILL request. Primary key of crcILLREQUEST
@type ill_request_id: int
@return recid
"""
book_info = db.get_ill_book_info(ill_request_id)
if looks_like_dictionary(book_info):
book_info = eval(book_info)
else:
book_info = None
try:
recid = int(book_info['recid'])
except KeyError:
recid = None
return recid
def all_copies_are_missing(recid):
"""
Verify if all copies of an item are missing
@param recid: identify the record. Primary key of bibrec
@type recid: int
@return boolean
"""
copies_status = db.get_copies_status(recid)
number_of_missing = 0
    if copies_status is None:
return True
else:
for (status) in copies_status:
if status == 'missing':
number_of_missing += 1
if number_of_missing == len(copies_status):
return True
else:
return False
#def has_copies(recid):
# """
# Verify if a recid is item (has copies)
#
# @param recid: identify the record. Primary key of bibrec
# @type recid: int
#
# @return boolean
# """
#
# copies_status = db.get_copies_status(recid)
#
# if copies_status is None:
# return False
# else:
# if len(copies_status) == 0:
# return False
# else:
# return True
def generate_email_body(template, loan_id, ill=0):
"""
Generate the body of an email for loan recalls.
@param template: email template
@type template: string
@param loan_id: identify the loan. Primary key of crcLOAN.
@type loan_id: int
@return email(body)
"""
if ill:
# Inter library loan.
out = template
else:
recid = db.get_loan_recid(loan_id)
(book_title, book_year, book_author,
book_isbn, book_editor) = book_information_from_MARC(int(recid))
out = template % (book_title, book_year, book_author,
book_isbn, book_editor)
return out
def create_item_details_url(recid, ln):
url = '/admin2/bibcirculation/get_item_details?ln=%s&recid=%s' % (ln,
str(recid))
return CFG_SITE_URL + url
def tag_all_requests_as_done(barcode, user_id):
recid = db.get_id_bibrec(barcode)
description = db.get_item_description(barcode)
list_of_barcodes = db.get_barcodes(recid, description)
for bc in list_of_barcodes:
db.tag_requests_as_done(user_id, bc)
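# update_requests_statuses keeps the request queue consistent across all
# copies sharing the same record and description: while some copy is on
# shelf or in process, exactly one request may stay PENDING; otherwise
# pending requests are demoted to WAITING, and the oldest waiting request is
# promoted back to PENDING as soon as a copy frees up.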
def update_requests_statuses(barcode):
recid = db.get_id_bibrec(barcode)
description = db.get_item_description(barcode)
list_of_pending_requests = db.get_requests(recid, description,
CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING)
some_copy_available = False
copies_status = db.get_copies_status(recid, description)
if copies_status is not None:
for status in copies_status:
if status in (CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF,
CFG_BIBCIRCULATION_ITEM_STATUS_IN_PROCESS):
some_copy_available = True
if len(list_of_pending_requests) == 1:
if not some_copy_available:
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING,
list_of_pending_requests[0][0])
else:
return list_of_pending_requests[0][0]
elif len(list_of_pending_requests) == 0:
if some_copy_available:
list_of_waiting_requests = db.get_requests(recid, description,
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING)
if len(list_of_waiting_requests) > 0:
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING,
list_of_waiting_requests[0][0])
return list_of_waiting_requests[0][0]
elif len(list_of_pending_requests) > 1:
for request in list_of_pending_requests:
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING,
request[0])
list_of_waiting_requests = db.get_requests(recid, description,
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING)
if some_copy_available:
db.update_loan_request_status(CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING,
list_of_waiting_requests[0][0])
return list_of_waiting_requests[0][0]
return None
def is_periodical(recid):
rec_type = get_fieldvalues(recid, "690C_a")
if len(rec_type) > 0:
for value in rec_type:
if value == 'PERI':
return True
return False
def has_date_format(date):
    if not isinstance(date, str):
        return False
    date = date.strip()
    if len(date) != 10:
        return False
    elif date[4] != '-' or date[7] != '-':
        return False
else:
year = date[:4]
month = date[5:7]
day = date[8:]
return year.isdigit() and month.isdigit() and day.isdigit()
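# Note: has_date_format checks shape only, not calendar validity --
# has_date_format('2013-02-30') is True, while validate_date_format above
# also rejects impossible dates via time.strptime.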
def generate_tmp_barcode():
tmp_barcode = 'tmp-' + str(random.random())[-8:]
while(db.barcode_in_use(tmp_barcode)):
tmp_barcode = 'tmp-' + str(random.random())[-8:]
return tmp_barcode
def check_database():
from invenio.legacy.dbquery import run_sql
r1 = run_sql(""" SELECT it.barcode, it.status, ln.status
FROM crcITEM it, crcLOAN ln
WHERE ln.barcode=it.barcode
AND it.status=%s
AND ln.status!=%s
AND ln.status!=%s
AND ln.status!=%s
""", (CFG_BIBCIRCULATION_ITEM_STATUS_ON_LOAN,
CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
CFG_BIBCIRCULATION_LOAN_STATUS_EXPIRED,
CFG_BIBCIRCULATION_LOAN_STATUS_RETURNED))
r2 = run_sql(""" SELECT it.barcode
FROM crcITEM it, crcLOAN ln
WHERE ln.barcode=it.barcode
AND it.status=%s
AND (ln.status=%s or ln.status=%s)
""", (CFG_BIBCIRCULATION_ITEM_STATUS_ON_SHELF,
CFG_BIBCIRCULATION_LOAN_STATUS_ON_LOAN,
CFG_BIBCIRCULATION_LOAN_STATUS_EXPIRED))
r3 = run_sql(""" SELECT l1.barcode, l1.id,
DATE_FORMAT(l1.loaned_on,'%%Y-%%m-%%d %%H:%%i:%%s'),
DATE_FORMAT(l2.loaned_on,'%%Y-%%m-%%d %%H:%%i:%%s')
FROM crcLOAN l1,
crcLOAN l2
WHERE l1.id!=l2.id
AND l1.status!=%s
AND l1.status=l2.status
AND l1.barcode=l2.barcode
ORDER BY l1.loaned_on
""", (CFG_BIBCIRCULATION_LOAN_STATUS_RETURNED, ))
r4 = run_sql(""" SELECT id, id_crcBORROWER, barcode,
due_date, number_of_renewals
FROM crcLOAN
WHERE status=%s
AND due_date>NOW()
""", (CFG_BIBCIRCULATION_LOAN_STATUS_EXPIRED, ))
return (len(r1), len(r2), len(r3), len(r4))
def looks_like_dictionary(candidate_string):
    return re.match(DICC_REGEXP, candidate_string) is not None
|
gpl-2.0
|
pchavanne/yatt
|
tests/test_ticker.py
|
1
|
5056
|
import datetime
from yatt import BASE_CURRENCY
from yatt.ticker import aapl, agg, amzn, goog, msft, spy, eurusd, eurgbp, eurchf
from yatt.ticker import Ticker, Stock, Index, Future, Fx
from yatt.ticker import Tickers
timestamp = datetime.datetime(2000, 1, 1)
def test_ticker():
ticker = Ticker(symbol='AAPL', currency='USD')
assert ticker.symbol == 'AAPL'
assert ticker.name == 'AAPL'
assert ticker.currency == 'USD'
assert ticker.slippage == 0
assert ticker.commission == 0
assert ticker.last_timestamp is None
assert ticker.last_value is None
ticker = Ticker(symbol='AAPL', currency='USD', name='apple', slippage=0.5, commission=1.5, last_timestamp=timestamp, last_value=130)
assert ticker.symbol == 'AAPL'
assert ticker.name == 'apple'
assert ticker.currency == 'USD'
assert ticker.slippage == 0.5
assert ticker.commission == 1.5
assert ticker.last_timestamp == timestamp
assert ticker.last_value == 130
assert str(ticker) == 'AAPL'
assert ticker.__repr__() == 'Ticker AAPL'
ticker2 = Ticker(symbol='AAPL', currency='USD', slippage=0.5, commission=1.5, last_timestamp=timestamp, last_value=150)
assert ticker == ticker2
ticker3 = Ticker(symbol='AAPL', currency='EUR')
assert ticker != ticker3
def test_stock():
assert issubclass(Stock, Ticker)
stock = Stock(symbol='TSLA', currency='USD')
assert stock.symbol == 'TSLA'
assert stock.currency == 'USD'
assert stock.repo == 0
assert stock.dividend is None
assert isinstance(aapl, Stock)
assert aapl.symbol == 'AAPL'
assert aapl.currency == 'USD'
assert isinstance(agg, Stock)
assert agg.symbol == 'AGG'
assert agg.currency == 'USD'
assert isinstance(amzn, Stock)
assert amzn.symbol == 'AMZN'
assert amzn.currency == 'USD'
assert isinstance(goog, Stock)
assert goog.symbol == 'GOOG'
assert goog.currency == 'USD'
assert isinstance(msft, Stock)
assert msft.symbol == 'MSFT'
assert msft.currency == 'USD'
def test_index():
assert issubclass(Index, Ticker)
index = Index(symbol='SX5E', currency='EUR')
assert index.symbol == 'SX5E'
assert index.currency == 'EUR'
assert isinstance(spy, Index)
assert spy.symbol == 'SPY'
assert spy.currency == 'USD'
def test_future():
assert issubclass(Future, Ticker)
future = Future(symbol='SX5E', currency='EUR', maturity=timestamp, multiplier=10)
assert future.symbol == 'SX5E'
assert future.currency == 'EUR'
assert future.maturity == timestamp
assert future.multiplier == 10
future2 = Future(symbol='SX5E', currency='EUR', maturity=timestamp, multiplier=10)
assert future2 == future
future.maturity = datetime.datetime(2001, 1, 1)
assert future2 != future
future2 = Future(symbol='SX5E', currency='EUR', maturity=timestamp, multiplier=20)
assert future2 != future
def test_fx():
assert issubclass(Fx, Ticker)
fx = Fx(symbol='USDJPY', currency='JPY')
assert fx.symbol == 'USDJPY'
assert fx.currency == 'JPY'
assert isinstance(eurusd, Fx)
assert eurusd.symbol == 'EURUSD'
assert eurusd.currency == 'USD'
assert isinstance(eurgbp, Fx)
assert eurgbp.symbol == 'EURGBP'
assert eurgbp.currency == 'GBP'
assert isinstance(eurchf, Fx)
assert eurchf.symbol == 'EURCHF'
assert eurchf.currency == 'CHF'
def test_tickers():
bnp = Stock(symbol='BNP', currency='EUR', last_timestamp=timestamp, last_value=60)
smi = Index(symbol='SMI', currency='CHF', last_timestamp=timestamp, last_value=9000)
aapl.last_timestamp = timestamp
aapl.last_value = 130
assert issubclass(Tickers, list)
tickers = Tickers()
assert tickers == []
assert tickers.base_currency == BASE_CURRENCY
assert tickers.fx == []
tickers.append(bnp)
assert tickers == [bnp]
assert tickers.fx == []
tickers.append(aapl)
assert tickers == [bnp, aapl]
assert tickers.fx == [eurusd]
assert tickers == [bnp, aapl]
tickers.append(smi)
assert tickers.fx == [eurusd, eurchf]
tickers = Tickers(tickers_list=aapl)
assert tickers == [aapl]
assert tickers.fx == [eurusd]
tickers = Tickers(tickers_list=[bnp, aapl, smi])
assert tickers == [bnp, aapl, smi]
assert tickers.fx == [eurusd, eurchf]
assert tickers.ticker_from_symbol('AAPL') == aapl
assert tickers.ticker_from_symbol('SMI') == smi
assert tickers.ticker_from_symbol('EURUSD') == eurusd
assert tickers.all_tickers == [bnp, aapl, smi, eurusd, eurchf]
assert not tickers.is_synchronized
eurusd2 = tickers.ticker_from_symbol('EURUSD')
eurusd2.last_timestamp = timestamp
eurusd2.last_value = 1.13
eurchf2 = tickers.ticker_from_symbol('EURCHF')
eurchf2.last_timestamp = timestamp
eurchf2.last_value = 1.09
assert tickers.is_synchronized
assert tickers.snapshot == {'AAPL': 130, 'BNP': 60, 'EURCHF': 1.09, 'EURUSD': 1.13, 'SMI': 9000,
'timestamp': datetime.datetime(2000, 1, 1, 0, 0)}
|
mit
|
nightpool/CORE-Scouting-Server
|
views/commit.py
|
2
|
2391
|
"""api used for submitting commits"""
from werkzeug import exceptions as ex
import simplejson as json
import flask
import wtforms_me
import wtforms.fields
import model.commit
import config
blueprint = flask.Blueprint("commits", __name__, url_prefix="/commit")
MatchForm = wtforms_me.model_form(model.commit.MatchCommit)
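# Re-declare the event field as a hidden input, preserving its original field kwargs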
MatchForm.event = wtforms.fields.HiddenField(**MatchForm.event.kwargs)
@blueprint.route('/submit', methods=["GET","POST"])
def submit_commit():
form = MatchForm(flask.request.form)
if flask.request.method == "POST" and form.validate():
form.save()
flask.flash('Thanks for your submission! <a href="/commit/{}">Edit it —></a>'
.format(form.instance.key))
return flask.redirect("/commit/submit")
return flask.render_template('commit_submit.html', form=form, type="match")
@blueprint.route('/', methods=["GET","POST"])
def commit_search():
query = {"event":config.event, "match_type":'q'}
errors = []
if flask.request.method == "POST" and flask.request.form.get("query", None):
try:
query = json.loads(flask.request.form.get("query"))
except Exception, e:
print e
errors.append(e.message)
objects = []
try:
objects = list(model.commit.MatchCommit.objects(**query).order_by("-time"))
except Exception, e:
print e
errors.append(e.message)
return flask.render_template("commit_search.html", objects=objects,
query=json.dumps(query), errors=errors)
@blueprint.route('/<cid>', methods=["GET","POST"])
def get_commit(cid):
try:
c = model.commit.get_commit(cid)
e_key, match_type, match_num, team = model.commit.parse_cid(cid)
obj = {"event":e_key, "match_type":match_type, "match_num": match_num, "team": team}
print c
form = MatchForm(flask.request.form, instance=c) if c else MatchForm(flask.request.form, **obj)
except ValueError:
raise ex.BadRequest("Commit id %s malformatted." % cid)
if flask.request.method == "POST" and form.validate():
        if c:
            print c
            c.delete()
form = MatchForm(flask.request.form)
form.save()
flask.flash("Thanks for your submission!")
return flask.redirect("/commit/submit")
return flask.render_template("commit_submit.html", form=form, type="match", furl=flask.request.url)
|
gpl-3.0
|
giavac/tadhack_paris_2015
|
send_email.py
|
1
|
4640
|
#!/usr/bin/env python
import speech_recognition as sr
import sys
import smtplib
from os.path import basename
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
import subprocess
import syslog
BASE_URL = 'https:// WEB PAGE IP ADDRESS HERE :8090'
#debug = True
debug = False
disable_transcription = False
#disable_transcription = True
send_from = ' FROM EMAIL ADDRESS HERE '
send_to = ' TO EMAIL ADDRESS HERE '
syslog.syslog('Processing started')
def send_mail(send_from, send_to, subject, text, files=None, server="127.0.0.1"):
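    """Sends `text` as an HTML email from `send_from` to the `send_to` list,
    attaching each path in `files`, via the SMTP server at `server`.
    """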
assert isinstance(send_to, list)
print("Subject: " + subject)
msg = MIMEMultipart()
msg['Subject'] = subject
msg['From'] = send_from
msg['To'] = COMMASPACE.join(send_to)
msg['Date'] = formatdate(localtime=True)
msg.attach(MIMEText(text, 'html'))
for f in files or []:
with open(f, "rb") as fil:
msg.attach(MIMEApplication(
fil.read(),
Content_Disposition='attachment; filename="%s"' % basename(f),
Name=basename(f)
))
smtp = smtplib.SMTP(server)
smtp.sendmail(send_from, send_to, msg.as_string())
smtp.close()
#########################################
def transcript(WAV_FILE):
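    """Transcribes WAV_FILE with Google Speech Recognition; returns '' on failure."""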
r = sr.Recognizer()
with sr.WavFile(WAV_FILE) as source:
audio = r.record(source) # read the entire WAV file
try:
return r.recognize_google(audio)
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
return ""
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
return ""
##############################################
video_file = sys.argv[1]
print("Processing video file: " + video_file)
#/tmp/kurento/alice_bob_1449919920.webm
tmp = video_file[13:-5]
pieces = tmp.split('_')
syslog.syslog("Pieces length: " + str(len(pieces)))
callee = pieces[0]
syslog.syslog("CallEE: " + callee)
caller = pieces[1]
syslog.syslog("CallER: " + caller)
timestamp = pieces[2]
syslog.syslog("TIMESTAMP: " + timestamp)
callmebackgsmno = ""
if (len(pieces) >= 4):
callmebackgsmno = pieces[3]
syslog.syslog("Callmeback GSM no: " + callmebackgsmno)
extracted_audio_file_base = callee + '_' + caller + '_' + timestamp
command_to_execute = 'ffmpeg -i ' + video_file + ' -vn -acodec copy ' + extracted_audio_file_base + '.ogg'
print("Preparing to run command: " + command_to_execute)
syslog.syslog("Preparing to run command: " + command_to_execute)
if not debug:
subprocess.call(command_to_execute, shell=True)
print("Extracted!")
syslog.syslog("Extracted!")
command_to_execute = 'ffmpeg -i ' + extracted_audio_file_base + '.ogg ' + extracted_audio_file_base + '.wav'
print("Preparing to run command: " + command_to_execute)
syslog.syslog("Preparing to run command: " + command_to_execute)
if not debug:
subprocess.call(command_to_execute, shell=True)
extracted_audio_file = extracted_audio_file_base + '.wav'
print("Extracted: " + extracted_audio_file)
files = [extracted_audio_file]
print("Transcribing..........")
syslog.syslog("Transcribing..........")
transcription = ""
if (not disable_transcription):
#transcription = transcript("man1_nb.wav")
transcription = transcript(extracted_audio_file)
syslog.syslog("Transcription: " + transcription)
video_file = callee + '_' + caller + '_' + timestamp
if (callmebackgsmno):
video_file += '_' + callmebackgsmno
video_file += '.webm'
video_url = BASE_URL + '/play.html?file_uri=' + video_file
transcription_sentence = ""
if (transcription):
transcription_sentence = "It says: <br>" + transcription + "<br>"
callmeback_sentence = ""
if (callmebackgsmno):
callmeback_url = BASE_URL + '/call.html?number=' + callmebackgsmno + '&name=' + caller
callmeback_sentence = "<a href='" + callmeback_url + "'>Please call me back at +" + callmebackgsmno + "</a><br>"
body = """ \
<html>
<head></head>
<body>
<p>Hi %s!<br>
Here is a <a href="%s">videomessage</a> from %s.<br>
%s <br>
%s <br>
Yours,<br>
The Cool Voicemail<br>
</p>
</body>
</html>
""" % (callee, video_url, caller, transcription_sentence, callmeback_sentence)
subject = 'Videomessage from ' + caller
print "sending email now... with body: " + body
syslog.syslog("sending email now... with body: " + body)
if not debug:
send_mail(send_from, send_to, subject, body, files)
print("DONE.")
|
mit
|
kuiwei/kuiwei
|
lms/djangoapps/instructor/tests/test_ecommerce.py
|
12
|
15076
|
"""
Unit tests for Ecommerce feature flag in new instructor dashboard.
"""
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from courseware.tests.tests import TEST_DATA_MONGO_MODULESTORE
from student.tests.factories import AdminFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from course_modes.models import CourseMode
from shoppingcart.models import Coupon, PaidCourseRegistration, CourseRegistrationCode
from mock import patch
from student.roles import CourseFinanceAdminRole
@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
class TestECommerceDashboardViews(ModuleStoreTestCase):
"""
Check for E-commerce view on the new instructor dashboard
"""
def setUp(self):
self.course = CourseFactory.create()
# Create instructor account
self.instructor = AdminFactory.create()
self.client.login(username=self.instructor.username, password="test")
mode = CourseMode(
course_id=self.course.id.to_deprecated_string(), mode_slug='honor',
mode_display_name='honor', min_price=10, currency='usd'
)
mode.save()
# URL for instructor dash
self.url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.e_commerce_link = '<a href="" data-section="e-commerce">E-Commerce</a>'
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
def tearDown(self):
"""
Undo all patches.
"""
patch.stopall()
def test_pass_e_commerce_tab_in_instructor_dashboard(self):
"""
Test Pass E-commerce Tab is in the Instructor Dashboard
"""
response = self.client.get(self.url)
self.assertTrue(self.e_commerce_link in response.content)
def test_user_has_finance_admin_rights_in_e_commerce_tab(self):
response = self.client.get(self.url)
self.assertTrue(self.e_commerce_link in response.content)
# Total amount html should render in e-commerce page, total amount will be 0
total_amount = PaidCourseRegistration.get_total_amount_of_purchased_item(self.course.id)
self.assertTrue('<span>Total Amount: <span>$' + str(total_amount) + '</span></span>' in response.content)
self.assertTrue('Download All e-Commerce Purchase' in response.content)
# removing the course finance_admin role of login user
CourseFinanceAdminRole(self.course.id).remove_users(self.instructor)
# total amount should not be visible in e-commerce page if the user is not finance admin
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url)
total_amount = PaidCourseRegistration.get_total_amount_of_purchased_item(self.course.id)
self.assertFalse('Download All e-Commerce Purchase' in response.content)
self.assertFalse('<span>Total Amount: <span>$' + str(total_amount) + '</span></span>' in response.content)
def test_user_view_course_price(self):
"""
test to check if the user views the set price button and price in
the instructor dashboard
"""
response = self.client.get(self.url)
self.assertTrue(self.e_commerce_link in response.content)
# Total amount html should render in e-commerce page, total amount will be 0
course_honor_mode = CourseMode.mode_for_course(self.course.id, 'honor')
price = course_honor_mode.min_price
self.assertTrue('Course Price: <span>$' + str(price) + '</span>' in response.content)
self.assertFalse('+ Set Price</a></span>' in response.content)
# removing the course finance_admin role of login user
CourseFinanceAdminRole(self.course.id).remove_users(self.instructor)
# total amount should not be visible in e-commerce page if the user is not finance admin
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertFalse('+ Set Price</a></span>' in response.content)
def test_update_course_price_check(self):
price = 200
# course B
course2 = CourseFactory.create(org='EDX', display_name='test_course', number='100')
mode = CourseMode(
course_id=course2.id.to_deprecated_string(), mode_slug='honor',
mode_display_name='honor', min_price=30, currency='usd'
)
mode.save()
# course A update
CourseMode.objects.filter(course_id=self.course.id).update(min_price=price)
set_course_price_url = reverse('set_course_mode_price', kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'course_price': price, 'currency': 'usd'}
response = self.client.post(set_course_price_url, data)
self.assertTrue('CourseMode price updated successfully' in response.content)
# Course A updated total amount should be visible in e-commerce page if the user is finance admin
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertTrue('Course Price: <span>$' + str(price) + '</span>' in response.content)
def test_user_admin_set_course_price(self):
"""
test to set the course price related functionality.
        test all the scenarios for setting a new course price
"""
set_course_price_url = reverse('set_course_mode_price', kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'course_price': '12%', 'currency': 'usd'}
# Value Error course price should be a numeric value
response = self.client.post(set_course_price_url, data)
self.assertTrue("Please Enter the numeric value for the course price" in response.content)
# validation check passes and course price is successfully added
data['course_price'] = 100
response = self.client.post(set_course_price_url, data)
self.assertTrue("CourseMode price updated successfully" in response.content)
course_honor_mode = CourseMode.objects.get(mode_slug='honor')
course_honor_mode.delete()
# Course Mode not exist with mode slug honor
response = self.client.post(set_course_price_url, data)
self.assertTrue("CourseMode with the mode slug({mode_slug}) DoesNotExist".format(mode_slug='honor') in response.content)
def test_add_coupon(self):
"""
Test Add Coupon Scenarios. Handle all the HttpResponses return by add_coupon view
"""
# URL for add_coupon
add_coupon_url = reverse('add_coupon', kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'code': 'A2314', 'course_id': self.course.id.to_deprecated_string(),
'description': 'ADSADASDSAD', 'created_by': self.instructor, 'discount': 5
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("coupon with the coupon code ({code}) added successfully".format(code=data['code']) in response.content)
data = {
'code': 'A2314', 'course_id': self.course.id.to_deprecated_string(),
'description': 'asdsasda', 'created_by': self.instructor, 'discount': 99
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("coupon with the coupon code ({code}) already exist".format(code='A2314') in response.content)
response = self.client.post(self.url)
self.assertTrue('<td>ADSADASDSAD</td>' in response.content)
self.assertTrue('<td>A2314</td>' in response.content)
self.assertFalse('<td>111</td>' in response.content)
data = {
'code': 'A2345314', 'course_id': self.course.id.to_deprecated_string(),
'description': 'asdsasda', 'created_by': self.instructor, 'discount': 199
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("Please Enter the Coupon Discount Value Less than or Equal to 100" in response.content)
data['discount'] = '25%'
response = self.client.post(add_coupon_url, data=data)
self.assertTrue('Please Enter the Integer Value for Coupon Discount' in response.content)
course_registration = CourseRegistrationCode(
code='Vs23Ws4j', course_id=self.course.id.to_deprecated_string(),
transaction_group_name='Test Group', created_by=self.instructor
)
course_registration.save()
data['code'] = 'Vs23Ws4j'
response = self.client.post(add_coupon_url, data)
self.assertTrue("The code ({code}) that you have tried to define is already in use as a registration code"
.format(code=data['code']) in response.content)
def test_delete_coupon(self):
"""
Test Delete Coupon Scenarios. Handle all the HttpResponses return by remove_coupon view
"""
coupon = Coupon(
code='AS452', description='asdsadsa', course_id=self.course.id.to_deprecated_string(),
percentage_discount=10, created_by=self.instructor
)
coupon.save()
response = self.client.post(self.url)
self.assertTrue('<td>AS452</td>' in response.content)
# URL for remove_coupon
delete_coupon_url = reverse('remove_coupon', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(delete_coupon_url, {'id': coupon.id})
self.assertTrue('coupon with the coupon id ({coupon_id}) updated successfully'.format(coupon_id=coupon.id) in response.content)
coupon.is_active = False
coupon.save()
response = self.client.post(delete_coupon_url, {'id': coupon.id})
self.assertTrue('coupon with the coupon id ({coupon_id}) is already inactive'.format(coupon_id=coupon.id) in response.content)
response = self.client.post(delete_coupon_url, {'id': 24454})
self.assertTrue('coupon with the coupon id ({coupon_id}) DoesNotExist'.format(coupon_id=24454) in response.content)
response = self.client.post(delete_coupon_url, {'id': ''})
self.assertTrue('coupon id is None' in response.content)
def test_get_coupon_info(self):
"""
Test Edit Coupon Info Scenarios. Handle all the HttpResponses return by edit_coupon_info view
"""
coupon = Coupon(
code='AS452', description='asdsadsa', course_id=self.course.id.to_deprecated_string(),
percentage_discount=10, created_by=self.instructor
)
coupon.save()
# URL for edit_coupon_info
edit_url = reverse('get_coupon_info', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(edit_url, {'id': coupon.id})
self.assertTrue('coupon with the coupon id ({coupon_id}) updated successfully'.format(coupon_id=coupon.id) in response.content)
response = self.client.post(edit_url, {'id': 444444})
self.assertTrue('coupon with the coupon id ({coupon_id}) DoesNotExist'.format(coupon_id=444444) in response.content)
response = self.client.post(edit_url, {'id': ''})
self.assertTrue('coupon id not found"' in response.content)
coupon.is_active = False
coupon.save()
response = self.client.post(edit_url, {'id': coupon.id})
self.assertTrue("coupon with the coupon id ({coupon_id}) is already inactive".format(coupon_id=coupon.id) in response.content)
def test_update_coupon(self):
"""
Test Update Coupon Info Scenarios. Handle all the HttpResponses return by update_coupon view
"""
coupon = Coupon(
code='AS452', description='asdsadsa', course_id=self.course.id.to_deprecated_string(),
percentage_discount=10, created_by=self.instructor
)
coupon.save()
response = self.client.post(self.url)
self.assertTrue('<td>AS452</td>' in response.content)
data = {
'coupon_id': coupon.id, 'code': 'update_code', 'discount': '12',
'course_id': coupon.course_id.to_deprecated_string()
}
# URL for update_coupon
update_coupon_url = reverse('update_coupon', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(update_coupon_url, data=data)
        self.assertTrue('coupon with the coupon id ({coupon_id}) updated Successfully'.format(coupon_id=coupon.id) in response.content)
response = self.client.post(self.url)
self.assertTrue('<td>update_code</td>' in response.content)
self.assertTrue('<td>12</td>' in response.content)
data['coupon_id'] = 1000 # Coupon Not Exist with this ID
response = self.client.post(update_coupon_url, data=data)
self.assertTrue('coupon with the coupon id ({coupon_id}) DoesNotExist'.format(coupon_id=1000) in response.content)
data['coupon_id'] = coupon.id
data['discount'] = 123
response = self.client.post(update_coupon_url, data=data)
self.assertTrue('Please Enter the Coupon Discount Value Less than or Equal to 100' in response.content)
data['discount'] = '25%'
response = self.client.post(update_coupon_url, data=data)
self.assertTrue('Please Enter the Integer Value for Coupon Discount' in response.content)
data['coupon_id'] = '' # Coupon id is not provided
response = self.client.post(update_coupon_url, data=data)
self.assertTrue('coupon id not found' in response.content)
coupon1 = Coupon(
code='11111', description='coupon', course_id=self.course.id.to_deprecated_string(),
percentage_discount=20, created_by=self.instructor
)
coupon1.save()
data = {'coupon_id': coupon.id, 'code': '11111', 'discount': '12'} # pylint: disable=E1101
response = self.client.post(update_coupon_url, data=data)
self.assertTrue('coupon with the coupon id ({coupon_id}) already exist'.format(coupon_id=coupon.id) in response.content) # pylint: disable=E1101
course_registration = CourseRegistrationCode(
code='Vs23Ws4j', course_id=self.course.id.to_deprecated_string(),
transaction_group_name='Test Group', created_by=self.instructor
)
course_registration.save()
data = {'coupon_id': coupon.id, 'code': 'Vs23Ws4j', # pylint: disable=E1101
'discount': '6', 'course_id': coupon.course_id.to_deprecated_string()} # pylint: disable=E1101
response = self.client.post(update_coupon_url, data=data)
self.assertTrue("The code ({code}) that you have tried to define is already in use as a registration code".
format(code=data['code']) in response.content)
|
agpl-3.0
|
rail/treeherder
|
treeherder/autoclassify/management/commands/autoclassify.py
|
2
|
2433
|
import logging
from collections import defaultdict
from django.core.management.base import BaseCommand, CommandError
from treeherder.autoclassify import matchers
from treeherder.model.models import FailureLine, Matcher, FailureMatch
logger = logging.getLogger(__name__)
# The minimum goodness of match we need to mark a particular match as the best match
AUTOCLASSIFY_CUTOFF_RATIO = 0.8
# Initialisation needed to associate matcher functions with the matcher objects
matchers.register()
class Command(BaseCommand):
args = '<job_guid>, <repository>'
help = 'Mark failures on a job.'
def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError('2 arguments required, %s given' % len(args))
        job_guid, repository = args
        match_errors(repository, job_guid)
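# Invocation sketch (assuming a standard Django manage.py entry point; the
# job_guid and repository values are placeholders):
#
#     python manage.py autoclassify <job_guid> <repository>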
def match_errors(repository, job_guid):
unmatched_failures = FailureLine.objects.unmatched_for_job(repository, job_guid)
if not unmatched_failures:
return
all_matched = set()
for matcher in Matcher.objects.registered_matchers():
matches = matcher(unmatched_failures)
for match in matches:
match.failure_line.matches.add(
FailureMatch(score=match.score,
matcher=matcher.db_object,
classified_failure=match.classified_failure))
match.failure_line.save()
logger.info("Matched failure %i with intermittent %i" %
(match.failure_line.id, match.classified_failure.id))
all_matched.add(match.failure_line)
if all_lines_matched(unmatched_failures):
break
for failure_line in all_matched:
# TODO: store all matches
best_match = failure_line.best_match(AUTOCLASSIFY_CUTOFF_RATIO)
if best_match:
best_match.is_best = True
best_match.save()
def all_lines_matched(failure_lines):
failure_score_dict = defaultdict(list)
query = FailureMatch.objects.filter(
failure_line__in=failure_lines).only('failure_line_id', 'score')
for failure_match in query:
failure_score_dict[failure_match.failure_line_id].append(failure_match.score)
for failure_line in failure_lines:
scores = failure_score_dict[failure_line.id]
if not scores or not all(score >= 1 for score in scores):
return False
return True
|
mpl-2.0
|
SummerLW/Perf-Insight-Report
|
telemetry/telemetry/internal/util/file_handle.py
|
35
|
1976
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
_next_file_id = 0
class FileHandle(object):
def __init__(self, temp_file=None, absolute_path=None):
"""Constructs a FileHandle object.
    This constructor should not be used directly; prefer the module-level
    FromTempFile and FromFilePath functions.
    Args:
      temp_file: An instance of a temporary file object.
      absolute_path: A path; should not be passed if temp_file is, and
        vice-versa.
"""
# Exactly one of absolute_path or temp_file must be specified.
assert (absolute_path is None) != (temp_file is None)
self._temp_file = temp_file
self._absolute_path = absolute_path
global _next_file_id
self._id = _next_file_id
_next_file_id += 1
@property
def id(self):
return self._id
@property
def extension(self):
return os.path.splitext(self.GetAbsPath())[1]
def GetAbsPath(self):
"""Returns the path to the pointed-to file relative to the given start path.
Args:
start: A string representing a starting path.
Returns:
A string giving the relative path from path to this file.
"""
if self._temp_file:
self._temp_file.close()
return self._temp_file.name
else:
return self._absolute_path
def FromTempFile(temp_file):
"""Constructs a FileHandle pointing to a temporary file.
Returns:
A FileHandle referring to a named temporary file.
"""
return FileHandle(temp_file)
def FromFilePath(path):
"""Constructs a FileHandle from an absolute file path.
Args:
path: A string giving the absolute path to a file.
Returns:
A FileHandle referring to the file at the specified path.
"""
return FileHandle(None, os.path.abspath(path))
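# A minimal usage sketch (illustrative; assumes a NamedTemporaryFile is at hand):
#
#     import tempfile
#     handle = FromTempFile(tempfile.NamedTemporaryFile(delete=False))
#     path = handle.GetAbsPath()  # closes the temp file and returns its name
#     other = FromFilePath('trace.json')
#     ext = other.extension  # '.json'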
|
bsd-3-clause
|
amurzeau/streamlink-debian
|
src/streamlink/session.py
|
2
|
19160
|
import logging
import pkgutil
from collections import OrderedDict
from functools import lru_cache
from socket import AF_INET, AF_INET6
import requests
import requests.packages.urllib3.util.connection as urllib3_connection
from requests.packages.urllib3.util.connection import allowed_gai_family
from streamlink import __version__, plugins
from streamlink.compat import is_win32
from streamlink.exceptions import NoPluginError, PluginError
from streamlink.logger import StreamlinkLogger
from streamlink.options import Options
from streamlink.plugin import Plugin, api
from streamlink.utils import load_module, update_scheme
from streamlink.utils.l10n import Localization
# Ensure that the Logger class returned is Streamlink's for using the API (for backwards compatibility)
logging.setLoggerClass(StreamlinkLogger)
log = logging.getLogger(__name__)
class PythonDeprecatedWarning(UserWarning):
pass
class Streamlink:
"""A Streamlink session is used to keep track of plugins,
options and log settings."""
def __init__(self, options=None):
self.http = api.HTTPSession()
self.options = Options({
"interface": None,
"ipv4": False,
"ipv6": False,
"hds-live-edge": 10.0,
"hds-segment-attempts": 3,
"hds-segment-threads": 1,
"hds-segment-timeout": 10.0,
"hds-timeout": 60.0,
"hls-live-edge": 3,
"hls-segment-attempts": 3,
"hls-segment-ignore-names": [],
"hls-segment-threads": 1,
"hls-segment-timeout": 10.0,
"hls-segment-stream-data": False,
"hls-timeout": 60.0,
"hls-playlist-reload-attempts": 3,
"hls-playlist-reload-time": "default",
"hls-start-offset": 0,
"hls-duration": None,
"http-stream-timeout": 60.0,
"ringbuffer-size": 1024 * 1024 * 16, # 16 MB
"rtmp-timeout": 60.0,
"rtmp-rtmpdump": is_win32 and "rtmpdump.exe" or "rtmpdump",
"rtmp-proxy": None,
"stream-segment-attempts": 3,
"stream-segment-threads": 1,
"stream-segment-timeout": 10.0,
"stream-timeout": 60.0,
"subprocess-errorlog": False,
"subprocess-errorlog-path": None,
"ffmpeg-ffmpeg": None,
"ffmpeg-fout": None,
"ffmpeg-video-transcode": None,
"ffmpeg-audio-transcode": None,
"ffmpeg-copyts": False,
"ffmpeg-start-at-zero": False,
"mux-subtitles": False,
"locale": None,
"user-input-requester": None
})
if options:
self.options.update(options)
self.plugins = OrderedDict({})
self.load_builtin_plugins()
def set_option(self, key, value):
"""Sets general options used by plugins and streams originating
from this session object.
:param key: key of the option
:param value: value to set the option to
**Available options**:
======================== =========================================
interface (str) Set the network interface,
default: ``None``
ipv4 (bool) Resolve address names to IPv4 only.
This option overrides ipv6, default: ``False``
ipv6 (bool) Resolve address names to IPv6 only.
This option overrides ipv4, default: ``False``
hds-live-edge (float) Specify the time live HDS
streams will start from the edge of
stream, default: ``10.0``
hds-segment-attempts (int) How many attempts should be done
to download each HDS segment, default: ``3``
hds-segment-threads (int) The size of the thread pool used
to download segments, default: ``1``
hds-segment-timeout (float) HDS segment connect and read
timeout, default: ``10.0``
hds-timeout (float) Timeout for reading data from
HDS streams, default: ``60.0``
hls-live-edge (int) How many segments from the end
to start live streams on, default: ``3``
hls-segment-attempts (int) How many attempts should be done
to download each HLS segment, default: ``3``
hls-segment-ignore-names (str[]) List of segment names without
file endings which should get filtered out,
default: ``[]``
hls-segment-threads (int) The size of the thread pool used
to download segments, default: ``1``
hls-segment-stream-data (bool) Stream HLS segment downloads,
default: ``False``
hls-segment-timeout (float) HLS segment connect and read
timeout, default: ``10.0``
hls-timeout (float) Timeout for reading data from
HLS streams, default: ``60.0``
http-proxy (str) Specify a HTTP proxy to use for
all HTTP requests
https-proxy (str) Specify a HTTPS proxy to use for
all HTTPS requests
http-cookies (dict or str) A dict or a semi-colon (;)
delimited str of cookies to add to each
HTTP request, e.g. ``foo=bar;baz=qux``
http-headers (dict or str) A dict or semi-colon (;)
delimited str of headers to add to each
HTTP request, e.g. ``foo=bar;baz=qux``
        http-query-params        (dict or str) A dict or an ampersand (&)
delimited string of query parameters to
add to each HTTP request,
e.g. ``foo=bar&baz=qux``
http-trust-env (bool) Trust HTTP settings set in the
environment, such as environment
variables (HTTP_PROXY, etc) and
~/.netrc authentication
http-ssl-verify (bool) Verify SSL certificates,
default: ``True``
http-ssl-cert (str or tuple) SSL certificate to use,
can be either a .pem file (str) or a
.crt/.key pair (tuple)
http-timeout (float) General timeout used by all HTTP
requests except the ones covered by
other options, default: ``20.0``
http-stream-timeout (float) Timeout for reading data from
HTTP streams, default: ``60.0``
subprocess-errorlog (bool) Log errors from subprocesses to
a file located in the temp directory
subprocess-errorlog-path (str) Log errors from subprocesses to
a specific file
ringbuffer-size (int) The size of the internal ring
buffer used by most stream types,
default: ``16777216`` (16MB)
rtmp-proxy (str) Specify a proxy (SOCKS) that RTMP
streams will use
rtmp-rtmpdump (str) Specify the location of the
rtmpdump executable used by RTMP streams,
e.g. ``/usr/local/bin/rtmpdump``
rtmp-timeout (float) Timeout for reading data from
RTMP streams, default: ``60.0``
ffmpeg-ffmpeg (str) Specify the location of the
ffmpeg executable use by Muxing streams
e.g. ``/usr/local/bin/ffmpeg``
ffmpeg-verbose (bool) Log stderr from ffmpeg to the
console
ffmpeg-verbose-path (str) Specify the location of the
ffmpeg stderr log file
ffmpeg-fout (str) The output file format
when muxing with ffmpeg
e.g. ``matroska``
ffmpeg-video-transcode (str) The codec to use if transcoding
video when muxing with ffmpeg
e.g. ``h264``
ffmpeg-audio-transcode (str) The codec to use if transcoding
audio when muxing with ffmpeg
e.g. ``aac``
ffmpeg-copyts (bool) When used with ffmpeg, do not shift input timestamps.
ffmpeg-start-at-zero (bool) When used with ffmpeg and copyts,
shift input timestamps so they start at zero
default: ``False``
mux-subtitles (bool) Mux available subtitles into the
output stream.
stream-segment-attempts (int) How many attempts should be done
to download each segment, default: ``3``.
General option used by streams not
covered by other options.
stream-segment-threads (int) The size of the thread pool used
to download segments, default: ``1``.
General option used by streams not
covered by other options.
stream-segment-timeout (float) Segment connect and read
timeout, default: ``10.0``.
General option used by streams not
covered by other options.
stream-timeout (float) Timeout for reading data from
stream, default: ``60.0``.
General option used by streams not
covered by other options.
locale (str) Locale setting, in the RFC 1766 format
eg. en_US or es_ES
default: ``system locale``.
user-input-requester (UserInputRequester) instance of UserInputRequester
to collect input from the user at runtime. Must be
set before the plugins are loaded.
default: ``UserInputRequester``.
======================== =========================================
"""
if key == "interface":
for scheme, adapter in self.http.adapters.items():
if scheme not in ("http://", "https://"):
continue
if not value:
adapter.poolmanager.connection_pool_kw.pop("source_address")
else:
adapter.poolmanager.connection_pool_kw.update(
# https://docs.python.org/3/library/socket.html#socket.create_connection
source_address=(value, 0)
)
self.options.set(key, None if not value else value)
elif key == "ipv4" or key == "ipv6":
self.options.set(key, value)
if value:
self.options.set("ipv6" if key == "ipv4" else "ipv4", False)
urllib3_connection.allowed_gai_family = \
(lambda: AF_INET) if key == "ipv4" else (lambda: AF_INET6)
else:
urllib3_connection.allowed_gai_family = allowed_gai_family
elif key == "http-proxy":
self.http.proxies["http"] = update_scheme("http://", value)
if "https" not in self.http.proxies:
self.http.proxies["https"] = update_scheme("http://", value)
elif key == "https-proxy":
self.http.proxies["https"] = update_scheme("https://", value)
elif key == "http-cookies":
if isinstance(value, dict):
self.http.cookies.update(value)
else:
self.http.parse_cookies(value)
elif key == "http-headers":
if isinstance(value, dict):
self.http.headers.update(value)
else:
self.http.parse_headers(value)
elif key == "http-query-params":
if isinstance(value, dict):
self.http.params.update(value)
else:
self.http.parse_query_params(value)
elif key == "http-trust-env":
self.http.trust_env = value
elif key == "http-ssl-verify":
self.http.verify = value
elif key == "http-disable-dh":
if value:
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS += ':!DH'
try:
requests.packages.urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST = \
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS.encode("ascii")
except AttributeError:
# no ssl to disable the cipher on
pass
elif key == "http-ssl-cert":
self.http.cert = value
elif key == "http-timeout":
self.http.timeout = value
else:
self.options.set(key, value)
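    # A minimal usage sketch (illustrative; option keys as documented above):
    #
    #     session = Streamlink()
    #     session.set_option("hls-live-edge", 2)
    #     session.set_option("http-headers", {"User-Agent": "my-agent/1.0"})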
def get_option(self, key):
"""Returns current value of specified option.
:param key: key of the option
"""
if key == "http-proxy":
return self.http.proxies.get("http")
elif key == "https-proxy":
return self.http.proxies.get("https")
elif key == "http-cookies":
return self.http.cookies
elif key == "http-headers":
return self.http.headers
elif key == "http-query-params":
return self.http.params
elif key == "http-trust-env":
return self.http.trust_env
elif key == "http-ssl-verify":
return self.http.verify
elif key == "http-ssl-cert":
return self.http.cert
elif key == "http-timeout":
return self.http.timeout
else:
return self.options.get(key)
def set_plugin_option(self, plugin, key, value):
"""Sets plugin specific options used by plugins originating
from this session object.
:param plugin: name of the plugin
:param key: key of the option
:param value: value to set the option to
"""
if plugin in self.plugins:
plugin = self.plugins[plugin]
plugin.set_option(key, value)
def get_plugin_option(self, plugin, key):
"""Returns current value of plugin specific option.
:param plugin: name of the plugin
:param key: key of the option
"""
if plugin in self.plugins:
plugin = self.plugins[plugin]
return plugin.get_option(key)
@lru_cache(maxsize=128)
def resolve_url(self, url, follow_redirect=True):
"""Attempts to find a plugin that can use this URL.
The default protocol (http) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
:param follow_redirect: follow redirects
"""
url = update_scheme("http://", url)
available_plugins = []
for name, plugin in self.plugins.items():
if plugin.can_handle_url(url):
available_plugins.append(plugin)
available_plugins.sort(key=lambda x: x.priority(url), reverse=True)
if available_plugins:
return available_plugins[0](url)
if follow_redirect:
# Attempt to handle a redirect URL
try:
res = self.http.head(url, allow_redirects=True, acceptable_status=[501])
# Fall back to GET request if server doesn't handle HEAD.
if res.status_code == 501:
res = self.http.get(url, stream=True)
if res.url != url:
return self.resolve_url(res.url, follow_redirect=follow_redirect)
except PluginError:
pass
raise NoPluginError
def resolve_url_no_redirect(self, url):
"""Attempts to find a plugin that can use this URL.
The default protocol (http) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
"""
return self.resolve_url(url, follow_redirect=False)
def streams(self, url, **params):
"""Attempts to find a plugin and extract streams from the *url*.
*params* are passed to :func:`Plugin.streams`.
Raises :exc:`NoPluginError` if no plugin is found.
"""
plugin = self.resolve_url(url)
return plugin.streams(**params)
def get_plugins(self):
"""Returns the loaded plugins for the session."""
return self.plugins
def load_builtin_plugins(self):
self.load_plugins(plugins.__path__[0])
def load_plugins(self, path: str) -> bool:
"""Attempt to load plugins from the path specified.
:param path: full path to a directory where to look for plugins
:return: success
"""
success = False
user_input_requester = self.get_option("user-input-requester")
for loader, name, ispkg in pkgutil.iter_modules([path]):
# set the full plugin module name
module_name = f"streamlink.plugins.{name}"
try:
mod = load_module(module_name, path)
except ImportError:
log.exception(f"Failed to load plugin {name} from {path}\n")
continue
if not hasattr(mod, "__plugin__") or not issubclass(mod.__plugin__, Plugin):
continue
success = True
plugin = mod.__plugin__
plugin.bind(self, name, user_input_requester)
if plugin.module in self.plugins:
log.debug(f"Plugin {plugin.module} is being overridden by {mod.__file__}")
self.plugins[plugin.module] = plugin
return success
@property
def version(self):
return __version__
@property
def localization(self):
return Localization(self.get_option("locale"))
__all__ = ["Streamlink"]
|
bsd-2-clause
|
lvmgeo/GISPython
|
GISPython/PublisherHealper.py
|
1
|
20261
|
# -*- coding: utf-8 -*-
"""
Deployment publishing operations module
"""
import codecs
import os
import shutil
import hashlib
import datetime
import ZipHelper
import xmlParamsHealper
import JsonParamsHelper
class PublisherHealperConfig:
"""Class for setting up publisher Healper"""
moduleName = "" # name of the module to be processing
destinationDir = "" # folder to deploy to
sourceDir = "" # folder from with to deploy
doBackup = False # does publisher need to make a backup
bacupType = 'Folder' # "Folder" to backup whole destinationDir, "Files" to backup only owerwritten files
backupFolder = "" # folder in witch the backup will be stored
includeFolders = [] # folders to publish
# includeFolders = [ # SAMPLE
# {
# "folder": "testFolder", # Folder to include. Do not provide this for source root folder
# "recursive": True, # Process folder recursively? Default is False
# "includeExtensions": ["py"], # extensions to be included. Do not provide this for all files in Folder
# "excludeExtensions": ["pyc"], # extensions to be excluded. Do not provide this if dont needed
# "clearExtensions": ["pyc"], # extensions to be deleted from destination. Do not provide this if dont needed
# "includeFiles": ["somespecificfile.py"], # files to be specificly included. Do not provide this if dont needed
# "excludeFiles": ["somespecificfile.py"], # files to be specificly excluded. Do not provide this if dont needed
# "clearFiles": ["somespecificfile.py"], # files to be specificly deleted from destination. Do not provide this if dont needed
# "renameFiles": {"somefilenamefromtorename.py": "somewithdifferentname.py"}
# }
# ]
configFilesJson = [] # config files of type Json to be processed
configFilesXML = [] # config files of type XML to be processed
# configFilesXML = [ # SAMPLE
# {
# "file": "Web.config", # relative path in destination
# "changes": [ # List of changes to be made
# {
# "xpath": '/Test/Level1/Level2/Level3', # xpath to tag to be changed (first found will be processed)
# "atribute": "someatribute", # Atribute to be updated. Do not provide this if tag text is to be updated
# "value": "value to be writen" # value to be writen
# }
# ]
# }
# ]
replacementMap = {}
# replacementMap = { # SAMPLE
# 'test.json': {
# '[find sting to replace]': 'replacement value'
# }
# }
class PublisherHealper(object):
"""Class for easing the Rar file operations"""
def __init__(self):
"""Class initialization procedure
Args:
self: The reserved object 'self'
"""
self.backup_zip_file = ''
def Deply(self, config):
"""Does the dployment
Args:
self: The reserved object 'self'
config ([PublisherHealperConfig]): Configuration of deplyment
"""
print u'... start publish for {}'.format(config.moduleName)
self.backup_zip_file = "{}_{}.zip".format(config.moduleName, _now_for_file())
destination_dir = config.destinationDir
if not os.path.exists(destination_dir):
raise AttributeError(u'destination folder {} not found'.format(destination_dir))
self.__create_backup(config)
for folder in config.includeFolders:
self.__do_deploy(folder, config)
self.__do_process_xml(config)
self.__do_process_json(config)
self.__do_string_repalce(config)
def __create_backup(self, config):
"""Does the backup creation
Args:
self: The reserved object 'self'
            config ([PublisherHealperConfig]): Configuration of deployment
"""
if hasattr(config, "doBackup") and config.doBackup:
if config.bacupType.upper() == 'FOLDER':
backup_dir = config.backupFolder
if not os.path.exists(backup_dir):
os.makedirs(backup_dir)
print u'... created backup folder {}'.format(backup_dir)
backup_file_name = os.path.join(backup_dir, self.backup_zip_file)
ZipHelper.ZipHelper().CompressDir(config.destinationDir, backup_file_name)
print u'... backup created!'
def __create_backup_one_file(self, file_path, config):
"""Does the backup creation for one file
Args:
self: The reserved object 'self'
config ([PublisherHealperConfig]): Configuration of deplyment
"""
if hasattr(config, "doBackup") and config.doBackup:
if config.bacupType.upper() == 'FILES':
backup_dir = config.backupFolder
if not os.path.exists(backup_dir):
os.makedirs(backup_dir)
print u'... created backup folder {}'.format(backup_dir)
backup_file_name = os.path.join(backup_dir, self.backup_zip_file)
ZipHelper.ZipHelper().CompressFileList(
filePathList=[file_path],
zipFileName=backup_file_name,
base_dir=config.destinationDir,
append=os.path.exists(backup_file_name))
print u'... file {} backup created!'.format(file_path)
def __do_deploy(self, folder, config):
"""Does the backup creation
Args:
self: The reserved object 'self'
folder ([string]): relative path to folder to be processed
config ([PublisherHealperConfig]): Configuration of deplyment
"""
self.__clear(folder, config)
files_to_copy = self.__files_to_copy(folder, config)
self.__do_copy_files_to_dest(folder, files_to_copy, config)
def __clear(self, folder, config):
"""Clears unnececery files
Args:
self: The reserved object 'self'
folder ([string]): relative path to folder to be processed
config ([PublisherHealperConfig]): Configuration of deplyment
"""
if folder.has_key("clearExtensions") or folder.has_key("clearFiles"):
clear_extensions = folder[u'clearExtensions'] if folder.has_key("clearExtensions") else []
clear_files = folder[u'clearFiles'] if folder.has_key("clearFiles") else []
recursive = folder[u'recursive'] if folder.has_key("recursive") else False
source_dir = os.path.join(config.sourceDir, folder["folder"]) if folder.has_key("folder") else config.sourceDir
destination_dir = os.path.join(config.destinationDir, folder["folder"]) if folder.has_key("folder") else config.destinationDir
if not recursive:
include_folders = []
else:
include_folders = _find_all_folders(destination_dir)
include_folders.append(destination_dir)
files_to_delete = []
for infolder in include_folders:
for ext in clear_extensions:
destination_folder = infolder.replace(source_dir, destination_dir)
if not os.path.exists(destination_folder):
os.makedirs(destination_folder)
print u'... output folder created {}'.format(destination_folder)
found_files = _find_file(os.path.join(destination_dir, infolder), ext)
if found_files:
files_to_delete = files_to_delete + found_files
for file_to_clear in clear_files:
file_name = os.path.join(destination_dir, infolder, file_to_clear)
if os.path.exists(file_name):
files_to_delete.append(file_name)
for file_to_delate in files_to_delete:
os.remove(file_to_delate)
print u'... file deleted {}'.format(file_to_delate)
def __files_to_copy(self, folder, config):
"""Finds files to be copyed
Args:
self: The reserved object 'self'
folder ([string]): relative path to folder to be processed
config ([PublisherHealperConfig]): Configuration of deplyment
"""
recursive = folder[u'recursive'] if folder.has_key("recursive") else False
source_dir = os.path.join(config.sourceDir, folder["folder"]) if folder.has_key("folder") else config.sourceDir
destination_dir = os.path.join(config.destinationDir, folder["folder"]) if folder.has_key("folder") else config.destinationDir
if not recursive:
include_folders = []
else:
include_folders = _find_all_folders(source_dir)
include_folders.append(source_dir)
files_to_copy = []
if folder.has_key("includeExtensions") or folder.has_key("includeFiles"):
files_to_copy = self.__find_files_to_include(folder, include_folders, source_dir, destination_dir)
else:
files_to_copy = self.__find_all_files_to_include(folder, include_folders, source_dir, destination_dir)
if folder.has_key("excludeExtensions") or folder.has_key("excludeFiles"):
files_to_copy = self.__exclude_files(folder, files_to_copy)
return files_to_copy
def __find_files_to_include(self, folder, include_folders, source_dir, destination_dir):
files_to_copy = []
include_extensions = folder[u'includeExtensions'] if folder.has_key("includeExtensions") else []
include_files = folder[u'includeFiles'] if folder.has_key("includeFiles") else []
for infolder in include_folders:
for ext in include_extensions:
found_files = _find_file(infolder, ext)
if found_files:
files_to_copy = files_to_copy + found_files
if not infolder == source_dir:
destination_folder = infolder.replace(source_dir, destination_dir)
if not os.path.exists(destination_folder):
os.makedirs(destination_folder)
print u'... output folder created {}'.format(destination_folder)
for file_name in include_files:
found_files = _find_file_by_name(infolder, file_name)
if found_files:
files_to_copy = files_to_copy + found_files
if not infolder == source_dir:
destination_folder = infolder.replace(source_dir, destination_dir)
if not os.path.exists(destination_folder):
os.makedirs(destination_folder)
print u'... output folder created {}'.format(destination_folder)
return files_to_copy
def __find_all_files_to_include(self, folder, include_folders, source_dir, destination_dir):
files_to_copy = []
for infolder in include_folders:
found_files = _find_all_files(os.path.join(source_dir, infolder))
if found_files:
files_to_copy = files_to_copy + found_files
if not folder == source_dir:
dir_name = infolder.replace(source_dir + '\\', '')
destination_folder = os.path.join(destination_dir, dir_name)
if not os.path.exists(destination_folder):
os.makedirs(destination_folder)
print u'... output folder created {}'.format(destination_folder)
return files_to_copy
def __exclude_files(self, folder, files_to_copy):
exclude_extensions = folder[u'excludeExtensions'] if folder.has_key("excludeExtensions") else []
exclude_files = folder[u'excludeFiles'] if folder.has_key("excludeFiles") else []
for ext in exclude_extensions:
files_to_copy = list(fn for fn in files_to_copy if not os.path.basename(fn).lower().endswith('.' + (ext.lower())))
for exclude_file in exclude_files:
files_to_copy = list(fn for fn in files_to_copy if not os.path.basename(fn).lower() == exclude_file.lower())
return files_to_copy
    def __do_copy_files_to_dest(self, folder, files_to_copy, config):
        """Copies the given files to the destination
        Args:
            self: The reserved object 'self'
            folder ([dict]): folder configuration entry to be processed
            files_to_copy ([list]): paths of files to be copied
            config ([PublisherHealperConfig]): Configuration of deployment
"""
source_dir = os.path.join(config.sourceDir, folder["folder"]) if folder.has_key("folder") else config.sourceDir
destination_dir = os.path.join(config.destinationDir, folder["folder"]) if folder.has_key("folder") else config.destinationDir
for copy_file in files_to_copy:
dest_file = copy_file
dest_file = dest_file.replace(source_dir, destination_dir)
dest_file = self.__rename_file_if_needed(dest_file, folder)
replaced = False
thesame = False
if os.path.exists(dest_file):
copy_hash = _md5(copy_file)
dest_hash = _md5(dest_file)
                if copy_hash != dest_hash:
self.__create_backup_one_file(dest_file, config)
os.remove(dest_file)
replaced = True
else:
thesame = True
if not thesame:
if not os.path.isdir(os.path.dirname(dest_file)):
os.makedirs(os.path.dirname(dest_file))
shutil.copy2(copy_file, dest_file)
if not replaced:
print u'... file copy {}'.format(dest_file)
else:
print u'... file replace {}'.format(dest_file)
def __rename_file_if_needed(self, dest_file, folder):
rename_files = folder[u'renameFiles'] if folder.has_key("renameFiles") else {}
dir_name, file_name = os.path.split(dest_file)
for rename_file in rename_files:
if file_name.upper() == rename_file.upper():
return os.path.join(dir_name, rename_files[rename_file])
return dest_file
def __do_process_xml(self, config):
"""Changes required values in config xml
Args:
self: The reserved object 'self'
            config ([PublisherHealperConfig]): Configuration of deployment
"""
for config_file in config.configFilesXML:
params_helper = xmlParamsHealper.XMLParams(None, None, os.path.join(config.destinationDir, config_file['file']))
params_helper.GetParams()
for change in config_file['changes']:
is_string = False
do_append = False
if change.has_key('string'):
if change['string']:
is_string = True
if change.has_key('append'):
if change['append']:
do_append = True
if do_append:
attribute = None
key = None
if change.has_key("atribute"):
attribute = change['atribute']
if change.has_key("appendKey"):
key = change['appendKey']
params_helper.AppendValueByPath(change['xpath'], key, change['value'], attribute, isString=is_string)
else:
if change.has_key("atribute"):
params_helper.UpdateAtributeByPath(change['xpath'], change['atribute'], change['value'])
else:
params_helper.UpdateValueByPath(change['xpath'], change['value'])
params_helper.WriteParams()
print u'... config file {} updated'.format(config_file['file'])
def __do_process_json(self, config):
"""Changes required values in config xml
Args:
self: The reserved object 'self'
config ([PublisherHealperConfig]): Configuration of deplyment
"""
for config_file in config.configFilesJson:
params_helper = JsonParamsHelper.JsonParams(None, None, os.path.join(config.destinationDir, config_file['file']))
params_helper.GetParams()
for change in config_file['changes']:
is_json = False
do_append = False
if change.has_key('json'):
if change['json']:
is_json = True
if change.has_key('append'):
if change['append']:
do_append = True
if do_append:
params_helper.AppendValueByPath(change['xpath'], change['appendKey'], change['value'], is_json)
else:
params_helper.UpdateValueByPath(change['xpath'], change['value'], is_json)
params_helper.WriteParams(False)
print u'... config file {} updated'.format(config_file['file'])
def __do_string_repalce(self, config):
"""Replace required values by sring replacement
Args:
self: The reserved object 'self'
config ([PublisherHealperConfig]): Configuration of deplyment
"""
for file_name in config.replacementMap:
replacement_map = config.replacementMap[file_name]
path = os.path.join(config.destinationDir, file_name)
_replace_in_file(path, replacement_map)
print u'... file {} replaced strings'.format(path)
def _replace_in_file(path, replace_map):
"""replaces values in files using replace_map
"""
with codecs.open(path, 'r') as f:
newlines = []
for line in f.readlines():
for key, value in replace_map.items():
line = line.replace(key, value)
newlines.append(line)
with open(path, 'w') as f:
for line in newlines:
f.write(line)
def _find_all_files(directory):
"""Finds files in the directory
Args:
        directory: The directory in which to look for files
"""
found_files = [directory + "\\" + fn
for fn in os.listdir(directory) if os.path.isfile(directory + "\\" + fn)]
found_files.sort()
return found_files
def _find_file(directory, ext):
"""Finds files in the directory
Args:
        directory: The directory in which to look for the file
        ext: The extension to search for
"""
found_files = [directory + "\\" + fn
for fn in os.listdir(directory) if fn.lower().endswith('.' + (ext.lower()))]
found_files.sort()
return found_files
def _find_file_by_name(directory, file_name):
"""Finds files in the directory
Args:
        directory: The directory in which to look for the file
        file_name: File name to search for
"""
found_files = [directory + "\\" + fn
for fn in os.listdir(directory) if fn.lower() == file_name.lower()]
found_files.sort()
return found_files
def _find_all_folders(directory):
"""Finds files in the directory
Args:
Dir: The directory in which to look for the file
Ext: The extension to search for
"""
result = []
for root, dirs, files in os.walk(directory):
for name in dirs:
result.append(os.path.join(root, name))
return result
def _md5(filename):
"""calculates file md5 cheksumm
Args:
fname ([string]): File path
Returns:
[string]: hex digest
"""
hash_md5 = hashlib.md5()
with open(filename, "rb") as opened_file:
for chunk in iter(lambda: opened_file.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
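# Usage sketch (hypothetical file name):
# checksum = _md5('publisher.zip')  # returns a 32-character hex string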
def _now_for_file():
"""returns date now formated for filename
Returns:
[string]: [date reprezentation as string]
"""
return datetime.datetime.strftime(datetime.datetime.now(), "%Y%m%d_%H%M%S")
|
gpl-3.0
|
Exgibichi/statusquo
|
test/functional/getblocktemplate_longpoll.py
|
1
|
3120
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test longpolling with getblocktemplate."""
from test_framework.test_framework import StatusquoTestFramework
from test_framework.util import *
import threading
class LongpollThread(threading.Thread):
def __init__(self, node):
threading.Thread.__init__(self)
# query current longpollid
templat = node.getblocktemplate()
self.longpollid = templat['longpollid']
# create a new connection to the node, we can't use the same
# connection from two threads
self.node = get_rpc_proxy(node.url, 1, timeout=600)
def run(self):
self.node.getblocktemplate({'longpollid':self.longpollid})
class GetBlockTemplateLPTest(StatusquoTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 4
self.setup_clean_chain = False
def run_test(self):
self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
self.nodes[0].generate(10)
templat = self.nodes[0].getblocktemplate()
longpollid = templat['longpollid']
# longpollid should not change between successive invocations if nothing else happens
templat2 = self.nodes[0].getblocktemplate()
assert(templat2['longpollid'] == longpollid)
# Test 1: test that the longpoll waits if we do nothing
thr = LongpollThread(self.nodes[0])
thr.start()
# check that thread still lives
thr.join(5) # wait 5 seconds or until thread exits
assert(thr.is_alive())
# Test 2: test that longpoll will terminate if another node generates a block
self.nodes[1].generate(1) # generate a block on another node
# check that the thread has exited now that a new block has arrived
thr.join(5) # wait 5 seconds or until thread exits
assert(not thr.is_alive())
# Test 3: test that longpoll will terminate if we generate a block ourselves
thr = LongpollThread(self.nodes[0])
thr.start()
self.nodes[0].generate(1) # generate a block on our own node
thr.join(5) # wait 5 seconds or until thread exits
assert(not thr.is_alive())
# Test 4: test that introducing a new transaction into the mempool will terminate the longpoll
thr = LongpollThread(self.nodes[0])
thr.start()
# generate a random transaction and submit it
min_relay_fee = self.nodes[0].getnetworkinfo()["relayfee"]
# min_relay_fee is fee per 1000 bytes, which should be more than enough.
(txid, txhex, fee) = random_transaction(self.nodes, Decimal("1.1"), min_relay_fee, Decimal("0.001"), 20)
# after one minute the mempool is checked every 10 seconds, so within 80 seconds the longpoll should have returned
thr.join(60 + 20)
assert(not thr.is_alive())
if __name__ == '__main__':
GetBlockTemplateLPTest().main()
|
mit
|