| repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses, 19 values) | size (stringlengths 4-7) | content (stringlengths 721-1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
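The table above describes one dataset row per source file; the rows below flatten each file's `content` field inline. A minimal sketch of filtering such a dump, assuming it is available as a local Parquet file (the filename and the use of the `datasets` loader are assumptions, not part of the dump):

```python
from datasets import load_dataset

# Hypothetical local file; point this at wherever the dump actually lives.
ds = load_dataset("parquet", data_files="code_dump.parquet", split="train")

# Keep only MIT-licensed, human-written files.
subset = ds.filter(lambda row: row["license"] == "mit" and not row["autogenerated"])
print(subset[0]["repo_name"], subset[0]["path"])
```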
frozflame/molbiox | molbiox/execute/relational.py | 1 | 3111 | #!/usr/bin/env python3
# coding: utf-8
from __future__ import unicode_literals, print_function
import sys
import itertools
from molbiox.frame.command import Command
from molbiox.io import blast, tabular
"""
If your results come from more than 2 columns, use a SQL database instead.
"""
class CommandAggregate(Command):
abbr = 'ag'
name = 'aggregate'
desc = 'apply an aggregation function to a tabular text file'
@classmethod
def register(cls, subparser):
subparser = super(cls, cls).register(subparser)
subparser.add_argument(
'--subsep', metavar='character',
help="seperator used on subject names")
subparser.add_argument(
'-f', '--function', metavar='string', default='count',
choices=['count', 'list', 'set', 'avg', 'var', 'std'],
help='name of the aggregation function')
subparser.add_argument(
'-k', '--key', metavar='integer', default=0,
help='group by this column')
subparser.add_argument(
'-v', '--val', metavar='integer', default=0,
help='apply aggregation function on this column')
subparser.add_argument(
'--ksort', metavar='string', choices=['alpha', 'num'],
help='sort keys alphabetically or numerically')
subparser.add_argument(
'--vsort', metavar='string', choices=['alpha', 'num'],
help='sort values alphabetically or numerically')
subparser.add_argument(
'-m', '--limit', type=int,
help='set max number of hits listed for each query')
return subparser
@classmethod
def render(cls, args, outfile):
recgens = [tabular.read(fn) for fn in args.filenames]
records = itertools.chain(*recgens)
aggregator = tabular.Aggregator(records)
if args.function == 'count':
# groups = aggregator
pass
@classmethod
def render_(cls, args, outfile):
if args.format != '6m':
sys.exit('currently only blast6mini')
# TODO: decide what func to use based on -f option
func = blast.read_fmt6m
# a list of generators, then chain them
recgens = [func(fn) for fn in args.filenames]
records = itertools.chain(*recgens)
querydic = blast.aggregate(records, subsep=args.subsep)
if args.sort:
pairs = ((k, querydic[k]) for k in sorted(querydic))
else:
pairs = ((k, querydic[k]) for k in querydic)
if args.list:
for k, v in pairs:
v = sorted(v) if args.sort else v
v = itertools.islice(v, args.limit) if args.limit else v
subj = ' '.join(v)
print(k, subj, sep='\t', file=outfile)
else:
for k, v in querydic.items():
print(len(v), k, sep='\t', file=outfile)
@classmethod
def get_agg_func(cls, name):
"""
Get a function which returns a dict-like object
:param name:
:return:
"""
pass
| gpl-2.0 | 1,638,024,055,608,239,600 | 32.095745 | 74 | 0.573449 | false |
gem/oq-engine | openquake/hazardlib/scalerel/leonard2010.py | 1 | 3472 | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2021 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.scalerel.leonard2010` implements
:class:`Leonard2010_SCR`
:class:`Leonard2010_SCR_M0`
:class:`Leonard2010_SCR_MX`
"""
from numpy import power, log10
from openquake.hazardlib.scalerel.base import BaseMSRSigma, BaseASRSigma
class Leonard2010_SCR(BaseMSRSigma, BaseASRSigma):
"""
Leonard, Mark. "Earthquake fault scaling: Self-consistent relating of rupture
length, width, average displacement, and moment release." Bulletin of the
Seismological Society of America 100.5A (2010): 1971-1988.
Implements both magnitude-area and area-magnitude scaling relationships from
Table 6, but only for the category SCR
"""
def get_median_area(self, mag, rake):
"""
Calculates median fault area from magnitude.
"""
#based on table 6 relationship for SCR
return power(10.0, (mag - 4.19))
def get_std_dev_area(self, mag, rake):
"""
Returns zero for now
"""
return 0.0
def get_median_mag(self, area, rake):
"""
Returns magnitude for a given fault area
"""
#based on table 6 relationship for SCR
return log10(area) + 4.19
def get_std_dev_mag(self, area, rake):
"""
Returns zero for now
"""
return 0.0
class Leonard2010_SCR_M0(Leonard2010_SCR):
"""
Leonard, Mark. "Earthquake fault scaling: Self-consistent relating of rupture
length, width, average displacement, and moment release." Bulletin of the
Seismological Society of America 100.5A (2010): 1971-1988.
modifies Leonard2010_SCR for a term based on Table 5 and a more precise
conversion between M0 and Mw
"""
def get_median_area(self, mag, rake):
"""
Calculates median fault area from magnitude.
"""
#based on table 6 relationship for SCR with modification
return power(10.0, (mag - 4.22))
def get_median_mag(self, area, rake):
"""
Returns magnitude for a given fault area
"""
#based on table 6 relationship for SCR with modification
return log10(area) + 4.22
class Leonard2010_SCR_MX(Leonard2010_SCR):
"""
Modified for specific individual use. NOT RECOMMENDED!
"""
def get_median_area(self, mag, rake):
"""
Calculates median fault area from magnitude.
"""
#based on table 6 relationship for SCR with modification
return power(10.0, (mag - 4.00))
def get_median_mag(self, area, rake):
"""
Returns magnitude for a given fault area
"""
#based on table 6 relationship for SCR with modification
return log10(area) + 4.00
| agpl-3.0 | -106,655,219,615,109,520 | 31.448598 | 82 | 0.66273 | false |
rbejar/odrl-ogc-cache-policies | owslib/feature/wfs100.py | 1 | 14224 | # =============================================================================
# OWSLib. Copyright (C) 2005 Sean C. Gillies
#
# Contact email: [email protected]
#
# $Id: wfs.py 503 2006-02-01 17:09:12Z dokai $
# =============================================================================
import cgi
from cStringIO import StringIO
from urllib import urlencode
from urllib2 import urlopen
from owslib.util import openURL, testXMLValue, extract_xml_list, ServiceException
from owslib.etree import etree
from owslib.fgdc import Metadata
from owslib.iso import MD_Metadata
from owslib.crs import Crs
from owslib.namespaces import Namespaces
from owslib.util import log
n = Namespaces()
WFS_NAMESPACE = n.get_namespace("wfs")
OGC_NAMESPACE = n.get_namespace("ogc")
#TODO: use nspath in util.py
def nspath(path, ns=WFS_NAMESPACE):
"""
Prefix the given path with the given namespace identifier.
Parameters
----------
path : string
ElementTree API Compatible path expression
ns : string
The XML namespace. Defaults to WFS namespace.
"""
components = []
for component in path.split("/"):
if component != '*':
component = "{%s}%s" % (ns, component)
components.append(component)
return "/".join(components)
class WebFeatureService_1_0_0(object):
"""Abstraction for OGC Web Feature Service (WFS).
Implements IWebFeatureService.
"""
def __new__(self,url, version, xml, parse_remote_metadata=False):
""" overridden __new__ method
@type url: string
@param url: url of WFS capabilities document
@type xml: string
@param xml: elementtree object
@type parse_remote_metadata: boolean
@param parse_remote_metadata: whether to fully process MetadataURL elements
@return: initialized WebFeatureService_1_0_0 object
"""
obj=object.__new__(self)
obj.__init__(url, version, xml, parse_remote_metadata)
return obj
def __getitem__(self,name):
''' check contents dictionary to allow dict like access to service layers'''
if name in self.__getattribute__('contents').keys():
return self.__getattribute__('contents')[name]
else:
raise KeyError, "No content named %s" % name
def __init__(self, url, version, xml=None, parse_remote_metadata=False):
"""Initialize."""
self.url = url
self.version = version
self._capabilities = None
reader = WFSCapabilitiesReader(self.version)
if xml:
self._capabilities = reader.readString(xml)
else:
self._capabilities = reader.read(self.url)
self._buildMetadata(parse_remote_metadata)
def _buildMetadata(self, parse_remote_metadata=False):
'''set up capabilities metadata objects: '''
#serviceIdentification metadata
serviceelem=self._capabilities.find(nspath('Service'))
self.identification=ServiceIdentification(serviceelem, self.version)
#serviceProvider metadata
self.provider=ServiceProvider(serviceelem)
#serviceOperations metadata
self.operations=[]
for elem in self._capabilities.find(nspath('Capability/Request'))[:]:
self.operations.append(OperationMetadata(elem))
#serviceContents metadata: our assumption is that services use a top-level
#layer as a metadata organizer, nothing more.
self.contents={}
featuretypelist=self._capabilities.find(nspath('FeatureTypeList'))
features = self._capabilities.findall(nspath('FeatureTypeList/FeatureType'))
for feature in features:
cm=ContentMetadata(feature, featuretypelist, parse_remote_metadata)
self.contents[cm.id]=cm
#exceptions
self.exceptions = [f.text for f \
in self._capabilities.findall('Capability/Exception/Format')]
def getcapabilities(self, timeout=30):
"""Request and return capabilities document from the WFS as a
file-like object.
NOTE: this is effectively redundant now"""
reader = WFSCapabilitiesReader(self.version)
return urlopen(reader.capabilities_url(self.url), timeout=timeout)
def items(self):
'''supports dict-like items() access'''
items=[]
for item in self.contents:
items.append((item,self.contents[item]))
return items
def getfeature(self, typename=None, filter=None, bbox=None, featureid=None,
featureversion=None, propertyname=['*'], maxfeatures=None,
srsname=None, outputFormat=None, method='{http://www.opengis.net/wfs}Get'):
"""Request and return feature data as a file-like object.
Parameters
----------
typename : list
List of typenames (string)
filter : string
XML-encoded OGC filter expression.
bbox : tuple
(left, bottom, right, top) in the feature type's coordinates.
featureid : list
List of unique feature ids (string)
featureversion : string
Default is most recent feature version.
propertyname : list
List of feature property names. '*' matches all.
maxfeatures : int
Maximum number of features to be returned.
method : string
Qualified name of the HTTP DCP method to use.
srsname: string
EPSG code to request the data in
outputFormat: string (optional)
Requested response format of the request.
There are 3 different modes of use
1) typename and bbox (simple spatial query)
2) typename and filter (more expressive)
3) featureid (direct access to known features)
"""
base_url = self.getOperationByName('{http://www.opengis.net/wfs}GetFeature').methods[method]['url']
request = {'service': 'WFS', 'version': self.version, 'request': 'GetFeature'}
# check featureid
if featureid:
request['featureid'] = ','.join(featureid)
elif bbox and typename:
request['bbox'] = ','.join([repr(x) for x in bbox])
elif filter and typename:
request['filter'] = str(filter)
if srsname:
request['srsname'] = str(srsname)
assert len(typename) > 0
request['typename'] = ','.join(typename)
if propertyname:
request['propertyname'] = ','.join(propertyname)
if featureversion: request['featureversion'] = str(featureversion)
if maxfeatures: request['maxfeatures'] = str(maxfeatures)
if outputFormat is not None:
request["outputFormat"] = outputFormat
data = urlencode(request)
log.debug("Making request: %s?%s" % (base_url, data))
u = openURL(base_url, data, method)
# check for service exceptions, rewrap, and return
# We're going to assume that anything with a content-length > 32k
# is data. We'll check anything smaller.
try:
length = int(u.info()['Content-Length'])
have_read = False
except (KeyError, AttributeError):
data = u.read()
have_read = True
length = len(data)
if length < 32000:
if not have_read:
data = u.read()
try:
tree = etree.fromstring(data)
except BaseException:
# Not XML
return StringIO(data)
else:
if tree.tag == "{%s}ServiceExceptionReport" % OGC_NAMESPACE:
se = tree.find(nspath('ServiceException', OGC_NAMESPACE))
raise ServiceException(str(se.text).strip())
else:
return StringIO(data)
else:
if have_read:
return StringIO(data)
return u
def getOperationByName(self, name):
"""Return a named content item."""
for item in self.operations:
if item.name == name:
return item
raise KeyError, "No operation named %s" % name
class ServiceIdentification(object):
''' Implements IServiceIdentificationMetadata '''
def __init__(self, infoset, version):
self._root=infoset
self.type = testXMLValue(self._root.find(nspath('Name')))
self.version = version
self.title = testXMLValue(self._root.find(nspath('Title')))
self.abstract = testXMLValue(self._root.find(nspath('Abstract')))
self.keywords = [f.text for f in self._root.findall(nspath('Keywords'))]
self.fees = testXMLValue(self._root.find(nspath('Fees')))
self.accessconstraints = testXMLValue(self._root.find(nspath('AccessConstraints')))
class ServiceProvider(object):
''' Implements IServiceProviderMetatdata '''
def __init__(self, infoset):
self._root = infoset
self.name = testXMLValue(self._root.find(nspath('Name')))
self.url = testXMLValue(self._root.find(nspath('OnlineResource')))
self.keywords = extract_xml_list(self._root.find(nspath('Keywords')))
class ContentMetadata:
"""Abstraction for WFS metadata.
Implements IMetadata.
"""
def __init__(self, elem, parent, parse_remote_metadata=False, timeout=30):
"""."""
self.id = testXMLValue(elem.find(nspath('Name')))
self.title = testXMLValue(elem.find(nspath('Title')))
self.abstract = testXMLValue(elem.find(nspath('Abstract')))
self.keywords = [f.text for f in elem.findall(nspath('Keywords'))]
# bboxes
self.boundingBox = None
b = elem.find(nspath('BoundingBox'))
if b is not None:
self.boundingBox = (float(b.attrib['minx']),float(b.attrib['miny']),
float(b.attrib['maxx']), float(b.attrib['maxy']),
b.attrib['SRS'])
self.boundingBoxWGS84 = None
b = elem.find(nspath('LatLongBoundingBox'))
if b is not None:
self.boundingBoxWGS84 = (
float(b.attrib['minx']),float(b.attrib['miny']),
float(b.attrib['maxx']), float(b.attrib['maxy']),
)
# crs options
self.crsOptions = [Crs(srs.text) for srs in elem.findall(nspath('SRS'))]
# verbs
self.verbOptions = [op.tag for op \
in parent.findall(nspath('Operations/*'))]
self.verbOptions += [op.tag for op \
in elem.findall(nspath('Operations/*')) \
if op.tag not in self.verbOptions]
#others not used but needed for iContentMetadata harmonisation
self.styles=None
self.timepositions=None
self.defaulttimeposition=None
# MetadataURLs
self.metadataUrls = []
for m in elem.findall(nspath('MetadataURL')):
metadataUrl = {
'type': testXMLValue(m.attrib['type'], attrib=True),
'format': testXMLValue(m.find('Format')),
'url': testXMLValue(m)
}
if metadataUrl['url'] is not None and parse_remote_metadata: # download URL
try:
content = urlopen(metadataUrl['url'], timeout=timeout)
doc = etree.parse(content)
if metadataUrl['type'] is not None:
if metadataUrl['type'] == 'FGDC':
metadataUrl['metadata'] = Metadata(doc)
if metadataUrl['type'] == 'TC211':
metadataUrl['metadata'] = MD_Metadata(doc)
except Exception, err:
metadataUrl['metadata'] = None
self.metadataUrls.append(metadataUrl)
class OperationMetadata:
"""Abstraction for WFS metadata.
Implements IMetadata.
"""
def __init__(self, elem):
"""."""
self.name = elem.tag
# formatOptions
self.formatOptions = [f.tag for f in elem.findall(nspath('ResultFormat/*'))]
methods = []
for verb in elem.findall(nspath('DCPType/HTTP/*')):
url = verb.attrib['onlineResource']
methods.append((verb.tag, {'url': url}))
self.methods = dict(methods)
class WFSCapabilitiesReader(object):
"""Read and parse capabilities document into a lxml.etree infoset
"""
def __init__(self, version='1.0'):
"""Initialize"""
self.version = version
self._infoset = None
def capabilities_url(self, service_url):
"""Return a capabilities url
"""
qs = []
if service_url.find('?') != -1:
qs = cgi.parse_qsl(service_url.split('?')[1])
params = [x[0] for x in qs]
if 'service' not in params:
qs.append(('service', 'WFS'))
if 'request' not in params:
qs.append(('request', 'GetCapabilities'))
if 'version' not in params:
qs.append(('version', self.version))
urlqs = urlencode(tuple(qs))
return service_url.split('?')[0] + '?' + urlqs
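# Example (illustrative): existing query parameters are preserved and the
# missing ones appended, so
#   capabilities_url('http://example.com/wfs?service=WFS')
# -> 'http://example.com/wfs?service=WFS&request=GetCapabilities&version=1.0'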
def read(self, url, timeout=30):
"""Get and parse a WFS capabilities document, returning an
instance of WFSCapabilitiesInfoset
Parameters
----------
url : string
The URL to the WFS capabilities document.
timeout : number
A timeout value (in seconds) for the request.
"""
request = self.capabilities_url(url)
u = urlopen(request, timeout=timeout)
return etree.fromstring(u.read())
def readString(self, st):
"""Parse a WFS capabilities document, returning an
instance of WFSCapabilitiesInfoset
string should be an XML capabilities document
"""
if not isinstance(st, str):
raise ValueError("String must be of type string, not %s" % type(st))
return etree.fromstring(st)
| mit | -1,141,654,303,511,159,800 | 35.471795 | 107 | 0.577897 | false |
Azure/azure-sdk-for-python | sdk/cognitiveservices/azure-cognitiveservices-search-visualsearch/azure/cognitiveservices/search/visualsearch/models/rating.py | 1 | 2017 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .properties_item import PropertiesItem
class Rating(PropertiesItem):
"""Defines a rating.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: AggregateRating
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar text: Text representation of an item.
:vartype text: str
:param _type: Required. Constant filled by server.
:type _type: str
:param rating_value: Required. The mean (average) rating. The possible
values are 1.0 through 5.0.
:type rating_value: float
:ivar best_rating: The highest rated review. The possible values are 1.0
through 5.0.
:vartype best_rating: float
"""
_validation = {
'text': {'readonly': True},
'_type': {'required': True},
'rating_value': {'required': True},
'best_rating': {'readonly': True},
}
_attribute_map = {
'text': {'key': 'text', 'type': 'str'},
'_type': {'key': '_type', 'type': 'str'},
'rating_value': {'key': 'ratingValue', 'type': 'float'},
'best_rating': {'key': 'bestRating', 'type': 'float'},
}
_subtype_map = {
'_type': {'AggregateRating': 'AggregateRating'}
}
def __init__(self, **kwargs):
super(Rating, self).__init__(**kwargs)
self.rating_value = kwargs.get('rating_value', None)
self.best_rating = None
self._type = 'Rating'
| mit | -4,623,109,599,950,055,000 | 32.616667 | 79 | 0.584036 | false |
Wang-Sen/bdmap-app | draw.py | 1 | 1436 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import datetime
import matplotlib.pyplot as plt
import argparse
import os
if __name__=='__main__':
parser = argparse.ArgumentParser(description='Draw pictures based on the data of input file.')
parser.add_argument('-i', '--ifile', type=str, required=True)
parser.add_argument('-o', '--opath', type=str, required=True)
parser.add_argument('-t', '--title', type=str, required=True)
args = parser.parse_args()
x = []
y = []
old_date = ''
cur_date = ''
with open(args.ifile, 'r') as f:
for i in f.readlines():
data = i.split()
if data[0] != cur_date:
old_date = cur_date
cur_date = data[0]
if x and y:
plt.plot(x, y, label=old_date)
plt.gcf().autofmt_xdate()
plt.title(args.title + ' ' + old_date)
plt.savefig(os.path.join(args.opath, args.title + old_date + '.jpg'))
plt.clf()
x = []
y = []
x.append(datetime.datetime.strptime(data[0] + ' ' + data[1], '%Y-%m-%d %H:%M:%S'))
y.append(data[2])
plt.plot(x, y, label=data[0])
plt.gcf().autofmt_xdate()
plt.title(args.title + ' ' + cur_date)
plt.savefig(os.path.join(args.opath, args.title + cur_date + '.jpg'))
plt.clf()
| gpl-3.0 | -5,511,461,292,612,271,000 | 34.02439 | 98 | 0.501393 | false |
wcong/ants | ants/http/request/rpc.py | 1 | 1073 | """
This module implements the XmlRpcRequest class which is a more convenient class
(that Request) to generate xml-rpc requests.
See documentation in docs/topics/request-response.rst
"""
import xmlrpclib
from ants.http.request import Request
from ants.utils.python import get_func_args
DUMPS_ARGS = get_func_args(xmlrpclib.dumps)
class XmlRpcRequest(Request):
def __init__(self, *args, **kwargs):
encoding = kwargs.get('encoding', None)
if 'body' not in kwargs and 'params' in kwargs:
kw = dict((k, kwargs.pop(k)) for k in DUMPS_ARGS if k in kwargs)
kwargs['body'] = xmlrpclib.dumps(**kw)
# spec defines that requests must use POST method
kwargs.setdefault('method', 'POST')
# xmlrpc query multiples times over the same url
kwargs.setdefault('dont_filter', True)
# restore encoding
if encoding is not None:
kwargs['encoding'] = encoding
super(XmlRpcRequest, self).__init__(*args, **kwargs)
self.headers.setdefault('Content-Type', 'text/xml')
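# Illustrative usage (URL and method name are hypothetical); keyword
# arguments accepted by xmlrpclib.dumps, such as methodname, are popped
# from kwargs and forwarded to it when building the request body:
#   req = XmlRpcRequest('http://example.com/RPC2',
#                       params=('arg',), methodname='service.method')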
| bsd-3-clause | 4,317,994,134,569,879,000 | 29.657143 | 79 | 0.661696 | false |
tulsluper/sanscript | apps/da/apps.py | 1 | 2212 | import os
from django.apps import AppConfig
from django.apps import apps
def samevalues(names):
records = []
for name in names:
if type(name) == str:
records.append({key: name for key in ['label', 'model', 'title']})
return records
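# Example: samevalues(['StorageConnection']) returns
# [{'label': 'StorageConnection', 'model': 'StorageConnection',
#   'title': 'StorageConnection'}]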
pages = [
{'label': 'capacity', 'view': 'capacity', 'title': 'Capacity'},
{'label': 'capacity_history', 'view': 'capacity_history', 'title': 'Capacity History'},
{'label': 'pd_types_capacity', 'view': 'pd_types_capacity', 'title': 'PD Capacity'},
{'label': 'pd_types_quantity', 'view': 'pd_types_quantity', 'title': 'PD Quantity'},
{'label': 'capacity_3par', 'view': 'capacity_3par', 'title': '3PAR Capacity'},
{'label': 'capacity_3par_history', 'view': 'capacity_3par_history', 'title': '3PAR Capacity History'},
{'label': 'volumes', 'view': 'volumes', 'title': 'Volumes'},
{'label': 'hosts', 'view': 'hosts', 'title': 'Hosts'},
{'label': 'hosts_capacity', 'view': 'hosts_capacity', 'title': 'HostsCapacity'},
{'label': 'hosts_capacity_history', 'view': 'hosts_capacity_history', 'title': 'HostsCapacityHistory'},
{'label': 'changes', 'view': 'changes', 'title': 'Changes'},
{'label': 'change_acknowledge', 'view': 'change_acknowledge', 'title': ''},
{'label': 'change_delete', 'view': 'change_delete', 'title': ''},
]
commands = [
{'label': 'test_connections', 'title': 'Test connections'},
{'label': 'collect_data', 'title': 'Collect data'},
]
config_models = samevalues([
'StorageConnection',
])
show_models = samevalues([
'StorageConnection',
'Capacity',
'CapacityHistory',
'TPARCapacity',
'TPARCapacityHistory',
'PDTypesCapacity',
'PDTypesQuantity',
'TPARHost',
'TPARVV',
'TPARVLUN',
'EVAVdisk',
'EVAHost',
'HDSHost',
'HDSLU',
'HDSMap',
'Volume',
'Host',
'HostCapacity',
'HostCapacityHistory',
'VolumeChange',
])
class appAppConfig(AppConfig):
label = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
name = 'apps.{}'.format(label)
verbose_name = 'Storages'
pages = pages
commands = commands
config_models = config_models
show_models = show_models
| gpl-3.0 | -1,206,674,240,758,519,800 | 30.6 | 107 | 0.607595 | false |
rst2pdf/rst2pdf | doc/assets/flowables.py | 1 | 38576 | # -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
__docformat__ = 'reStructuredText'
from copy import copy
import re
import sys
from xml.sax.saxutils import unescape
from reportlab.lib.enums import TA_CENTER, TA_RIGHT
from reportlab.lib.styles import ParagraphStyle
from reportlab.lib.units import cm
from reportlab.platypus.doctemplate import FrameActionFlowable, FrameBreak, Indenter
from reportlab.platypus.flowables import (
_listWrapOn,
_FUZZ,
Flowable,
NullDraw,
PageBreak,
Spacer,
)
from reportlab.platypus.frames import Frame
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.tables import Table, TableStyle
from reportlab.platypus.tableofcontents import TableOfContents
from reportlab.platypus.xpreformatted import XPreformatted
from . import styles
from .log import log
class XXPreformatted(XPreformatted):
"""An extended XPreformattedFit"""
def __init__(self, *args, **kwargs):
XPreformatted.__init__(self, *args, **kwargs)
def split(self, aW, aH):
# Figure out a nice range of splits
#
# Assume we would prefer 5 lines (at least) on
# a splitted flowable before a break, and 4 on
# the last flowable after a break.
# So, the minimum wrap height for a fragment
# will be 5*leading
rW, rH = self.wrap(aW, aH)
if rH > aH:
minH1 = getattr(self.style, 'allowOrphans', 5) * self.style.leading
minH2 = getattr(self.style, 'allowWidows', 4) * self.style.leading
# If there's no way to fid a decent fragment,
# refuse to split
if aH < minH1:
return []
# Now, don't split too close to the end either
pw, ph = self.wrap(aW, aH)
if ph - aH < minH2:
aH = ph - minH2
return XPreformatted.split(self, aW, aH)
class MyIndenter(Indenter):
"""An indenter that has a width, because otherwise you get crashes
if added inside tables"""
width = 0
height = 0
def draw(self):
pass
class TocEntry(NullDraw):
"""A flowable that adds a TOC entry but draws nothing"""
def __init__(self, level, label):
self.level = level
self.label = label
self.width = 0
self.height = 0
self.keepWithNext = True
def draw(self):
# Add outline entry
self.canv.bookmarkHorizontal(self.label, 0, 0 + self.height)
self.canv.addOutlineEntry(
self.label, self.label, max(0, int(self.level)), False
)
class Heading(Paragraph):
"""A paragraph that also adds an outline entry in
the PDF TOC."""
def __init__(
self,
text,
style,
bulletText=None,
caseSensitive=1,
level=0,
snum=None,
parent_id=None,
node=None,
section_header_depth=2,
):
# Issue 114: need to convert "&" to "&" and such.
# Issue 140: need to make it plain text
self.stext = re.sub(r'<[^>]*?>', '', unescape(text))
self.stext = self.stext.strip()
self.level = int(level)
self.snum = snum
self.parent_id = parent_id
self.node = node
self.section_header_depth = section_header_depth
Paragraph.__init__(self, text, style, bulletText)
def draw(self):
# Add outline entry
self.canv.bookmarkHorizontal(self.parent_id, 0, 0 + self.height)
# self.section_header_depth is for Issue 391
if self.canv.firstSect and self.level < self.section_header_depth:
self.canv.sectName = self.stext
self.canv.firstSect = False
if self.snum is not None:
self.canv.sectNum = self.snum
else:
self.canv.sectNum = ""
self.canv.addOutlineEntry(self.stext, self.parent_id, int(self.level), False)
Paragraph.draw(self)
class Separation(Flowable):
"""A simple <hr>-like flowable"""
def wrap(self, w, h):
self.w = w
return w, 1 * cm
def draw(self):
self.canv.line(0, 0.5 * cm, self.w, 0.5 * cm)
class Reference(Flowable):
"""A flowable to insert an anchor without taking space"""
def __init__(self, refid):
self.refid = refid
self.keepWithNext = True
Flowable.__init__(self)
def wrap(self, w, h):
"""This takes no space"""
return 0, 0
def draw(self):
self.canv.bookmarkPage(self.refid)
def repr(self):
return "Reference: %s" % self.refid
def __str__(self):
return "Reference: %s" % self.refid
class OddEven(Flowable):
"""This flowable takes two lists of flowables as arguments, odd and even.
If will draw the "odd" list when drawn in odd pages and the "even" list on
even pages.
wrap() will always return a size large enough for both lists, and this flowable
**cannot** be split, so use with care.
"""
def __init__(self, odd, even, style=None):
self.odd = DelayedTable([[odd]], ['100%'], style)
self.even = DelayedTable([[even]], ['100%'], style)
def wrap(self, w, h):
"""Return a box large enough for both odd and even"""
w1, h1 = self.odd.wrap(w, h)
w2, h2 = self.even.wrap(w, h)
return max(w1, w2), max(h1, h2)
def drawOn(self, canvas, x, y, _sW=0):
if canvas._pagenum % 2 == 0:
self.even.drawOn(canvas, x, y, _sW)
else:
self.odd.drawOn(canvas, x, y, _sW)
def split(self):
"""Makes no sense to split this..."""
return []
class DelayedTable(Table):
"""A flowable that inserts a table for which it has the data.
Needed so column widths can be determined after we know on what frame
the table will be inserted, thus making the overall table width correct.
"""
def __init__(self, data, colWidths, style=None, repeatrows=False, splitByRow=True):
self.data = data
self._colWidths = colWidths
if style is None:
style = TableStyle(
[
('LEFTPADDING', (0, 0), (-1, -1), 0),
('RIGHTPADDING', (0, 0), (-1, -1), 0),
('TOPPADDING', (0, 0), (-1, -1), 0),
('BOTTOMPADDING', (0, 0), (-1, -1), 0),
]
)
self.style = style
self.t = None
self.repeatrows = repeatrows
self.hAlign = TA_CENTER
self.splitByRow = splitByRow
def wrap(self, w, h):
# Create the table, with the widths from colWidths reinterpreted
# if needed as percentages of frame/cell/whatever width w is.
# _tw = w/sum(self.colWidths)
def adjust(*args, **kwargs):
kwargs['total'] = w
return styles.adjustUnits(*args, **kwargs)
# adjust=functools.partial(styles.adjustUnits, total=w)
self.colWidths = [adjust(x) for x in self._colWidths]
# colWidths = [_w * _tw for _w in self.colWidths]
self.t = Table(
self.data,
colWidths=self.colWidths,
style=self.style,
repeatRows=self.repeatrows,
splitByRow=True,
)
# splitByRow=self.splitByRow)
self.t.hAlign = self.hAlign
return self.t.wrap(w, h)
def split(self, w, h):
if self.splitByRow:
if not self.t:
self.wrap(w, h)
return self.t.split(w, h)
else:
return []
def drawOn(self, canvas, x, y, _sW=0):
self.t.drawOn(canvas, x, y, _sW)
def identity(self, maxLen=None):
return "<%s at %s%s%s> containing: %s" % (
self.__class__.__name__,
hex(id(self)),
self._frameName(),
getattr(self, 'name', '')
and (' name="%s"' % getattr(self, 'name', ''))
or '',
repr(self.data[0]),
)[:180]
def tablepadding(padding):
if not isinstance(padding, (list, tuple)):
padding = [padding] * 4
return (
padding,
('TOPPADDING', [0, 0], [-1, -1], padding[0]),
('RIGHTPADDING', [-1, 0], [-1, -1], padding[1]),
('BOTTOMPADDING', [0, 0], [-1, -1], padding[2]),
('LEFTPADDING', [1, 0], [1, -1], padding[3]),
)
class SplitTable(DelayedTable):
def __init__(self, data, colWidths, style, padding=3):
if len(data) != 1 or len(data[0]) != 2:
log.error('SplitTable can only be 1 row and two columns!')
sys.exit(1)
DelayedTable.__init__(self, data, colWidths, style)
self.padding, p1, p2, p3, p4 = tablepadding(padding)
self.style._cmds.insert(0, p1)
self.style._cmds.insert(0, p2)
self.style._cmds.insert(0, p3)
self.style._cmds.insert(0, p4)
def identity(self, maxLen=None):
return "<%s at %s%s%s> containing: %s" % (
self.__class__.__name__,
hex(id(self)),
self._frameName(),
getattr(self, 'name', '')
and (' name="%s"' % getattr(self, 'name', ''))
or '',
repr(self.data[0][1])[:180],
)
def split(self, w, h):
_w, _h = self.wrap(w, h)
if _h > h: # Can't split!
# The right column data mandates the split
# Find which flowable exceeds the available height
dw = self.colWidths[0] + self.padding[1] + self.padding[3]
dh = self.padding[0] + self.padding[2]
bullet = self.data[0][0]
text = self.data[0][1]
for l in range(0, len(text)):
_, fh = _listWrapOn(text[: l + 1], w - dw, None)
if fh + dh > h:
# The lth flowable is the guilty one
# split it
_, lh = _listWrapOn(text[:l], w - dw, None)
# Workaround for Issue 180
text[l].wrap(w - dw, h - lh - dh)
l2 = text[l].split(w - dw, h - lh - dh)
if l2 == []: # Not splittable, push some to next page
if l == 0: # Can't fit anything, push all to next page
return l2
# We reduce the number of items we keep on the
# page for two reasons:
# 1) If an item is associated with the following
# item (getKeepWithNext() == True), we have
# to back up to a previous one.
# 2) If we miscalculated the size required on
# the first page (I dunno why, probably not
# counting borders properly, but we do
# miscalculate occasionally). Seems to
# have to do with nested tables, so it might
# be the extra space on the border on the
# inner table.
while l > 0:
if not text[l - 1].getKeepWithNext():
first_t = Table(
[[bullet, text[:l]]],
colWidths=self.colWidths,
style=self.style,
)
_w, _h = first_t.wrap(w, h)
if _h <= h:
break
l -= 1
if l > 0:
# Workaround for Issue 180 with wordaxe:
# if wordaxe is not None:
# l3=[Table([
# [bullet,
# text[:l]]
# ],
# colWidths=self.colWidths,
# style=self.style),
# Table([['',text[l:]]],
# colWidths=self.colWidths,
# style=self.style)]
# else:
l3 = [
first_t,
SplitTable(
[['', text[l:]]],
colWidths=self.colWidths,
style=self.style,
padding=self.padding,
),
]
else: # Everything flows
l3 = []
else:
l3 = [
Table(
[[bullet, text[:l] + [l2[0]]]],
colWidths=self.colWidths,
rowHeights=[h],
style=self.style,
)
]
if l2[1:] + text[l + 1 :]:
l3.append(
SplitTable(
[['', l2[1:] + text[l + 1 :]]],
colWidths=self.colWidths,
style=self.style,
padding=self.padding,
)
)
return l3
log.debug("Can't split splittable")
return self.t.split(w, h)
else:
return DelayedTable.split(self, w, h)
class MySpacer(Spacer):
def wrap(self, aW, aH):
w, h = Spacer.wrap(self, aW, aH)
self.height = min(aH, h)
return w, self.height
class MyPageBreak(FrameActionFlowable):
def __init__(self, templateName=None, breakTo='any'):
"""templateName switches the page template starting in the
next page.
breakTo can be 'any' 'even' or 'odd'.
'even' will break one page if the current page is odd
or two pages if it's even. That way the next flowable
will be in an even page.
'odd' is the opposite of 'even'
'any' is the default, and means it will always break
only one page.
"""
self.templateName = templateName
self.breakTo = breakTo
self.forced = False
self.extraContent = []
def frameAction(self, frame):
frame._generated_content = []
if self.breakTo == 'any': # Break only once. None if at top of page
if not frame._atTop:
frame._generated_content.append(SetNextTemplate(self.templateName))
frame._generated_content.append(PageBreak())
elif self.breakTo == 'odd': # Break once if on even page, twice
# on odd page, none if on top of odd page
if frame._pagenum % 2: # odd pageNum
if not frame._atTop:
# Blank pages get no heading or footer
frame._generated_content.append(SetNextTemplate(self.templateName))
frame._generated_content.append(SetNextTemplate('emptyPage'))
frame._generated_content.append(PageBreak())
frame._generated_content.append(ResetNextTemplate())
frame._generated_content.append(PageBreak())
else: # even
frame._generated_content.append(SetNextTemplate(self.templateName))
frame._generated_content.append(PageBreak())
elif self.breakTo == 'even': # Break once if on odd page, twice
# on even page, none if on top of even page
if frame._pagenum % 2: # odd pageNum
frame._generated_content.append(SetNextTemplate(self.templateName))
frame._generated_content.append(PageBreak())
else: # even
if not frame._atTop:
# Blank pages get no heading or footer
frame._generated_content.append(SetNextTemplate(self.templateName))
frame._generated_content.append(SetNextTemplate('emptyPage'))
frame._generated_content.append(PageBreak())
frame._generated_content.append(ResetNextTemplate())
frame._generated_content.append(PageBreak())
class SetNextTemplate(Flowable):
"""Set canv.templateName when drawing.
rst2pdf uses that to switch page templates.
"""
def __init__(self, templateName=None):
self.templateName = templateName
Flowable.__init__(self)
def draw(self):
if self.templateName:
try:
self.canv.oldTemplateName = self.canv.templateName
except Exception:
self.canv.oldTemplateName = 'oneColumn'
self.canv.templateName = self.templateName
class ResetNextTemplate(Flowable):
"""Go back to the previous template.
rst2pdf uses that to switch page templates back when
temporarily it needed to switch to another template.
For example, after a OddPageBreak, there can be a totally
blank page. Those have to use coverPage as a template,
because they must not have headers or footers.
And then we need to switch back to whatever was used.
"""
def __init__(self):
Flowable.__init__(self)
def draw(self):
self.canv.templateName, self.canv.oldTemplateName = (
self.canv.oldTemplateName,
self.canv.templateName,
)
def wrap(self, aW, aH):
return 0, 0
class TextAnnotation(Flowable):
"""Add text annotation flowable"""
def __init__(self, *args):
self.annotationText = ""
self.position = [-1, -1, -1, -1]
if len(args) >= 1:
self.annotationText = args[0].lstrip('"').rstrip('"')
if len(args) >= 5:
self.position = args[1:]
def wrap(self, w, h):
return 0, 0
def draw(self):
# Format of Reportlab's textAnnotation():
# textAnnotation("Your content", Rect=[x_begin, y_begin, x_end, y_end], relative=1)
self.canv.textAnnotation(self.annotationText, self.position, 1)
class Transition(Flowable):
"""Wrap canvas.setPageTransition.
Sets the transition effect from the current page to the next.
"""
PageTransitionEffects = dict(
Split=['direction', 'motion'],
Blinds=['dimension'],
Box=['motion'],
Wipe=['direction'],
Dissolve=[],
Glitter=['direction'],
)
def __init__(self, *args):
if len(args) < 1:
args = [None, 1] # No transition
# See if we got a valid transition effect name
if args[0] not in self.PageTransitionEffects:
log.error('Unknown transition effect name: %s' % args[0])
args[0] = None
elif len(args) == 1:
args.append(1)
# FIXME: validate more
self.args = args
def wrap(self, aw, ah):
return 0, 0
def draw(self):
kwargs = dict(
effectname=None, duration=1, direction=0, dimension='H', motion='I'
)
ceff = ['effectname', 'duration'] + self.PageTransitionEffects[self.args[0]]
for argname, argvalue in zip(ceff, self.args):
kwargs[argname] = argvalue
kwargs['duration'] = int(kwargs['duration'])
kwargs['direction'] = int(kwargs['direction'])
self.canv.setPageTransition(**kwargs)
class SmartFrame(Frame):
"""A (Hopefully) smarter frame object.
This frame object knows how to handle a two-pass
layout procedure (someday).
"""
def __init__(
self,
container,
x1,
y1,
width,
height,
leftPadding=6,
bottomPadding=6,
rightPadding=6,
topPadding=6,
id=None,
showBoundary=0,
overlapAttachedSpace=None,
_debug=None,
):
self.container = container
self.onSidebar = False
self.__s = '[%s, %s, %s, %s, %s, %s, %s, %s,]' % (
x1,
y1,
width,
height,
leftPadding,
bottomPadding,
rightPadding,
topPadding,
)
Frame.__init__(
self,
x1,
y1,
width,
height,
leftPadding,
bottomPadding,
rightPadding,
topPadding,
id,
showBoundary,
overlapAttachedSpace,
_debug,
)
def add(self, flowable, canv, trySplit=0):
flowable._atTop = self._atTop
return Frame.add(self, flowable, canv, trySplit)
def __repr__(self):
return self.__s
def __deepcopy__(self, *whatever):
return copy(self)
class FrameCutter(FrameActionFlowable):
def __init__(self, dx, width, flowable, padding, lpad, floatLeft=True):
self.width = width
self.dx = dx
self.f = flowable
self.padding = padding
self.lpad = lpad
self.floatLeft = floatLeft
def frameAction(self, frame):
idx = frame.container.frames.index(frame)
if self.floatLeft:
# Don't bother inserting a silly thin frame
if self.width - self.padding > 30:
f1 = SmartFrame(
frame.container,
frame._x1 + self.dx - 2 * self.padding,
frame._y2 - self.f.height - 3 * self.padding,
self.width + 2 * self.padding,
self.f.height + 3 * self.padding,
bottomPadding=0,
topPadding=0,
leftPadding=self.lpad,
)
f1._atTop = frame._atTop
# This is a frame next to a sidebar.
f1.onSidebar = True
frame.container.frames.insert(idx + 1, f1)
# Don't add silly thin frame
if frame._height - self.f.height - 2 * self.padding > 30:
frame.container.frames.insert(
idx + 2,
SmartFrame(
frame.container,
frame._x1,
frame._y1p,
self.width + self.dx,
frame._height - self.f.height - 3 * self.padding,
topPadding=0,
),
)
else:
# Don't bother inserting a silly thin frame
if self.width - self.padding > 30:
f1 = SmartFrame(
frame.container,
frame._x1 - self.width,
frame._y2 - self.f.height - 2 * self.padding,
self.width,
self.f.height + 2 * self.padding,
bottomPadding=0,
topPadding=0,
rightPadding=self.lpad,
)
f1._atTop = frame._atTop
# This is a frame next to a sidebar.
f1.onSidebar = True
frame.container.frames.insert(idx + 1, f1)
if frame._height - self.f.height - 2 * self.padding > 30:
frame.container.frames.insert(
idx + 2,
SmartFrame(
frame.container,
frame._x1 - self.width,
frame._y1p,
self.width + self.dx,
frame._height - self.f.height - 2 * self.padding,
topPadding=0,
),
)
class Sidebar(FrameActionFlowable):
def __init__(self, flowables, style):
self.style = style
self.width = self.style.width
self.flowables = flowables
def frameAction(self, frame):
if self.style.float not in ('left', 'right'):
return
if frame.onSidebar: # We are still on the frame next to a sidebar!
frame._generated_content = [FrameBreak(), self]
else:
w = frame.container.styles.adjustUnits(self.width, frame.width)
idx = frame.container.frames.index(frame)
padding = self.style.borderPadding
width = self.style.width
self.style.padding = frame.container.styles.adjustUnits(
str(padding), frame.width
)
self.style.width = frame.container.styles.adjustUnits(
str(width), frame.width
)
self.kif = BoxedContainer(self.flowables, self.style)
if self.style.float == 'left':
self.style.lpad = frame.leftPadding
f1 = SmartFrame(
frame.container,
frame._x1,
frame._y1p,
w - 2 * self.style.padding,
frame._y - frame._y1p,
leftPadding=self.style.lpad,
rightPadding=0,
bottomPadding=0,
topPadding=0,
)
f1._atTop = frame._atTop
frame.container.frames.insert(idx + 1, f1)
frame._generated_content = [
FrameBreak(),
self.kif,
FrameCutter(
w,
frame.width - w,
self.kif,
padding,
self.style.lpad,
True,
),
FrameBreak(),
]
elif self.style.float == 'right':
self.style.lpad = frame.rightPadding
frame.container.frames.insert(
idx + 1,
SmartFrame(
frame.container,
frame._x1 + frame.width - self.style.width,
frame._y1p,
w,
frame._y - frame._y1p,
rightPadding=self.style.lpad,
leftPadding=0,
bottomPadding=0,
topPadding=0,
),
)
frame._generated_content = [
FrameBreak(),
self.kif,
FrameCutter(
w,
frame.width - w,
self.kif,
padding,
self.style.lpad,
False,
),
FrameBreak(),
]
class BoundByWidth(Flowable):
"""Limit a list of flowables by width.
This still lets the flowables break over pages and frames.
"""
def __init__(self, maxWidth, content=[], style=None, mode=None, scale=None):
self.maxWidth = maxWidth
self.content = content
self.style = style
self.mode = mode
self.pad = None
self.scale = scale
Flowable.__init__(self)
def border_padding(self, useWidth, additional):
sdict = self.style
sdict = sdict.__dict__ or {}
bp = sdict.get("borderPadding", 0)
if useWidth:
additional += sdict.get("borderWidth", 0)
if not isinstance(bp, list):
bp = [bp] * 4
return [x + additional for x in bp]
def identity(self, maxLen=None):
return "<%s at %s%s%s> containing: %s" % (
self.__class__.__name__,
hex(id(self)),
self._frameName(),
getattr(self, 'name', '')
and (' name="%s"' % getattr(self, 'name', ''))
or '',
repr([c.identity() for c in self.content])[:80],
)
def wrap(self, availWidth, availHeight):
"""If we need more width than we have, complain, keep a scale"""
self.pad = self.border_padding(True, 0.1)
maxWidth = float(
min(
styles.adjustUnits(self.maxWidth, availWidth) or availWidth,
availWidth,
)
)
self.maxWidth = maxWidth
maxWidth -= self.pad[1] + self.pad[3]
self.width, self.height = _listWrapOn(
self.content, maxWidth, None, fakeWidth=False
)
if self.width > maxWidth:
if self.mode != 'shrink':
self.scale = 1.0
log.warning(
"BoundByWidth too wide to fit in frame (%s > %s): %s",
self.width,
maxWidth,
self.identity(),
)
if self.mode == 'shrink' and not self.scale:
self.scale = (maxWidth + self.pad[1] + self.pad[3]) / (
self.width + self.pad[1] + self.pad[3]
)
else:
self.scale = 1.0
self.height *= self.scale
self.width *= self.scale
return (
self.width,
self.height + (self.pad[0] + self.pad[2]) * self.scale,
)
def split(self, availWidth, availHeight):
if not self.pad:
self.wrap(availWidth, availHeight)
content = self.content
if len(self.content) == 1:
# We need to split the only element we have
content = content[0].split(
availWidth - (self.pad[1] + self.pad[3]),
availHeight - (self.pad[0] + self.pad[2]),
)
result = [
BoundByWidth(self.maxWidth, [f], self.style, self.mode, self.scale)
for f in content
]
return result
def draw(self):
"""we simulate being added to a frame"""
canv = self.canv
canv.saveState()
x = canv._x
y = canv._y
_sW = 0
scale = self.scale
content = None
# , canv, x, y, _sW=0, scale=1.0, content=None, aW=None):
pS = 0
aW = self.width
aW = scale * (aW + _sW)
if content is None:
content = self.content
y += (self.height + self.pad[2]) / scale
x += self.pad[3]
for c in content:
w, h = c.wrapOn(canv, aW, 0xFFFFFFF)
if (w < _FUZZ or h < _FUZZ) and not getattr(c, '_ZEROSIZE', None):
continue
if c is not content[0]:
h += max(c.getSpaceBefore() - pS, 0)
y -= h
canv.saveState()
if self.mode == 'shrink':
canv.scale(scale, scale)
elif self.mode == 'truncate':
p = canv.beginPath()
p.rect(
x - self.pad[3],
y - self.pad[2],
self.maxWidth,
self.height + self.pad[0] + self.pad[2],
)
canv.clipPath(p, stroke=0)
c.drawOn(canv, x, y, _sW=aW - w)
canv.restoreState()
if c is not content[-1]:
pS = c.getSpaceAfter()
y -= pS
canv.restoreState()
class BoxedContainer(BoundByWidth):
def __init__(self, content, style, mode='shrink'):
try:
w = style.width
except AttributeError:
w = '100%'
BoundByWidth.__init__(self, w, content, mode=mode, style=None)
self.style = style
self.mode = mode
def identity(self, maxLen=None):
return repr(
['BoxedContainer containing: ', [c.identity() for c in self.content]]
)[:80]
def draw(self):
canv = self.canv
canv.saveState()
x = canv._x
y = canv._y
lw = 0
if self.style and self.style.borderWidth > 0:
lw = self.style.borderWidth
canv.setLineWidth(self.style.borderWidth)
if self.style.borderColor: # This could be None :-(
canv.setStrokeColor(self.style.borderColor)
stroke = 1
else:
stroke = 0
else:
stroke = 0
if self.style and self.style.backColor:
canv.setFillColor(self.style.backColor)
fill = 1
else:
fill = 0
padding = self.border_padding(False, lw)
xpadding = padding[1] + padding[3]
ypadding = padding[0] + padding[2]
p = canv.beginPath()
p.rect(x, y, self.width + xpadding, self.height + ypadding)
canv.drawPath(p, stroke=stroke, fill=fill)
canv.restoreState()
BoundByWidth.draw(self)
def split(self, availWidth, availHeight):
self.wrap(availWidth, availHeight)
padding = (self.pad[1] + self.pad[3]) * self.scale
if self.height + padding <= availHeight:
return [self]
else:
# Try to figure out how many elements
# we can put in the available space
candidate = None
remainder = None
for p in range(1, len(self.content)):
b = BoxedContainer(self.content[:p], self.style, self.mode)
w, h = b.wrap(availWidth, availHeight)
if h < availHeight:
candidate = b
if self.content[p:]:
remainder = BoxedContainer(
self.content[p:], self.style, self.mode
)
else:
break
if not candidate or not remainder: # Nothing fits, break page
return []
if not remainder: # Everything fits?
return [self]
return [candidate, remainder]
class MyTableOfContents(TableOfContents):
"""
Subclass of reportlab.platypus.tableofcontents.TableOfContents
which supports hyperlinks to corresponding sections.
"""
def __init__(self, *args, **kwargs):
# The parent argument is to define the locality of
# the TOC. If it's none, it's a global TOC and
# any heading it's notified about is accepted.
# If it's a node, then the heading needs to be "inside"
# that node. This can be figured out because
# the heading flowable keeps a reference to the title
# node it was creatd from.
#
# Yes, this is gross.
self.parent = kwargs.pop('parent')
TableOfContents.__init__(self, *args, **kwargs)
# reference ids for which this TOC should be notified
self.refids = []
# revese lookup table from (level, text) to refid
self.refid_lut = {}
self.linkColor = "#0000ff"
def notify(self, kind, stuff):
# stuff includes (level, text, pagenum, label)
level, text, pageNum, label, node = stuff
rlabel = '-'.join(label.split('-')[:-1])
def islocal(_node):
"""See if this node is "local enough" for this TOC.
This is for Issue 196"""
if self.parent is None:
return True
while _node.parent:
if _node.parent == self.parent:
return True
_node = _node.parent
return False
if rlabel in self.refids and islocal(node):
self.addEntry(level, text, pageNum)
self.refid_lut[(level, text, pageNum)] = label
def wrap(self, availWidth, availHeight):
"""Adds hyperlink to toc entry."""
widths = (availWidth - self.rightColumnWidth, self.rightColumnWidth)
# makes an internal table which does all the work.
# we draw the LAST RUN's entries! If there are
# none, we make some dummy data to keep the table
# from complaining
if len(self._lastEntries) == 0:
_tempEntries = [(0, 'Placeholder for table of contents', 0, None)]
else:
_tempEntries = self._lastEntries
if _tempEntries:
base_level = _tempEntries[0][0]
else:
base_level = 0
tableData = []
for entry in _tempEntries:
level, text, pageNum = entry[:3]
left_col_level = level - base_level
leftColStyle = self.getLevelStyle(left_col_level)
label = self.refid_lut.get((level, text, pageNum), None)
if label:
pre = u'<a href="#%s" color="%s">' % (label, self.linkColor)
post = u'</a>'
if isinstance(text, bytes):
text = text.decode('utf-8')
text = pre + text + post
else:
pre = ''
post = ''
# right col style is right aligned
rightColStyle = ParagraphStyle(
name='leftColLevel%d' % left_col_level,
parent=leftColStyle,
leftIndent=0,
alignment=TA_RIGHT,
)
leftPara = Paragraph(text, leftColStyle)
rightPara = Paragraph(pre + str(pageNum) + post, rightColStyle)
tableData.append([leftPara, rightPara])
self._table = Table(tableData, colWidths=widths, style=self.tableStyle)
self.width, self.height = self._table.wrapOn(self.canv, availWidth, availHeight)
return self.width, self.height
def split(self, aW, aH):
# Make sure _table exists before splitting.
# This was only triggered in rare cases using sphinx.
if not self._table:
self.wrap(aW, aH)
return TableOfContents.split(self, aW, aH)
def isSatisfied(self):
if self._entries == self._lastEntries:
log.debug('Table Of Contents is stable')
return True
else:
if len(self._entries) != len(self._lastEntries):
log.info(
'Number of items in TOC changed '
'from %d to %d, not satisfied'
% (len(self._lastEntries), len(self._entries))
)
return False
log.info('TOC entries that moved in this pass:')
for i in range(len(self._entries)):
if self._entries[i] != self._lastEntries[i]:
log.info(str(self._entries[i]))
log.info(str(self._lastEntries[i]))
return False
| mit | 4,092,100,191,561,448,400 | 33.077739 | 91 | 0.495697 | false |
cyyber/QRL | src/qrl/core/VoteStats.py | 1 | 11427 | from pyqrllib.pyqrllib import bin2hstr, QRLHelper
from qrl.generated import qrl_pb2
from qrl.core.misc import logger
from qrl.core.StateContainer import StateContainer
from qrl.core.PaginatedData import PaginatedData
from qrl.core.txs.multisig.MultiSigVote import MultiSigVote
from qrl.core.State import State
class VoteStats:
def __init__(self, protobuf_block=None):
self._data = protobuf_block
if protobuf_block is None:
self._data = qrl_pb2.VoteStats()
def pbdata(self):
return self._data
def is_active(self, current_block_number) -> bool:
return not self.executed and current_block_number <= self.expiry_block_number
@property
def multi_sig_address(self):
return self._data.multi_sig_address
@property
def expiry_block_number(self):
return self._data.expiry_block_number
@property
def shared_key(self):
return self._data.shared_key
@property
def signatories(self):
return self._data.signatories
@property
def tx_hashes(self):
return self._data.tx_hashes
@property
def unvotes(self):
return self._data.unvotes
@property
def total_weight(self):
return self._data.total_weight
@property
def executed(self):
return self._data.executed
def update_total_weight(self, value, subtract):
if subtract:
self._data.total_weight -= value
else:
self._data.total_weight += value
def get_address_index(self, address: bytes):
for i in range(len(self.signatories)):
if address == self.signatories[i]:
return i
return -1
def get_unvote_by_address(self, address) -> [bool, int]:
i = self.get_address_index(address)
if i != -1:
return self.unvotes[i], i
return False, -1
def get_vote_tx_hash_by_signatory_address(self, address):
i = self.get_address_index(address)
return self.tx_hashes[i]
def apply_vote_stats(self,
tx: MultiSigVote,
weight: int,
state_container: StateContainer) -> bool:
if state_container.block_number > self.expiry_block_number:
return False
i = self.get_address_index(tx.addr_from)
if i == -1:
return False
if tx.unvote == self.unvotes[i]:
return False
self._data.tx_hashes[i] = tx.txhash
if tx.unvote:
self._data.total_weight -= weight
else:
self._data.total_weight += weight
self._data.unvotes[i] = tx.unvote
multi_sig_spend = state_container.multi_sig_spend_txs[self.shared_key]
threshold = state_container.addresses_state[self.multi_sig_address].threshold
# TODO: return bool response of apply function
self.apply(state_container,
multi_sig_spend,
state_container.addresses_state,
state_container.paginated_tx_hash,
state_container.block_number,
threshold)
return True
def revert_vote_stats(self,
tx: MultiSigVote,
weight: int,
state_container: StateContainer) -> bool:
if state_container.block_number > self.expiry_block_number:
return False
i = self.get_address_index(tx.addr_from)
if i == -1:
return False
if tx.unvote != self.unvotes[i]:
return False
if self._data.tx_hashes[i] != tx.txhash:
return False
multi_sig_spend = state_container.multi_sig_spend_txs[self.shared_key]
threshold = state_container.addresses_state[self.multi_sig_address].threshold
self.revert(state_container,
multi_sig_spend,
state_container.addresses_state,
state_container.paginated_tx_hash,
state_container.block_number,
threshold)
self._data.tx_hashes[i] = tx.prev_tx_hash
if tx.unvote:
self._data.total_weight += weight
else:
self._data.total_weight -= weight
self._data.unvotes[i] = not tx.unvote
return True
@staticmethod
def create(multi_sig_address: bytes,
shared_key: bytes,
signatories: bytes,
expiry_block_number: int):
vote_stats = VoteStats()
vote_stats._data.multi_sig_address = multi_sig_address
vote_stats._data.shared_key = shared_key
vote_stats._data.expiry_block_number = expiry_block_number
for signatory in signatories:
vote_stats._data.signatories.append(signatory)
vote_stats._data.tx_hashes.append(b'')
vote_stats._data.unvotes.append(True)
return vote_stats
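# Illustrative (addresses and numbers hypothetical):
#   vs = VoteStats.create(multi_sig_addr, spend_tx.txhash,
#                         [alice_addr, bob_addr], expiry_block_number=5000)
# Every signatory starts as an unvote (True) with an empty tx hash.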
def apply(self,
state_container,
multi_sig_spend,
addresses_state: dict,
paginated_tx_hash: PaginatedData,
current_block_number: int,
threshold: int) -> bool:
# TODO: return False if executed
if self.executed:
return True
if self.total_weight < threshold:
return False
if current_block_number > self.expiry_block_number:
return False
if multi_sig_spend.total_amount > addresses_state[self.multi_sig_address].balance:
logger.info("[VoteStats] Insufficient funds to execute Multi Sig Spend")
logger.info("Multi Sig Spend Amount: %s, Funds Available: %s",
multi_sig_spend.total_amount,
addresses_state[self.multi_sig_address].balance)
logger.info("Multi Sig Spend txn hash: %s", bin2hstr(multi_sig_spend.txhash))
logger.info("Multi Sig Address: %s", bin2hstr(multi_sig_spend.multi_sig_address))
return False
addresses_state[self.multi_sig_address].update_balance(state_container,
multi_sig_spend.total_amount,
subtract=True)
addr_from_pk = bytes(QRLHelper.getAddress(multi_sig_spend.PK))
for index in range(0, len(multi_sig_spend.addrs_to)):
addr_to = multi_sig_spend.addrs_to[index]
address_state = addresses_state[addr_to]
if addr_to not in (multi_sig_spend.addr_from, addr_from_pk):
paginated_tx_hash.insert(address_state, multi_sig_spend.txhash)
address_state.update_balance(state_container, multi_sig_spend.amounts[index])
self._data.executed = True
return True
def revert(self,
state_container,
multi_sig_spend,
addresses_state: dict,
paginated_tx_hash: PaginatedData,
current_block_number: int,
threshold: int) -> bool:
if not self.executed:
return True
if self.total_weight < threshold:
return False
if current_block_number > self.expiry_block_number:
return False
addresses_state[self.multi_sig_address].update_balance(state_container, multi_sig_spend.total_amount)
addr_from_pk = bytes(QRLHelper.getAddress(multi_sig_spend.PK))
for index in range(0, len(multi_sig_spend.addrs_to)):
addr_to = multi_sig_spend.addrs_to[index]
address_state = addresses_state[addr_to]
if addr_to not in (multi_sig_spend.addr_from, addr_from_pk):
paginated_tx_hash.insert(address_state, multi_sig_spend.txhash)
address_state.update_balance(state_container, multi_sig_spend.amounts[index], subtract=True)
self._data.executed = False
return True
def serialize(self):
return self._data.SerializeToString()
@staticmethod
def deserialize(data):
pbdata = qrl_pb2.VoteStats()
pbdata.ParseFromString(bytes(data))
return VoteStats(pbdata)
def put_state(self, state: State, batch):
try:
state._db.put_raw(b'shared_key_' + self.shared_key, self.serialize(), batch)
except Exception as e:
raise Exception("[put_state] Exception in VoteStats %s", e)
@staticmethod
def delete_state(state: State, shared_key: bytes, batch):
try:
state._db.delete(b'shared_key_' + shared_key, batch)
except Exception as e:
raise Exception("[delete_state] Exception in VoteStats %s", e)
@staticmethod
def get_state(state: State, shared_key):
try:
data = state._db.get_raw(b'shared_key_' + shared_key)
return VoteStats.deserialize(data)
except KeyError:
logger.debug('[get_state] VoteStats %s not found', bin2hstr(shared_key).encode())
except Exception as e:
logger.error('[get_state] %s', e)
return None
# @staticmethod
# def apply_and_put(state: State,
# state_container: StateContainer):
# for key in state_container.votes_stats:
# vote_stats = state_container.votes_stats[key]
# multi_sig_spend = state_container.multi_sig_spend_txs[vote_stats.shared_key]
# threshold = state_container.addresses_state[vote_stats.multi_sig_address].threshold
#
# vote_stats.apply(state_container,
# multi_sig_spend,
# state_container.addresses_state,
# state_container.paginated_tx_hash,
# state_container.block_number,
# threshold)
# vote_stats.put_state(state, state_container.batch)
#
# return True
#
# @staticmethod
# def revert_and_put(state: State,
# state_container: StateContainer):
# for key in state_container.votes_stats:
# vote_stats = state_container.votes_stats[key]
# multi_sig_spend = state_container.multi_sig_spend_txs[vote_stats.shared_key]
# threshold = state_container.addresses_state[vote_stats.multi_sig_address].threshold
#
# vote_stats.revert(state_container,
# multi_sig_spend,
# state_container.addresses_state,
# state_container.paginated_tx_hash,
# state_container.block_number,
# threshold)
# vote_stats.put_state(state, state_container.batch)
#
# return True
@staticmethod
def put_all(state: State,
state_container: StateContainer):
for key in state_container.votes_stats:
vote_stats = state_container.votes_stats[key]
vote_stats.put_state(state, state_container.batch)
return True
@staticmethod
def revert_all(state: State,
state_container: StateContainer):
for key in state_container.votes_stats:
vote_stats = state_container.votes_stats[key]
vote_stats.put_state(state, state_container.batch)
return True
| mit | 6,609,360,542,137,994,000 | 33.838415 | 109 | 0.577404 | false |
mazvv/travelcrm | travelcrm/views/suppliers.py | 1 | 8210 | # -*-coding: utf-8-*-
import logging
from pyramid.view import view_config, view_defaults
from pyramid.response import Response
from pyramid.httpexceptions import HTTPFound
from . import BaseView
from ..models import DBSession
from ..models.resource import Resource
from ..models.supplier import Supplier
from ..lib.bl.subscriptions import subscribe_resource
from ..lib.utils.common_utils import translate as _
from ..lib.helpers.fields import suppliers_combogrid_field
from ..forms.suppliers import (
SupplierForm,
SupplierSearchForm,
SupplierAssignForm,
)
from ..lib.events.resources import (
ResourceCreated,
ResourceChanged,
ResourceDeleted,
)
log = logging.getLogger(__name__)
@view_defaults(
context='..resources.suppliers.SuppliersResource',
)
class SuppliersView(BaseView):
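    """CRUD and helper views for Supplier resources: listing, add/edit/copy,
    details, delete, maintainer assignment, combobox lookup and subscription.
    Routing and permissions are declared via the view_config decorators
    below."""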
@view_config(
request_method='GET',
renderer='travelcrm:templates/suppliers/index.mako',
permission='view'
)
def index(self):
return {
'title': self._get_title(),
}
@view_config(
name='list',
xhr='True',
request_method='POST',
renderer='json',
permission='view'
)
def list(self):
form = SupplierSearchForm(self.request, self.context)
form.validate()
qb = form.submit()
return {
'total': qb.get_count(),
'rows': qb.get_serialized()
}
@view_config(
name='view',
request_method='GET',
renderer='travelcrm:templates/suppliers/form.mako',
permission='view'
)
def view(self):
if self.request.params.get('rid'):
resource_id = self.request.params.get('rid')
supplier = Supplier.by_resource_id(resource_id)
return HTTPFound(
location=self.request.resource_url(
self.context, 'view', query={'id': supplier.id}
)
)
result = self.edit()
result.update({
'title': self._get_title(_(u'View')),
'readonly': True,
})
return result
@view_config(
name='add',
request_method='GET',
renderer='travelcrm:templates/suppliers/form.mako',
permission='add'
)
def add(self):
return {
'title': self._get_title(_(u'Add')),
}
@view_config(
name='add',
request_method='POST',
renderer='json',
permission='add'
)
def _add(self):
form = SupplierForm(self.request)
if form.validate():
supplier = form.submit()
DBSession.add(supplier)
DBSession.flush()
event = ResourceCreated(self.request, supplier)
event.registry()
return {
'success_message': _(u'Saved'),
'response': supplier.id
}
else:
return {
'error_message': _(u'Please, check errors'),
'errors': form.errors
}
@view_config(
name='edit',
request_method='GET',
renderer='travelcrm:templates/suppliers/form.mako',
permission='edit'
)
def edit(self):
supplier = Supplier.get(self.request.params.get('id'))
return {
'item': supplier,
'title': self._get_title(_(u'Edit')),
}
@view_config(
name='edit',
request_method='POST',
renderer='json',
permission='edit'
)
def _edit(self):
supplier = Supplier.get(self.request.params.get('id'))
form = SupplierForm(self.request)
if form.validate():
form.submit(supplier)
event = ResourceChanged(self.request, supplier)
event.registry()
return {
'success_message': _(u'Saved'),
'response': supplier.id,
}
else:
return {
'error_message': _(u'Please, check errors'),
'errors': form.errors
}
@view_config(
name='copy',
request_method='GET',
renderer='travelcrm:templates/suppliers/form.mako',
permission='add'
)
def copy(self):
supplier = Supplier.get_copy(self.request.params.get('id'))
return {
'action': self.request.path_url,
'item': supplier,
'title': self._get_title(_(u'Copy')),
}
@view_config(
name='copy',
request_method='POST',
renderer='json',
permission='add'
)
def _copy(self):
return self._add()
@view_config(
name='details',
request_method='GET',
renderer='travelcrm:templates/suppliers/details.mako',
permission='view'
)
def details(self):
supplier = Supplier.get(self.request.params.get('id'))
return {
'item': supplier,
}
@view_config(
name='delete',
request_method='GET',
renderer='travelcrm:templates/suppliers/delete.mako',
permission='delete'
)
def delete(self):
return {
'title': self._get_title(_(u'Delete')),
'id': self.request.params.get('id')
}
@view_config(
name='delete',
request_method='POST',
renderer='json',
permission='delete'
)
def _delete(self):
errors = False
ids = self.request.params.getall('id')
if ids:
try:
items = DBSession.query(Supplier).filter(
Supplier.id.in_(ids)
)
for item in items:
DBSession.delete(item)
event = ResourceDeleted(self.request, item)
event.registry()
DBSession.flush()
            except Exception:
                errors = True
                DBSession.rollback()
if errors:
return {
'error_message': _(
                    u'Some objects could not be deleted'
),
}
return {'success_message': _(u'Deleted')}
@view_config(
name='assign',
request_method='GET',
renderer='travelcrm:templates/suppliers/assign.mako',
permission='assign'
)
def assign(self):
return {
'id': self.request.params.get('id'),
'title': self._get_title(_(u'Assign Maintainer')),
}
@view_config(
name='assign',
request_method='POST',
renderer='json',
permission='assign'
)
def _assign(self):
form = SupplierAssignForm(self.request)
if form.validate():
form.submit(self.request.params.getall('id'))
return {
'success_message': _(u'Assigned'),
}
else:
return {
'error_message': _(u'Please, check errors'),
'errors': form.errors
}
@view_config(
name='combobox',
request_method='POST',
permission='view'
)
def _combobox(self):
value = None
resource = Resource.get(self.request.params.get('resource_id'))
if resource:
value = resource.supplier.id
return Response(
suppliers_combogrid_field(
self.request, self.request.params.get('name'), value
)
)
@view_config(
name='subscribe',
request_method='GET',
renderer='travelcrm:templates/suppliers/subscribe.mako',
permission='view'
)
def subscribe(self):
return {
'id': self.request.params.get('id'),
'title': self._get_title(_(u'Subscribe')),
}
@view_config(
name='subscribe',
request_method='POST',
renderer='json',
permission='view'
)
def _subscribe(self):
ids = self.request.params.getall('id')
for id in ids:
supplier = Supplier.get(id)
subscribe_resource(self.request, supplier.resource)
return {
'success_message': _(u'Subscribed'),
}
| gpl-3.0 | -3,106,804,341,338,659,300 | 26.006579 | 71 | 0.521803 | false |
nt/code-jam-ruby | gcj_submit_solution.py | 1 | 14079 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file implements the main function for the output submitter, which uses
the OutputSubmitter class in the lib directory."""
import optparse
import os
import re
import sys
from lib import code_jam_login
from lib import constants
from lib import contest_manager
from lib import data_manager
from lib import error
from lib import google_login
from lib import output_submitter
from lib import user_status
from lib import utils
def main():
"""Main function for the output submitter script.
This script receives three positional arguments, the problem letter, the
input size and the submit id.
"""
try:
# Create an option parser and use it to parse the supplied arguments.
program_version = 'GCJ solution submitter {0}'.format(
constants.VERSION)
parser = optparse.OptionParser(usage='%prog [options] problem input id',
version=program_version)
parser.add_option('-l', '--login', action='store_true', dest='renew_cookie',
help='Ignore the stored cookie and log in again')
parser.add_option('-p', '--passwd', action='store', dest='password',
help=('Password used to log in. You will be prompted for '
'a password if one is required and this flag is '
'left empty and there is no password in the '
'configuration files'))
parser.add_option('-f', '--force', action='store_true', dest='force',
help=('Skip check to verify if there is a running timer '
'and there is no submission if the input is large'))
parser.add_option('-d', '--data-directory', action='store',
dest='data_directory',
help=('Directory with the I/O files and main source '
'files [default: ./source]'))
parser.add_option('-o', '--output-name', action='store', dest='output_name',
help='Name of the file with the solution\'s output')
parser.add_option('-a', '--add-source', action='append',
dest='extra_sources',
help='Add EXTRA_SOURCE to the submitted source files',
metavar='EXTRA_SOURCE')
parser.add_option('-z', '--zip-sources', action='store_true',
dest='zip_sources',
help=('Put the source files into a zip file before '
'submitting'))
parser.add_option('--ignore-zip', action='store_true', dest='ignore_zip',
help=('Ignore source zip files not specified directly '
'using the -a option'))
parser.add_option('--ignore-default-source', action='store_true',
dest='ignore_def_source',
help=('Ignore files in the default source directory, '
'except for those specified using the -a option'))
parser.add_option('--gzip-content', action='store_true',
dest='gzip_content',
help=('Send the output and source code using gzip '
'encoding (faster)'))
parser.add_option('--nogzip-content', action='store_false',
dest='gzip_content',
help=('Send the output and sources using plain encoding '
'(slower)'))
    parser.set_defaults(renew_cookie=False, force=False, gzip_content=True,
zip_sources=False, ignore_zip=False,
ignore_def_source=False)
options, args = parser.parse_args()
# Check that the number of arguments is valid.
if len(args) != 3:
raise error.OptionError('need 3 positional arguments')
# Check that the problem idenfier is valid.
problem_letter = args[0].upper()
if len(problem_letter) != 1 or not problem_letter.isupper():
raise error.OptionError(
'invalid problem {0}, must be one uppercase letter'.format(
problem_letter))
# Check that the submit id is a valid identifier.
id = args[2]
    if not re.match(r'^\w+$', id):
raise error.OptionError('invalid id {0}, can only have numbers, letters '
'and underscores'.format(id))
# Check that the contest has been initialized.
if not contest_manager.IsInitialized():
raise error.ConfigurationError(
'Contest is not initialized, please initialize the contest before '
'trying to download input files.\n')
# Read user and input information from the config file.
try:
current_config = data_manager.ReadData()
host = current_config['host']
user = current_config['user']
input_spec = current_config['input_spec']
except KeyError as e:
raise error.ConfigurationError(
'Cannot find all required user data in the configuration files: {0}. '
'Please fill the missing fields in the user configuration '
'file.\n'.format(e))
# Read current contest information from the config file.
try:
middleware_tokens = current_config['middleware_tokens']
cookie = None if options.renew_cookie else current_config['cookie']
contest_id = current_config['contest_id']
problems = current_config['problems']
except KeyError as e:
raise error.ConfigurationError(
'Cannot find all required contest data in configuration files: {0}. '
'Reinitializing the contest might solve this error.\n'.format(e))
# Check that the input type is valid.
input_type = args[1].lower()
if input_type not in input_spec:
raise error.OptionError('invalid input type {0}, must be one of '
'({1})'.format(input_type, ','.join(input_spec)))
# Get the needed middleware tokens to submit solutions and check for running
# attempts.
try:
get_initial_values_token = middleware_tokens['GetInitialValues']
user_status_token = middleware_tokens['GetUserStatus']
submit_output_token = middleware_tokens['SubmitAnswer']
except KeyError as e:
raise error.ConfigurationError(
'Cannot find {0} token in configuration file. Reinitializing the '
'contest might solve this error.\n'.format(e))
# Calculate the problem index and check if it is inside the range.
problem_index = ord(problem_letter) - ord('A')
if problem_index < 0 or problem_index >= len(problems):
raise error.UserError('Cannot find problem {0}, there are only {1} '
'problem(s).\n'.format(problem_letter,
len(problems)))
# Get the problem specification and the input id from the configuration.
problem = problems[problem_index]
try:
input_id = input_spec[input_type]['input_id']
except KeyError:
      raise error.ConfigurationError('Input specification for "{0}" has no '
                                     'input_id.\n'.format(input_type))
# Get the data directory from the options, if not defined, get it from the
# configuration, using './source' as the default value if not found. In the
# same way, get the output filename format and the main source code filename
# format.
data_directory = (options.data_directory or
current_config.get('data_directory', './source'))
output_name_format = (options.output_name or
current_config.get('output_name_format',
'{problem}-{input}-{id}.out'))
source_names_format = current_config.get('source_names_format')
# There is no sensible default for the main source, so exit with error if no
# value is found and it wasn't ignored.
if not options.ignore_def_source and not source_names_format:
raise error.UserError(
'No format found for the default sources file name. Specify '
'"source_name_format" in the configuration file or ignore it passing '
'--ignore-default-source.\n')
# Generate the output file name using the specified format and then return.
try:
output_basename = output_name_format.format(
problem=problem_letter, input=input_type, id=id)
output_filename = os.path.normpath(os.path.join(data_directory,
output_basename))
except KeyError as e:
raise error.ConfigurationError(
'Invalid output name format {0}, {1} is an invalid key, only use '
'"problem", "input" and "id".\n'.format(input_name_format, e))
# Create the list with all the source files and add the default source file
# if it was requested.
source_names = []
if not options.ignore_def_source:
try:
# Process each source name in the source formats list.
for source_name_format in source_names_format:
# Generate the source file name using the specified format and append
# it to the source list.
def_source_basename = source_name_format.format(
problem=problem_letter, input=input_type, id=id)
def_source_filename = os.path.normpath(os.path.join(
data_directory, def_source_basename))
source_names.append(def_source_filename)
except KeyError as e:
raise error.ConfigurationError(
            'Invalid source name format {0}, {1} is an invalid key, only '
            'use "problem", "input" and "id".\n'.format(source_name_format, e))
# Append any extra source file to the source list, normalizing their paths
# for the current operative system.
if options.extra_sources is not None:
for extra_source_format in options.extra_sources:
extra_source_file = extra_source_format.format(problem=problem_letter,
input=input_type, id=id)
source_names.append(os.path.normpath(extra_source_file))
# Print message indicating that an output is going to be submitted.
print '-' * 79
print '{0} output for "{1} - {2}" at "{3}"'.format(
input_type.capitalize(), problem_letter, problem['name'], output_filename)
print '-' * 79
# Renew the cookie if the user requested a new login or the cookie has
# expired.
if google_login.CookieHasExpired(cookie):
print 'Cookie has expired, logging into the Code Jam servers...'
cookie = None
if not cookie or options.renew_cookie:
cookie = code_jam_login.Login(options.password)
# Get the contest status and check if it is accepting submissions.
contest_status = contest_manager.GetContestStatus(
host, cookie, get_initial_values_token, contest_id)
if not options.force and not contest_manager.CanSubmit(contest_status):
      raise error.UserError('Cannot submit solutions to this contest, it is '
                            'not active or in practice mode.\n')
# All problem inputs have public answers in practice mode.
input_public = (input_spec[input_type]['public'] or
contest_status == contest_manager.PRACTICE)
# Get the user status and check if it is participating or not.
input_index = utils.GetIndexFromInputId(input_spec, input_id)
current_user_status = user_status.GetUserStatus(
host, cookie, user_status_token, contest_id, input_spec)
if (contest_status == contest_manager.ACTIVE and
current_user_status is not None):
# Check that there is a running timer for this problem input.
problem_inputs = current_user_status.problem_inputs
problem_input_state = problem_inputs[problem_index][input_index]
if not options.force and problem_input_state.current_attempt == -1:
raise error.UserError(
'You cannot submit {0}-{1}, the timer expired or you did not '
'download this input.\n'.format(problem_letter, input_type))
# Ask for confirmation if user is trying to resubmit a non-public output.
if not input_public and problem_input_state.submitted:
submit_message = ('You already have submitted an output for {0}-{1}. '
'Resubmitting will override the previous one.'.format(
problem_letter, input_type))
utils.AskConfirmationOrDie(submit_message, 'Submit', options.force)
print 'Submitting new output and source files.'
else:
print 'Submitting output and source files.'
else:
print 'Submitting practice output and source files.'
# Create the output submitter and send the files.
submitter = output_submitter.OutputSubmitter(
host, cookie, submit_output_token, contest_id, problem['id'])
submitter.Submit(input_id, output_filename, source_names, input_public,
gzip_body=options.gzip_content, zip_sources=options.zip_sources,
add_ignored_zips=not options.ignore_zip)
except error.OptionError as e:
parser.print_usage()
program_basename = os.path.basename(sys.argv[0])
sys.stderr.write('{0}: error: {1}\n'.format(program_basename, e))
sys.exit(1)
except error.UserError as e:
sys.stderr.write(str(e))
sys.exit(1)
except error.CommandlineError as e:
sys.stderr.write('{0}: {1}'.format(e.__class__.__name__, e))
sys.exit(1)
if __name__ == '__main__':
main()
| apache-2.0 | -2,430,356,063,935,323,600 | 45.3125 | 80 | 0.628525 | false |
scribblemaniac/RenderChan | renderchan/contrib/mp3.py | 1 | 2027 |
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import which
import subprocess
import os
import re
import random
class RenderChanMp3Module(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
if os.name == 'nt':
            self.conf['binary'] = os.path.join(os.path.dirname(__file__), "..\\..\\..\\packages\\mpg123\\mpg123.exe")
            self.conf['sox_binary'] = os.path.join(os.path.dirname(__file__), "..\\..\\..\\packages\\sox\\sox.exe")
        else:
            self.conf['binary'] = "mpg123"
            self.conf['sox_binary'] = "sox"
        self.conf["packetSize"] = 0
def getInputFormats(self):
return ["mp3"]
def getOutputFormats(self):
return ["wav"]
def checkRequirements(self):
if which(self.conf['binary']) == None:
self.active=False
print("Module warning (%s): Cannot find '%s' executable." % (self.getName(), self.conf['binary']))
print(" Please install mpg123 package.")
return False
if which(self.conf['sox_binary']) == None:
self.active=False
print("Module warning (%s): Cannot find '%s' executable!" % (self.getName(), self.conf['sox_binary']))
print(" Please install sox package.")
return False
self.active=True
return True
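    # render() is a two-step pipeline: mpg123 decodes the source mp3 into a
    # temporary WAV, then sox resamples it to the requested audio rate.
    # Roughly equivalent shell commands (paths and rate are placeholders):
    #
    #     mpg123 -w output.wav.12345678 input.mp3
    #     sox output.wav.12345678 output.wav rate -v 44100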
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
comp = 0.0
updateCompletion(comp)
random_string = "%08d" % (random.randint(0,99999999))
tmpfile=outputPath+"."+random_string
# TODO: Progress callback
commandline=[self.conf['binary'], "-w", tmpfile, filename]
subprocess.check_call(commandline)
commandline=[self.conf['sox_binary'], tmpfile, outputPath, "rate", "-v", extraParams["audio_rate"]]
subprocess.check_call(commandline)
os.remove(tmpfile)
updateCompletion(1.0)
| bsd-3-clause | -5,324,836,554,297,819,000 | 32.229508 | 114 | 0.601381 | false |
flosse/hello-xmpp | python/sleekxmpp/client.py | 1 | 1260 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
import sleekxmpp
# Python versions before 3.0 do not use UTF-8 encoding
# by default. To ensure that Unicode is handled properly
# throughout SleekXMPP, we will set the default encoding
# ourselves to UTF-8.
if sys.version_info < (3, 0):
from sleekxmpp.util.misc_ops import setdefaultencoding
setdefaultencoding('utf8')
else:
raw_input = input
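# Example invocation (illustrative JIDs and message):
#
#   python client.py [email protected] secret "hello!" [email protected]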
if __name__ == '__main__':
# check arguments
if len(sys.argv) < 5:
print("Usage: python client.py <my-jid> <my-password> <my-text> <jid1>")
sys.exit(1)
# Setup logging.
logging.basicConfig(level="DEBUG", format='%(levelname)-8s %(message)s')
to = sys.argv[4]
msg = sys.argv[3]
# create a new xmpp client
xmpp = sleekxmpp.ClientXMPP(sys.argv[1], sys.argv[2])
# define a handler function
def onStart(ev):
print("connected")
xmpp.send_message(mto=to, mbody=msg, mtype='chat')
xmpp.disconnect(wait=True)
# add the function
xmpp.add_event_handler("session_start", onStart)
# connect to the XMPP server
if xmpp.connect():
xmpp.process(block=True)
print("Disconnected")
else:
print("Unable to connect.")
| mit | -9,118,855,012,749,246,000 | 25.25 | 80 | 0.642063 | false |
tell-k/pypi-updates | tests/test_bot.py | 1 | 11603 | # -*- coding: utf-8 -*-
"""
unit test for PypiUpdatesBot
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:author: tell-k <[email protected]>
:copyright: tell-k. All Rights Reserved.
"""
import mock
import pytest
import logbook
class DummyMemcache(object):
def __init__(self):
self._data = {}
def set(self, key, value):
self._data.update({key: value})
def get(self, key):
return self._data.get(key)
class DummyTweepyAPI(object):
def update_status(self, message):
pass
class TestPypiUpdatesBot(object):
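    """Tests for PypiUpdatesBot.update_status: RSS parsing failures, memcache
    deduplication via 'latest_published', tweet truncation, multibyte text
    and tweepy error handling."""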
def _get_target_class(self):
from pypi_updates import PypiUpdatesBot
return PypiUpdatesBot
def _make_one(self, *args, **kwargs):
return self._get_target_class()(*args, **kwargs)
def test_tweepy_api(self):
target_obj = self._make_one()
assert target_obj.tweepy_api is not None
# same instance check
assert target_obj.tweepy_api is target_obj.tweepy_api
@mock.patch('pypi_updates.bot.pylibmc.Client',
return_value=DummyMemcache())
def test_memcache(self, mock_memcache):
target_obj = self._make_one()
assert target_obj.memcache is not None
# same instance check
assert target_obj.memcache is target_obj.memcache
@mock.patch('pypi_updates.bot.feedparser.parse', return_value=None)
    def test_cannot_parse_feed(self, mock):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
with logbook.TestHandler() as log_handler:
update_status(target_obj)
expected = [
'[WARNING] [kuroko user]: Cannot parse RSS: {}'.format(RSS_URL)
]
assert log_handler.formatted_records == expected
mock.assert_called_with(RSS_URL)
@mock.patch('pypi_updates.bot.feedparser.parse',
return_value={'items': []})
    def test_cannot_parse_items(self, mock):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
with logbook.TestHandler() as log_handler:
update_status(target_obj)
assert log_handler.formatted_records == [
'[WARNING] [kuroko user]: Cannot parse RSS: {}'.format(RSS_URL)
]
mock.assert_called_with(RSS_URL)
@mock.patch('pypi_updates.bot.pylibmc.Client', return_value=DummyMemcache())
@mock.patch('pypi_updates.bot.tweepy.API', return_value=DummyTweepyAPI())
def test_update_status(self, mock_memcache, mock_tweepy):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
dummy_feed = {
'items': [
{
'title': 'dummy',
'link': 'http://example.com/1/',
'description': 'dummydesc',
'published': '09 Oct 2014 15:31:26 GMT'
},
{
'title': 'dummy2',
'link': 'http://example.com/2/',
'description': 'dummydesc2',
'published': '09 Oct 2014 15:18:59 GMT'
},
]
}
m_parse = mock.patch('pypi_updates.bot.feedparser.parse',
return_value=dummy_feed)
with logbook.TestHandler() as log_handler, m_parse as m:
update_status(target_obj)
assert log_handler.formatted_records == [
u'[INFO] [kuroko user]: latest_published => 20141009151858',
u'[INFO] [kuroko user]: dummy http://example.com/1/',
u'[INFO] [kuroko user]: dummy2 http://example.com/2/',
]
m.assert_called_with(RSS_URL)
assert target_obj.memcache.get('latest_published') == '20141009153126'
@mock.patch('pypi_updates.bot.tweepy.API', return_value=DummyTweepyAPI())
def test_already_set_latest_published(self, mock_tweepy):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
dummy_feed = {
'items': [
{
'title': 'dummy',
'link': 'http://example.com/1/',
'description': 'dummydesc',
'published': '09 Oct 2014 15:31:26 GMT'
},
{
'title': 'dummy2',
'link': 'http://example.com/2/',
'description': 'dummydesc2',
'published': '09 Oct 2014 15:18:59 GMT'
},
]
}
dummy_memcache = DummyMemcache()
dummy_memcache.set('latest_published', '20141009151859')
with logbook.TestHandler() as log_handler,\
mock.patch('pypi_updates.bot.feedparser.parse',
return_value=dummy_feed) as m,\
mock.patch('pypi_updates.bot.pylibmc.Client',
return_value=dummy_memcache):
update_status(target_obj)
assert log_handler.formatted_records == [
u'[INFO] [kuroko user]: latest_published => 20141009151859',
u'[INFO] [kuroko user]: dummy http://example.com/1/',
]
m.assert_called_with(RSS_URL)
assert target_obj.memcache.get('latest_published') == '20141009153126'
def test_skip_old_tweet(self):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
dummy_feed = {
'items': [
{
'title': 'dummy',
'link': 'http://example.com/1/',
'description': 'dummydesc',
'published': '09 Oct 2014 15:31:26 GMT'
},
]
}
dummy_memcache = DummyMemcache()
dummy_memcache.set('latest_published', '20141009153126')
with logbook.TestHandler() as log_handler,\
mock.patch('pypi_updates.bot.feedparser.parse',
return_value=dummy_feed) as m,\
mock.patch('pypi_updates.bot.pylibmc.Client',
return_value=dummy_memcache):
update_status(target_obj)
assert log_handler.formatted_records == [
u'[INFO] [kuroko user]: latest_published => 20141009153126',
]
m.assert_called_with(RSS_URL)
assert target_obj.memcache.get('latest_published') == '20141009153126'
@mock.patch('pypi_updates.bot.pylibmc.Client', return_value=DummyMemcache())
@mock.patch('pypi_updates.bot.tweepy.API', return_value=DummyTweepyAPI())
def test_tweet_over_length(self, mock_memcache, mock_tweepy):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
dummy_feed = {
'items': [
{
'title': 'a' * 109, # truncate
'link': 'http://example.com/1/',
'description': 'a' * 126, # truncate
'published': '09 Oct 2014 15:31:26 GMT'
},
{
'title': 'a' * 108, # not truncate
'link': 'http://example.com/2/',
'description': 'a' * 125, # not truncate
'published': '09 Oct 2014 15:18:59 GMT'
},
]
}
with logbook.TestHandler() as log_handler,\
mock.patch('pypi_updates.bot.feedparser.parse',
return_value=dummy_feed) as m:
update_status(target_obj)
assert log_handler.formatted_records == [
u'[INFO] [kuroko user]: latest_published => 20141009151858',
u'[INFO] [kuroko user]: {}... http://example.com/1/'.format(u'a' * 105),
u'[INFO] [kuroko user]: {} http://example.com/2/'.format(u'a' * 108),
]
m.assert_called_with(RSS_URL)
assert target_obj.memcache.get('latest_published') == '20141009153126'
@mock.patch('pypi_updates.bot.pylibmc.Client', return_value=DummyMemcache())
def test_raise_tweepy_error(self, mock_memcache):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
dummy_feed = {
'items': [
{
'title': 'dummy',
'link': 'http://example.com/1/',
'description': 'dummydesc',
'published': '09 Oct 2014 15:31:26 GMT'
},
]
}
def _update_status_error(message):
import tweepy
raise tweepy.TweepError(reason='error')
dummy_tweepy_api = DummyTweepyAPI()
dummy_tweepy_api.update_status = _update_status_error
with logbook.TestHandler() as log_handler,\
mock.patch('pypi_updates.bot.feedparser.parse',
return_value=dummy_feed) as m,\
mock.patch('pypi_updates.bot.tweepy.API',
return_value=dummy_tweepy_api):
update_status(target_obj)
assert log_handler.formatted_records == [
u'[INFO] [kuroko user]: latest_published => 20141009153125',
u'[INFO] [kuroko user]: dummy http://example.com/1/',
u'[ERROR] [kuroko user]: error'
]
m.assert_called_with(RSS_URL)
assert target_obj.memcache.get('latest_published') == '20141009153125'
@mock.patch('pypi_updates.bot.pylibmc.Client',
return_value=DummyMemcache())
@mock.patch('pypi_updates.bot.tweepy.API', return_value=DummyTweepyAPI())
def test_multibyte_language(self, mock_memcache, mock_tweepy):
from pypi_updates.bot import RSS_URL
target_obj = self._make_one()
update_status = target_obj.funcs[0]['options']['callback']
dummy_feed = {
'items': [
{
'title': u'是假的數據',
'link': 'http://example.com/1/',
'description': u'是假的數據',
'published': '09 Oct 2014 15:31:26 GMT'
},
]
}
m_parse = mock.patch('pypi_updates.bot.feedparser.parse',
return_value=dummy_feed)
with logbook.TestHandler() as log_handler, m_parse as m:
update_status(target_obj)
assert log_handler.formatted_records == [
u'[INFO] [kuroko user]: latest_published => 20141009153125',
u'[INFO] [kuroko user]: 是假的數據 http://example.com/1/',
]
m.assert_called_with(RSS_URL)
assert target_obj.memcache.get('latest_published') == '20141009153126'
class TestIsValidMessage(object):
def _call_fut(self, msg):
from pypi_updates.bot import is_valid_message
return is_valid_message(msg)
@pytest.mark.parametrize('msg', [
'new pypi packages',
])
def test_valid_case(self, msg):
assert self._call_fut(msg)
@pytest.mark.parametrize('msg', [
'kissanime',
'new kissanime',
])
def test_invalid_case(self, msg):
assert not self._call_fut(msg)
| mit | 1,637,907,340,961,301,500 | 34.719136 | 84 | 0.536421 | false |
ALSchwalm/python-prompt-toolkit | prompt_toolkit/buffer.py | 1 | 40824 | """
Data structures for the Buffer.
It holds the text, cursor position, history, etc...
"""
from __future__ import unicode_literals
from .completion import Completer, Completion, CompleteEvent
from .document import Document
from .enums import IncrementalSearchDirection
from .history import History, InMemoryHistory
from .selection import SelectionType, SelectionState
from .utils import Callback
from .validation import ValidationError
from .clipboard import ClipboardData
from .filters import Never, to_simple_filter
from .search_state import SearchState
import os
import six
import subprocess
import tempfile
__all__ = (
'EditReadOnlyBuffer',
'AcceptAction',
'Buffer',
'indent',
'unindent',
)
class EditReadOnlyBuffer(Exception):
" Attempt editing of read-only buffer. "
class AcceptAction(object):
"""
What to do when the input is accepted by the user.
(When Enter was pressed in the command line.)
    :param handler: (optional) A callable which accepts a CLI and `Document`
        that is called when the user accepts input.
    :param render_cli_done: When using a handler, first render the CLI in the
        'done' state, then call the handler. This way the handler runs only
        after the interface has been drawn in its final state.
"""
def __init__(self, handler=None):
assert handler is None or callable(handler)
self.handler = handler
@classmethod
def run_in_terminal(cls, handler, render_cli_done=False):
"""
Create an `AcceptAction` that runs the given handler in the terminal.
:param render_cli_done: When True, render the interface in the 'Done'
state first, then execute the function. If False, erase the
interface instead.
"""
def _handler(cli, buffer):
cli.run_in_terminal(lambda: handler(cli, buffer), render_cli_done=render_cli_done)
return AcceptAction(handler=_handler)
@property
def is_returnable(self):
"""
True when there is something handling accept.
"""
return bool(self.handler)
def validate_and_handle(self, cli, buffer):
"""
Validate buffer and handle the accept action.
"""
if buffer.validate():
if self.handler:
self.handler(cli, buffer)
buffer.append_to_history()
def _return_document_handler(cli, buffer):
cli.set_return_value(buffer.document)
AcceptAction.RETURN_DOCUMENT = AcceptAction(_return_document_handler)
AcceptAction.IGNORE = AcceptAction(handler=None)
class CompletionState(object):
"""
Immutable class that contains a completion state.
"""
def __init__(self, original_document, current_completions=None, complete_index=None):
#: Document as it was when the completion started.
self.original_document = original_document
#: List of all the current Completion instances which are possible at
#: this point.
self.current_completions = current_completions or []
#: Position in the `current_completions` array.
#: This can be `None` to indicate "no completion", the original text.
self.complete_index = complete_index # Position in the `_completions` array.
def __repr__(self):
return '%s(%r, <%r> completions, index=%r)' % (
self.__class__.__name__,
self.original_document, len(self.current_completions), self.complete_index)
def go_to_index(self, index):
"""
Create a new CompletionState object with the new index.
"""
return CompletionState(self.original_document, self.current_completions, complete_index=index)
def new_text_and_position(self):
"""
Return (new_text, new_cursor_position) for this completion.
"""
if self.complete_index is None:
return self.original_document.text, self.original_document.cursor_position
else:
original_text_before_cursor = self.original_document.text_before_cursor
original_text_after_cursor = self.original_document.text_after_cursor
c = self.current_completions[self.complete_index]
if c.start_position == 0:
before = original_text_before_cursor
else:
before = original_text_before_cursor[:c.start_position]
new_text = before + c.text + original_text_after_cursor
new_cursor_position = len(before) + len(c.text)
return new_text, new_cursor_position
@property
def current_completion(self):
"""
Return the current completion, or return `None` when no completion is
selected.
"""
if self.complete_index is not None:
return self.current_completions[self.complete_index]
class Buffer(object):
"""
The core data structure that holds the text and cursor position of the
    current input line and implements all text manipulations on top of it. It
also implements the history, undo stack and the completion state.
    :attr completer: :class:`~prompt_toolkit.completion.Completer` instance.
:attr history: :class:`~prompt_toolkit.history.History` instance.
:attr callbacks: :class:`~.Callbacks` instance.
:attr tempfile_suffix: Suffix to be appended to the tempfile for the 'open
in editor' function.
:attr is_multiline: SimpleFilter to indicate whether we should consider
this buffer a multiline input. If so, key bindings can
decide to insert newlines when pressing [Enter].
(Instead of accepting the input.)
:param complete_while_typing: Filter instance. Decide whether or not to do
asynchronous autocompleting while typing.
:param on_text_changed: Callback instance or None.
:param on_text_insert: Callback instance or None.
:param on_cursor_position_changed: Callback instance or None.
:param enable_history_search: SimpleFilter to indicate when up-arrow partial
        string matching is enabled. It is advised to not enable this at the
same time as `complete_while_typing`, because when there is an
autocompletion found, the up arrows usually browse through the
completions, rather than through the history.
"""
def __init__(self, completer=None, history=None, validator=None, tempfile_suffix='',
is_multiline=Never(), complete_while_typing=Never(),
enable_history_search=Never(), initial_document=None,
accept_action=AcceptAction.RETURN_DOCUMENT, read_only=False,
on_text_changed=None, on_text_insert=None, on_cursor_position_changed=None):
# Accept both filters and booleans as input.
enable_history_search = to_simple_filter(enable_history_search)
is_multiline = to_simple_filter(is_multiline)
complete_while_typing = to_simple_filter(complete_while_typing)
read_only = to_simple_filter(read_only)
# Validate input.
assert completer is None or isinstance(completer, Completer)
assert history is None or isinstance(history, History)
assert on_text_changed is None or isinstance(on_text_changed, Callback)
assert on_text_insert is None or isinstance(on_text_insert, Callback)
assert on_cursor_position_changed is None or isinstance(on_cursor_position_changed, Callback)
self.completer = completer
self.validator = validator
self.tempfile_suffix = tempfile_suffix
self.accept_action = accept_action
# Filters. (Usually, used by the key bindings to drive the buffer.)
self.is_multiline = is_multiline
self.complete_while_typing = complete_while_typing
self.enable_history_search = enable_history_search
self.read_only = read_only
#: The command buffer history.
# Note that we shouldn't use a lazy 'or' here. bool(history) could be
# False when empty.
self.history = InMemoryHistory() if history is None else history
self.__cursor_position = 0
# Events
self.on_text_changed = on_text_changed or Callback()
self.on_text_insert = on_text_insert or Callback()
self.on_cursor_position_changed = on_cursor_position_changed or Callback()
self.reset(initial_document=initial_document)
def reset(self, initial_document=None, append_to_history=False):
"""
:param append_to_history: Append current input to history first.
"""
assert initial_document is None or isinstance(initial_document, Document)
if append_to_history:
self.append_to_history()
initial_document = initial_document or Document()
self.__cursor_position = initial_document.cursor_position
# `ValidationError` instance. (Will be set when the input is wrong.)
self.validation_error = None
# State of the selection.
self.selection_state = None
# State of complete browser
self.complete_state = None # For interactive completion through Ctrl-N/Ctrl-P.
# The history search text. (Used for filtering the history when we
# browse through it.)
self.history_search_text = None
# Undo/redo stacks
self._undo_stack = [] # Stack of (text, cursor_position)
self._redo_stack = []
#: The working lines. Similar to history, except that this can be
#: modified. The user can press arrow_up and edit previous entries.
#: Ctrl-C should reset this, and copy the whole history back in here.
#: Enter should process the current command and append to the real
#: history.
self._working_lines = self.history.strings[:]
self._working_lines.append(initial_document.text)
self.__working_index = len(self._working_lines) - 1
# <getters/setters>
def _set_text(self, value):
""" set text at current working_index. Return whether it changed. """
original_value = self._working_lines[self.working_index]
self._working_lines[self.working_index] = value
return value != original_value
def _set_cursor_position(self, value):
""" Set cursor position. Return whether it changed. """
original_position = self.__cursor_position
self.__cursor_position = max(0, value)
return value != original_position
@property
def text(self):
return self._working_lines[self.working_index]
@text.setter
def text(self, value):
"""
Setting text. (When doing this, make sure that the cursor_position is
valid for this text. text/cursor_position should be consistent at any time,
otherwise set a Document instead.)
"""
assert isinstance(value, six.text_type), 'Got %r' % value
assert self.cursor_position <= len(value)
# Don't allow editing of read-only buffers.
if self.read_only():
raise EditReadOnlyBuffer()
changed = self._set_text(value)
if changed:
self._text_changed()
# Reset history search text.
self.history_search_text = None
@property
def cursor_position(self):
return self.__cursor_position
@cursor_position.setter
def cursor_position(self, value):
"""
Setting cursor position.
"""
assert isinstance(value, int)
assert value <= len(self.text)
changed = self._set_cursor_position(value)
if changed:
self._cursor_position_changed()
@property
def working_index(self):
return self.__working_index
@working_index.setter
def working_index(self, value):
if self.__working_index != value:
self.__working_index = value
self._text_changed()
def _text_changed(self):
# Remove any validation errors and complete state.
self.validation_error = None
self.complete_state = None
self.selection_state = None
# fire 'on_text_changed' event.
self.on_text_changed.fire()
def _cursor_position_changed(self):
# Remove any validation errors and complete state.
self.validation_error = None
self.complete_state = None
# Note that the cursor position can change if we have a selection the
# new position of the cursor determines the end of the selection.
# fire 'on_cursor_position_changed' event.
self.on_cursor_position_changed.fire()
@property
def document(self):
"""
Return :class:`Document` instance from the current text and cursor
position.
"""
return Document(self.text, self.cursor_position, selection=self.selection_state)
@document.setter
def document(self, value):
"""
Set :class:`Document` instance.
This will set both the text and cursor position at the same time, but
atomically. (Change events will be triggered only after both have been set.)
"""
assert isinstance(value, Document)
# Don't allow editing of read-only buffers.
if self.read_only():
raise EditReadOnlyBuffer()
# Set text and cursor position first.
text_changed = self._set_text(value.text)
cursor_position_changed = self._set_cursor_position(value.cursor_position)
# Now handle change events. (We do this when text/cursor position is
# both set and consistent.)
if text_changed:
self._text_changed()
if cursor_position_changed:
self._cursor_position_changed()
# End of <getters/setters>
def save_to_undo_stack(self, clear_redo_stack=True):
"""
        Save the current state (input text and cursor position), so that we can
restore it by calling undo.
"""
        # Save only if the text differs from the text at the top of the
        # stack. If the text is the same, just update the cursor position.
if self._undo_stack and self._undo_stack[-1][0] == self.text:
self._undo_stack[-1] = (self._undo_stack[-1][0], self.cursor_position)
else:
self._undo_stack.append((self.text, self.cursor_position))
# Saving anything to the undo stack, clears the redo stack.
if clear_redo_stack:
self._redo_stack = []
def transform_lines(self, line_index_iterator, transform_callback):
"""
Transforms the text on a range of lines.
        When the iterator yields an index not in the range of lines that the
document contains, it skips them silently.
To uppercase some lines::
new_text = transform_lines(range(5,10), lambda text: text.upper())
:param line_index_iterator: Iterator of line numbers (int)
:param transform_callback: callable that takes the original text of a
line, and return the new text for this line.
:returns: The new text.
"""
# Split lines
lines = self.text.split('\n')
# Apply transformation
for index in line_index_iterator:
try:
lines[index] = transform_callback(lines[index])
except IndexError:
pass
return '\n'.join(lines)
def transform_region(self, from_, to, transform_callback):
"""
Transform a part of the input string.
        :param from_: (int) start position.
        :param to: (int) end position.
        :param transform_callback: Callable which accepts a string and returns
the transformed string.
"""
assert from_ < to
self.text = ''.join([
self.text[:from_] +
transform_callback(self.text[from_:to]) +
self.text[to:]
])
def cursor_left(self, count=1):
self.cursor_position += self.document.get_cursor_left_position(count=count)
def cursor_right(self, count=1):
self.cursor_position += self.document.get_cursor_right_position(count=count)
def cursor_up(self, count=1):
""" (for multiline edit). Move cursor to the previous line. """
self.cursor_position += self.document.get_cursor_up_position(count=count)
def cursor_down(self, count=1):
""" (for multiline edit). Move cursor to the next line. """
self.cursor_position += self.document.get_cursor_down_position(count=count)
def auto_up(self, count=1):
"""
If we're not on the first line (of a multiline input) go a line up,
otherwise go back in history. (If nothing is selected.)
"""
if self.complete_state:
self.complete_previous(count=count)
elif self.document.cursor_position_row > 0:
self.cursor_position += self.document.get_cursor_up_position(count=count)
elif not self.selection_state:
self.history_backward(count=count)
def auto_down(self, count=1):
"""
If we're not on the last line (of a multiline input) go a line down,
otherwise go forward in history. (If nothing is selected.)
"""
if self.complete_state:
self.complete_next(count=count)
elif self.document.cursor_position_row < self.document.line_count - 1:
self.cursor_position += self.document.get_cursor_down_position(count=count)
elif not self.selection_state:
self.history_forward(count=count)
def delete_before_cursor(self, count=1):
"""
Delete character before cursor, return deleted character.
"""
assert count >= 0
deleted = ''
if self.cursor_position > 0:
deleted = self.text[self.cursor_position - count:self.cursor_position]
new_text = self.text[:self.cursor_position - count] + self.text[self.cursor_position:]
new_cursor_position = self.cursor_position - len(deleted)
# Set new Document atomically.
self.document = Document(new_text, new_cursor_position)
return deleted
def delete(self, count=1):
"""
Delete one character. Return deleted character.
"""
if self.cursor_position < len(self.text):
deleted = self.document.text_after_cursor[:count]
self.text = self.text[:self.cursor_position] + \
self.text[self.cursor_position + len(deleted):]
return deleted
else:
return ''
def join_next_line(self):
"""
Join the next line to the current one by deleting the line ending after
the current line.
"""
if not self.document.on_last_line:
self.cursor_position += self.document.get_end_of_line_position()
self.delete()
# Remove spaces.
self.text = (self.document.text_before_cursor + ' ' +
self.document.text_after_cursor.lstrip(' '))
def join_selected_lines(self):
"""
Join the selected lines.
"""
assert self.selection_state
# Get lines.
from_, to = self.document.selection_range()
before = self.text[:from_]
lines = self.text[from_:to].splitlines()
after = self.text[to:]
# Replace leading spaces with just one space.
lines = [l.lstrip(' ') + ' ' for l in lines]
# Set new document.
self.document = Document(text=before + ''.join(lines) + after,
cursor_position=len(before + ''.join(lines[:-1])) - 1)
def swap_characters_before_cursor(self):
"""
Swap the last two characters before the cursor.
"""
pos = self.cursor_position
if pos >= 2:
a = self.text[pos - 2]
b = self.text[pos - 1]
self.text = self.text[:pos-2] + b + a + self.text[pos:]
def go_to_history(self, index):
"""
Go to this item in the history.
"""
if index < len(self._working_lines):
self.working_index = index
self.cursor_position = len(self.text)
def complete_next(self, count=1):
"""
Browse to the next completions.
(Does nothing if there are no completion.)
"""
if self.complete_state:
completions_count = len(self.complete_state.current_completions)
if self.complete_state.complete_index is None:
index = 0
elif self.complete_state.complete_index == completions_count - 1:
index = None
else:
index = min(completions_count-1, self.complete_state.complete_index + count)
self._go_to_completion(index)
def complete_previous(self, count=1):
"""
Browse to the previous completions.
(Does nothing if there are no completion.)
"""
if self.complete_state:
if self.complete_state.complete_index == 0:
index = None
elif self.complete_state.complete_index is None:
index = len(self.complete_state.current_completions) - 1
else:
index = max(0, self.complete_state.complete_index - count)
self._go_to_completion(index)
def cancel_completion(self):
"""
Cancel completion, go back to the original text.
"""
if self.complete_state:
self._go_to_completion(None)
self.complete_state = None
def set_completions(self, completions, go_to_first=True, go_to_last=False):
"""
Start completions. (Generate list of completions and initialize.)
"""
assert not (go_to_first and go_to_last)
# Generate list of all completions.
if completions is None:
if self.completer:
completions = list(self.completer.get_completions(
self.document,
CompleteEvent(completion_requested=True)
))
else:
completions = []
# Set `complete_state`.
if completions:
self.complete_state = CompletionState(
original_document=self.document,
current_completions=completions)
if go_to_first:
self._go_to_completion(0)
elif go_to_last:
self._go_to_completion(len(completions) - 1)
else:
self._go_to_completion(None)
else:
self.complete_state = None
def start_history_lines_completion(self):
"""
Start a completion based on all the other lines in the document and the
history.
"""
found_completions = set()
completions = []
# For every line of the whole history, find matches with the current line.
current_line = self.document.current_line_before_cursor.lstrip()
for i, string in enumerate(self._working_lines):
for j, l in enumerate(string.split('\n')):
l = l.strip()
if l and l.startswith(current_line):
# When a new line has been found.
if l not in found_completions:
found_completions.add(l)
# Create completion.
if i == self.working_index:
display_meta = "Current, line %s" % (j+1)
else:
display_meta = "History %s, line %s" % (i+1, j+1)
completions.append(Completion(
l,
start_position=-len(current_line),
display_meta=display_meta))
self.set_completions(completions=completions[::-1])
def _go_to_completion(self, index):
"""
Select a completion from the list of current completions.
"""
assert self.complete_state
# Set new completion
state = self.complete_state.go_to_index(index)
# Set text/cursor position
new_text, new_cursor_position = state.new_text_and_position()
self.document = Document(new_text, new_cursor_position)
# (changing text/cursor position will unset complete_state.)
self.complete_state = state
def _set_history_search(self):
""" Set `history_search_text`. """
if self.enable_history_search():
if self.history_search_text is None:
self.history_search_text = self.text
else:
self.history_search_text = None
def _history_matches(self, i):
"""
True when the current entry matches the history search.
(when we don't have history search, it's also True.)
"""
return (self.history_search_text is None or
self._working_lines[i].startswith(self.history_search_text))
def history_forward(self, count=1):
"""
Move forwards through the history.
:param count: Amount of items to move forward.
:param history_search: When True, filter history using self.history_search_text.
"""
self._set_history_search()
# Go forward in history.
found_something = False
for i in range(self.working_index + 1, len(self._working_lines)):
if self._history_matches(i):
self.working_index = i
count -= 1
found_something = True
if count == 0:
break
# If we found an entry, move cursor to the end of the first line.
if found_something:
self.cursor_position = 0
self.cursor_position += self.document.get_end_of_line_position()
def history_backward(self, count=1):
"""
Move backwards through history.
"""
self._set_history_search()
# Go back in history.
found_something = False
for i in range(self.working_index - 1, -1, -1):
if self._history_matches(i):
self.working_index = i
count -= 1
found_something = True
if count == 0:
break
# If we move to another entry, move cursor to the end of the line.
if found_something:
self.cursor_position = len(self.text)
def start_selection(self, selection_type=SelectionType.CHARACTERS):
"""
Take the current cursor position as the start of this selection.
"""
self.selection_state = SelectionState(self.cursor_position, selection_type)
def copy_selection(self, _cut=False):
"""
Copy selected text and return :class:`ClipboardData` instance.
"""
if self.selection_state:
type = self.selection_state.type
# Take start and end of selection
from_, to = self.document.selection_range()
copied_text = self.text[from_:to]
# If cutting, remove the text and set the new cursor position.
if _cut:
self.document = Document(text=self.text[:from_] + self.text[to + 1:],
cursor_position=min(from_, to))
self.selection_state = None
return ClipboardData(copied_text, type)
else:
return ClipboardData('')
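    # Illustrative sketch (not part of the original source): copying a
    # selection. The exact copied span follows Document.selection_range().
    #
    #     buff = Buffer(initial_document=Document('hello world', 0))
    #     buff.start_selection()
    #     buff.cursor_right(count=5)
    #     data = buff.copy_selection()   # ClipboardData with the selected text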
def cut_selection(self):
"""
Delete selected text and return :class:`ClipboardData` instance.
"""
return self.copy_selection(_cut=True)
def newline(self, copy_margin=True):
"""
Insert a line ending at the current position.
"""
if copy_margin:
self.insert_text('\n' + self.document.leading_whitespace_in_current_line)
else:
self.insert_text('\n')
def insert_line_above(self, copy_margin=True):
"""
Insert a new line above the current one.
"""
if copy_margin:
insert = self.document.leading_whitespace_in_current_line + '\n'
else:
insert = '\n'
self.cursor_position += self.document.get_start_of_line_position()
self.insert_text(insert)
self.cursor_position -= 1
def insert_line_below(self, copy_margin=True):
"""
Insert a new line below the current one.
"""
if copy_margin:
insert = '\n' + self.document.leading_whitespace_in_current_line
else:
insert = '\n'
self.cursor_position += self.document.get_end_of_line_position()
self.insert_text(insert)
def insert_text(self, data, overwrite=False, move_cursor=True, fire_event=True):
"""
Insert characters at cursor position.
:param fire_event: Fire `on_text_insert` event. This is mainly used to
trigger autocompletion while typing.
"""
# In insert/text mode.
if overwrite:
# Don't overwrite the newline itself. Just before the line ending, it should act like insert mode.
overwritten_text = self.text[self.cursor_position:self.cursor_position+len(data)]
if '\n' in overwritten_text:
overwritten_text = overwritten_text[:overwritten_text.find('\n')]
self.text = self.text[:self.cursor_position] + data + self.text[self.cursor_position+len(overwritten_text):]
else:
self.text = self.text[:self.cursor_position] + data + self.text[self.cursor_position:]
if move_cursor:
self.cursor_position += len(data)
# Fire 'on_text_insert' event.
if fire_event:
self.on_text_insert.fire()
def paste_clipboard_data(self, data, before=False, count=1):
"""
Insert the data from the clipboard.
"""
assert isinstance(data, ClipboardData)
if data.type == SelectionType.CHARACTERS:
if before:
self.insert_text(data.text * count)
else:
self.cursor_right()
self.insert_text(data.text * count, fire_event=False)
self.cursor_left()
elif data.type == SelectionType.LINES:
if before:
self.cursor_position += self.document.get_start_of_line_position(after_whitespace=False)
self.insert_text((data.text + '\n') * count, move_cursor=False)
else:
self.cursor_position += self.document.get_end_of_line_position()
self.insert_text(('\n' + data.text) * count, move_cursor=False, fire_event=False)
self.cursor_down()
self.cursor_position += self.document.get_start_of_line_position(after_whitespace=True)
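    # Usage sketch (illustrative; `buff` and the data are hypothetical):
    # pasting linewise clipboard data below the current line three times,
    # as a repeated vi-style 'p' would do.
    #
    #   data = ClipboardData('pasted line', SelectionType.LINES)
    #   buff.paste_clipboard_data(data, before=False, count=3)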
def undo(self):
        # Pop from the undo-stack until we find a text that is different from
# the current text. (The current logic of `save_to_undo_stack` will
# cause that the top of the undo stack is usually the same as the
# current text, so in that case we have to pop twice.)
while self._undo_stack:
text, pos = self._undo_stack.pop()
if text != self.text:
# Push current text to redo stack.
self._redo_stack.append((self.text, self.cursor_position))
# Set new text/cursor_position.
self.document = Document(text, cursor_position=pos)
break
def redo(self):
if self._redo_stack:
# Copy current state on undo stack.
self.save_to_undo_stack(clear_redo_stack=False)
# Pop state from redo stack.
text, pos = self._redo_stack.pop()
self.document = Document(text, cursor_position=pos)
def validate(self):
"""
Returns `True` if valid.
"""
self.validation_error = None
# Validate first. If not valid, set validation exception.
if self.validator:
try:
self.validator.validate(self.document)
except ValidationError as e:
# Set cursor position (don't allow invalid values.)
cursor_position = e.index
self.cursor_position = min(max(0, cursor_position), len(self.text))
self.validation_error = e
return False
return True
def append_to_history(self):
"""
Append the current input to the history.
(Only if valid input.)
"""
# Validate first. If not valid, set validation exception.
if not self.validate():
return
        # Save at the tail of the history. (But don't if the last entry in
        # the history is already the same.)
if self.text and (not len(self.history) or self.history[-1] != self.text):
self.history.append(self.text)
def _search(self, search_state, include_current_position=False, count=1):
"""
Execute search. Return (working_index, cursor_position) tuple when this
search is applied. Returns `None` when this text cannot be found.
"""
assert isinstance(search_state, SearchState)
assert isinstance(count, int) and count > 0
text = search_state.text
direction = search_state.direction
ignore_case = search_state.ignore_case()
def search_once(working_index, document):
"""
Do search one time.
Return (working_index, document) or `None`
"""
if direction == IncrementalSearchDirection.FORWARD:
# Try find at the current input.
new_index = document.find(
text, include_current_position=include_current_position,
ignore_case=ignore_case)
if new_index is not None:
return (working_index,
Document(document.text, document.cursor_position + new_index))
else:
# No match, go forward in the history. (Include len+1 to wrap around.)
# (Here we should always include all cursor positions, because
# it's a different line.)
for i in range(working_index + 1, len(self._working_lines) + 1):
i %= len(self._working_lines)
document = Document(self._working_lines[i], 0)
new_index = document.find(text, include_current_position=True,
ignore_case=ignore_case)
if new_index is not None:
return (i, Document(document.text, new_index))
else:
# Try find at the current input.
new_index = document.find_backwards(
text, ignore_case=ignore_case)
if new_index is not None:
return (working_index,
Document(document.text, document.cursor_position + new_index))
else:
# No match, go back in the history. (Include -1 to wrap around.)
for i in range(working_index - 1, -2, -1):
i %= len(self._working_lines)
document = Document(self._working_lines[i], len(self._working_lines[i]))
new_index = document.find_backwards(
text, ignore_case=ignore_case)
if new_index is not None:
return (i, Document(document.text, len(document.text) + new_index))
# Do 'count' search iterations.
working_index = self.working_index
document = self.document
for i in range(count):
result = search_once(working_index, document)
if result is None:
return # Nothing found.
else:
working_index, document = result
return (working_index, document.cursor_position)
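    # Worked example (not part of the original class): with working lines
    # ['foo', 'bar'], working_index == 1 and a FORWARD search for 'f', the
    # current line has no match, so the wrap-around loop visits index
    # 2 % 2 == 0, matches 'foo' there, and _search returns (0, 0).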
def document_for_search(self, search_state):
"""
Return a `Document` instance that has the text/cursor position for this
search, if we would apply it.
"""
search_result = self._search(search_state, include_current_position=True)
if search_result is None:
return self.document
else:
working_index, cursor_position = search_result
return Document(self._working_lines[working_index], cursor_position)
def apply_search(self, search_state, include_current_position=True, count=1):
"""
Return a `Document` instance that has the text/cursor position for this
search, if we would apply it.
"""
search_result = self._search(search_state,
include_current_position=include_current_position, count=count)
if search_result is not None:
working_index, cursor_position = search_result
self.working_index = working_index
self.cursor_position = cursor_position
def exit_selection(self):
self.selection_state = None
def open_in_editor(self, cli):
"""
Open code in editor.
:param cli: `CommandLineInterface` instance.
"""
if self.read_only():
raise EditReadOnlyBuffer()
# Write to temporary file
descriptor, filename = tempfile.mkstemp(self.tempfile_suffix)
os.write(descriptor, self.text.encode('utf-8'))
os.close(descriptor)
# Open in editor
# (We need to use `cli.run_in_terminal`, because not all editors go to
# the alternate screen buffer, and some could influence the cursor
# position.)
        success = cli.run_in_terminal(lambda: self._open_file_in_editor(filename))
        # Read content again.
        if success:
with open(filename, 'rb') as f:
text = f.read().decode('utf-8')
# Drop trailing newline. (Editors are supposed to add it at the
# end, but we don't need it.)
if text.endswith('\n'):
text = text[:-1]
self.document = Document(
text=text,
cursor_position=len(text))
# Clean up temp file.
os.remove(filename)
def _open_file_in_editor(self, filename):
"""
Call editor executable.
Return True when we received a zero return code.
"""
# If the 'EDITOR' environment variable has been set, use that one.
# Otherwise, fall back to the first available editor that we can find.
editor = os.environ.get('EDITOR')
editors = [
editor,
# Order of preference.
'/usr/bin/editor',
'/usr/bin/nano',
'/usr/bin/pico',
'/usr/bin/vi',
'/usr/bin/emacs',
]
for e in editors:
if e:
try:
returncode = subprocess.call([e, filename])
return returncode == 0
except OSError:
# Executable does not exist, try the next one.
pass
return False
def indent(buffer, from_row, to_row, count=1):
"""
Indent text of the `Buffer` object.
"""
current_row = buffer.document.cursor_position_row
line_range = range(from_row, to_row)
# Apply transformation.
new_text = buffer.transform_lines(line_range, lambda l: ' ' * count + l)
buffer.document = Document(
new_text,
Document(new_text).translate_row_col_to_index(current_row, 0))
# Go to the start of the line.
buffer.cursor_position += buffer.document.get_start_of_line_position(after_whitespace=True)
def unindent(buffer, from_row, to_row, count=1):
"""
Unindent text of the `Buffer` object.
"""
current_row = buffer.document.cursor_position_row
line_range = range(from_row, to_row)
def transform(text):
remove = ' ' * count
if text.startswith(remove):
return text[len(remove):]
else:
return text.lstrip()
# Apply transformation.
new_text = buffer.transform_lines(line_range, transform)
buffer.document = Document(
new_text,
Document(new_text).translate_row_col_to_index(current_row, 0))
# Go to the start of the line.
buffer.cursor_position += buffer.document.get_start_of_line_position(after_whitespace=True)
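# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): after `indent`,
# every line in the range starts with `4 * count` spaces, so `unindent`
# with the same arguments restores the text exactly. `buff` is assumed to
# be a multi-line `Buffer` instance.
# ---------------------------------------------------------------------------
def _indent_roundtrip_example(buff, from_row, to_row):
    before = buff.text
    indent(buff, from_row, to_row, count=1)    # prefix each line with 4 spaces
    unindent(buff, from_row, to_row, count=1)  # strip the same 4 spaces again
    assert buff.text == before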
| bsd-3-clause | -6,038,432,106,492,722,000 | 35.223602 | 120 | 0.588698 | false |
davidyack/Xrm.Tools.CRMWebAPI | python/setup.py | 1 | 1265 | #!/usr/bin/env python
'''The setup and build script for the python-crmwebapi library.'''
import os
from setuptools import setup, find_packages
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='xrm-tools-crmwebapi',
version='1.0',
author='Xrm.Tools',
author_email='',
license='MIT',
url='https://github.com/davidyack/Xrm.Tools.CRMWebAPI',
keywords='crmwebapi',
description='A Python version of CRMWebAPI',
long_description=(read('README.rst')),
packages=find_packages(exclude=['tests*']),
install_requires=['future', 'requests'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
)
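# Usage sketch (illustrative, not part of the original file): build a source
# distribution or install the package locally with
#
#   python setup.py sdist
#   pip install .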
| mit | -3,837,700,705,353,445,400 | 30.625 | 71 | 0.621344 | false |
facebookresearch/Detectron | tools/convert_coco_model_to_cityscapes.py | 1 | 4289 | #!/usr/bin/env python
# Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
# Convert a detection model trained for COCO into a model that can be fine-tuned
# on cityscapes
#
# cityscapes_to_coco
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import os
import sys
import detectron.datasets.coco_to_cityscapes_id as cs
from detectron.utils.io import load_object
from detectron.utils.io import save_object
NUM_CS_CLS = 9
NUM_COCO_CLS = 81
def parse_args():
parser = argparse.ArgumentParser(
description='Convert a COCO pre-trained model for use with Cityscapes')
parser.add_argument(
'--coco_model', dest='coco_model_file_name',
help='Pretrained network weights file path',
default=None, type=str)
parser.add_argument(
'--convert_func', dest='convert_func',
help='Blob conversion function',
default='cityscapes_to_coco', type=str)
parser.add_argument(
'--output', dest='out_file_name',
help='Output file path',
default=None, type=str)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
return args
def convert_coco_blobs_to_cityscape_blobs(model_dict):
for k, v in model_dict['blobs'].items():
if v.shape[0] == NUM_COCO_CLS or v.shape[0] == 4 * NUM_COCO_CLS:
coco_blob = model_dict['blobs'][k]
print(
'Converting COCO blob {} with shape {}'.
format(k, coco_blob.shape)
)
cs_blob = convert_coco_blob_to_cityscapes_blob(
coco_blob, args.convert_func
)
print(' -> converted shape {}'.format(cs_blob.shape))
model_dict['blobs'][k] = cs_blob
def convert_coco_blob_to_cityscapes_blob(coco_blob, convert_func):
# coco blob (81, ...) or (81*4, ...)
coco_shape = coco_blob.shape
leading_factor = int(coco_shape[0] / NUM_COCO_CLS)
tail_shape = list(coco_shape[1:])
assert leading_factor == 1 or leading_factor == 4
# Reshape in [num_classes, ...] form for easier manipulations
coco_blob = coco_blob.reshape([NUM_COCO_CLS, -1] + tail_shape)
# Default initialization uses Gaussian with mean and std to match the
# existing parameters
std = coco_blob.std()
mean = coco_blob.mean()
cs_shape = [NUM_CS_CLS] + list(coco_blob.shape[1:])
cs_blob = (np.random.randn(*cs_shape) * std + mean).astype(np.float32)
# Replace random parameters with COCO parameters if class mapping exists
for i in range(NUM_CS_CLS):
coco_cls_id = getattr(cs, convert_func)(i)
if coco_cls_id >= 0: # otherwise ignore (rand init)
cs_blob[i] = coco_blob[coco_cls_id]
cs_shape = [NUM_CS_CLS * leading_factor] + tail_shape
return cs_blob.reshape(cs_shape)
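# Illustrative sketch (hypothetical shapes, not part of Detectron): a COCO
# box-regression blob of shape (81*4, 1024) is viewed as (81, 4*1024); rows
# whose class has a cityscapes->coco mapping are copied over, the remaining
# rows keep the matched-moments Gaussian init, and the result is flattened
# back to (9*4, 1024):
#
#   coco_w = np.random.randn(NUM_COCO_CLS * 4, 1024).astype(np.float32)
#   cs_w = convert_coco_blob_to_cityscapes_blob(coco_w, 'cityscapes_to_coco')
#   assert cs_w.shape == (NUM_CS_CLS * 4, 1024)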
def remove_momentum(model_dict):
    # Iterate over a snapshot of the keys: entries are deleted while looping,
    # which would raise on Python 3 otherwise.
    for k in list(model_dict['blobs'].keys()):
        if k.endswith('_momentum'):
            del model_dict['blobs'][k]
def load_and_convert_coco_model(args):
model_dict = load_object(args.coco_model_file_name)
remove_momentum(model_dict)
convert_coco_blobs_to_cityscape_blobs(model_dict)
return model_dict
if __name__ == '__main__':
args = parse_args()
print(args)
assert os.path.exists(args.coco_model_file_name), \
'Weights file does not exist'
weights = load_and_convert_coco_model(args)
save_object(weights, args.out_file_name)
print('Wrote blobs to {}:'.format(args.out_file_name))
print(sorted(weights['blobs'].keys()))
| apache-2.0 | -572,377,124,243,045,500 | 32.507813 | 80 | 0.639543 | false |
rousseab/pymatgen | pymatgen/io/abinitio/abiobjects.py | 1 | 42855 | # coding: utf-8
"""
Low-level objects providing an abstraction for the objects involved in the calculation.
"""
from __future__ import unicode_literals, division, print_function
import collections
import abc
import six
import numpy as np
import pymatgen.core.units as units
from pprint import pformat
from monty.design_patterns import singleton
from monty.collections import AttrDict
from pymatgen.core.design_patterns import Enum
from pymatgen.serializers.json_coders import PMGSONable, pmg_serialize
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from monty.json import MontyEncoder, MontyDecoder
def contract(s):
"""
>>> assert contract("1 1 1 2 2 3") == "3*1 2*2 1*3"
>>> assert contract("1 1 3 2 3") == "2*1 1*3 1*2 1*3"
"""
if not s: return s
tokens = s.split()
old = tokens[0]
count = [[1, old]]
for t in tokens[1:]:
if t == old:
count[-1][0] += 1
else:
old = t
count.append([1, t])
return " ".join("%d*%s" % (c, t) for c, t in count)
class AbivarAble(six.with_metaclass(abc.ABCMeta, object)):
"""
An `AbivarAble` object provides a method `to_abivars`
that returns a dictionary with the abinit variables.
"""
@abc.abstractmethod
def to_abivars(self):
"""Returns a dictionary with the abinit variables."""
#@abc.abstractmethod
#def from_abivars(cls, vars):
# """Build the object from a dictionary with Abinit variables."""
def __str__(self):
return pformat(self.to_abivars(), indent=1, width=80, depth=None)
def __contains__(self, key):
return key in self.to_abivars()
@singleton
class MandatoryVariable(object):
"""
Singleton used to tag mandatory variables, just because I can use
the cool syntax: variable is MANDATORY!
"""
@singleton
class DefaultVariable(object):
"""Singleton used to tag variables that will have the default value"""
MANDATORY = MandatoryVariable()
DEFAULT = DefaultVariable()
class SpinMode(collections.namedtuple('SpinMode', "mode nsppol nspinor nspden"), AbivarAble, PMGSONable):
"""
Different configurations of the electron density as implemented in abinit:
One can use as_spinmode to construct the object via SpinMode.as_spinmode
(string) where string can assume the values:
- polarized
- unpolarized
- afm (anti-ferromagnetic)
- spinor (non-collinear magnetism)
- spinor_nomag (non-collinear, no magnetism)
"""
@classmethod
def as_spinmode(cls, obj):
"""Converts obj into a `SpinMode` instance"""
if isinstance(obj, cls):
return obj
else:
# Assume a string with mode
try:
return _mode2spinvars[obj]
except KeyError:
raise KeyError("Wrong value for spin_mode: %s" % str(obj))
def to_abivars(self):
return {
"nsppol": self.nsppol,
"nspinor": self.nspinor,
"nspden": self.nspden,
}
@pmg_serialize
def as_dict(self):
return {k: getattr(self, k) for k in self._fields}
@classmethod
def from_dict(cls, d):
return cls(**{k: d[k] for k in d if k in cls._fields})
# An handy Multiton
_mode2spinvars = {
"unpolarized": SpinMode("unpolarized", 1, 1, 1),
"polarized": SpinMode("polarized", 2, 1, 2),
"afm": SpinMode("afm", 1, 1, 2),
"spinor": SpinMode("spinor", 1, 2, 4),
"spinor_nomag": SpinMode("spinor_nomag", 1, 2, 1),
}
class Smearing(AbivarAble, PMGSONable):
"""
    Variables defining the smearing technique. The preferred way to instantiate
a `Smearing` object is via the class method Smearing.as_smearing(string)
"""
#: Mapping string_mode --> occopt
_mode2occopt = {
'nosmearing': 1,
'fermi_dirac': 3,
'marzari4': 4,
'marzari5': 5,
'methfessel': 6,
'gaussian': 7}
def __init__(self, occopt, tsmear):
self.occopt = occopt
self.tsmear = tsmear
def __str__(self):
s = "occopt %d # %s Smearing\n" % (self.occopt, self.mode)
if self.tsmear:
s += 'tsmear %s' % self.tsmear
return s
def __eq__(self, other):
return (self.occopt == other.occopt and
np.allclose(self.tsmear, other.tsmear))
def __ne__(self, other):
return not self == other
def __bool__(self):
return self.mode != "nosmearing"
# py2 old version
__nonzero__ = __bool__
@classmethod
def as_smearing(cls, obj):
"""
Constructs an instance of `Smearing` from obj. Accepts obj in the form:
* Smearing instance
* "name:tsmear" e.g. "gaussian:0.004" (Hartree units)
* "name:tsmear units" e.g. "gaussian:0.1 eV"
* None --> no smearing
"""
if obj is None:
return Smearing.nosmearing()
if isinstance(obj, cls):
return obj
# obj is a string
if obj == "nosmearing":
return cls.nosmearing()
else:
obj, tsmear = obj.split(":")
obj.strip()
occopt = cls._mode2occopt[obj]
try:
tsmear = float(tsmear)
except ValueError:
tsmear, unit = tsmear.split()
tsmear = units.Energy(float(tsmear), unit).to("Ha")
return cls(occopt, tsmear)
@property
def mode(self):
for (mode_str, occopt) in self._mode2occopt.items():
if occopt == self.occopt:
return mode_str
raise AttributeError("Unknown occopt %s" % self.occopt)
@staticmethod
def nosmearing():
return Smearing(1, 0.0)
def to_abivars(self):
if self.mode == "nosmearing":
return {"occopt": 1, "tsmear": 0.0}
else:
return {"occopt": self.occopt, "tsmear": self.tsmear,}
@pmg_serialize
def as_dict(self):
"""json friendly dict representation of Smearing"""
return {"occopt": self.occopt, "tsmear": self.tsmear}
@staticmethod
def from_dict(d):
return Smearing(d["occopt"], d["tsmear"])
class ElectronsAlgorithm(dict, AbivarAble, PMGSONable):
"""Variables controlling the SCF/NSCF algorithm."""
# None indicates that we use abinit defaults.
_DEFAULT = dict(
iprcell=None, iscf=None, diemac=None, diemix=None, diemixmag=None,
dielam=None, diegap=None, dielng=None, diecut=None, nstep=50)
def __init__(self, *args, **kwargs):
super(ElectronsAlgorithm, self).__init__(*args, **kwargs)
for k in self:
if k not in self._DEFAULT:
raise ValueError("%s: No default value has been provided for "
"key %s" % (self.__class__.__name__, k))
def to_abivars(self):
return self.copy()
@pmg_serialize
def as_dict(self):
return self.copy()
@classmethod
def from_dict(cls, d):
d = d.copy()
d.pop("@module", None)
d.pop("@class", None)
return cls(**d)
class Electrons(AbivarAble, PMGSONable):
"""The electronic degrees of freedom"""
def __init__(self, spin_mode="polarized", smearing="fermi_dirac:0.1 eV",
algorithm=None, nband=None, fband=None, charge=0.0, comment=None): # occupancies=None,
"""
Constructor for Electrons object.
Args:
comment: String comment for Electrons
charge: Total charge of the system. Default is 0.
"""
super(Electrons, self).__init__()
self.comment = comment
self.smearing = Smearing.as_smearing(smearing)
self.spin_mode = SpinMode.as_spinmode(spin_mode)
self.nband = nband
self.fband = fband
self.charge = charge
self.algorithm = algorithm
@property
def nsppol(self):
return self.spin_mode.nsppol
@property
def nspinor(self):
return self.spin_mode.nspinor
@property
def nspden(self):
return self.spin_mode.nspden
def as_dict(self):
"json friendly dict representation"
d = {}
d["@module"] = self.__class__.__module__
d["@class"] = self.__class__.__name__
d["spin_mode"] = self.spin_mode.as_dict()
d["smearing"] = self.smearing.as_dict()
d["algorithm"] = self.algorithm.as_dict() if self.algorithm else None
d["nband"] = self.nband
d["fband"] = self.fband
d["charge"] = self.charge
d["comment"] = self.comment
return d
@classmethod
def from_dict(cls, d):
d = d.copy()
d.pop("@module", None)
d.pop("@class", None)
dec = MontyDecoder()
d["spin_mode"] = dec.process_decoded(d["spin_mode"])
d["smearing"] = dec.process_decoded(d["smearing"])
d["algorithm"] = dec.process_decoded(d["algorithm"]) if d["algorithm"] else None
return cls(**d)
def to_abivars(self):
abivars = self.spin_mode.to_abivars()
abivars.update({
"nband" : self.nband,
"fband" : self.fband,
"charge" : self.charge,
})
if self.smearing:
abivars.update(self.smearing.to_abivars())
if self.algorithm:
abivars.update(self.algorithm)
#abivars["#comment"] = self.comment
return abivars
class KSampling(AbivarAble, PMGSONable):
"""
Input variables defining the K-point sampling.
"""
# Modes supported by the constructor.
modes = Enum(('monkhorst', 'path', 'automatic',))
def __init__(self, mode="monkhorst", num_kpts= 0, kpts=((1, 1, 1),), kpt_shifts=(0.5, 0.5, 0.5),
kpts_weights=None, use_symmetries=True, use_time_reversal=True, chksymbreak=None,
comment=None):
"""
Highly flexible constructor for KSampling objects. The flexibility comes
        at the cost of usability and in general, it is recommended that you use
        the default constructor only if you know exactly what you are doing and
        require the flexibility. For most use cases, the object can be constructed
        far more easily using the convenience static constructors:
#. gamma_only
#. gamma_centered
#. monkhorst
#. monkhorst_automatic
#. path
and it is recommended that you use those.
Args:
            mode: Mode for generating k-points. Use one of the KSampling.modes enum types.
            num_kpts: Number of kpoints if mode is "automatic".
                Number of divisions for the sampling of the smallest segment if mode is "path".
                Not used for the other modes.
kpts: Number of divisions. Even when only a single specification is
required, e.g. in the automatic scheme, the kpts should still
be specified as a 2D array. e.g., [[20]] or [[2,2,2]].
kpt_shifts: Shifts for Kpoints.
use_symmetries: False if spatial symmetries should not be used
to reduce the number of independent k-points.
use_time_reversal: False if time-reversal symmetry should not be used
to reduce the number of independent k-points.
kpts_weights: Optional weights for kpoints. For explicit kpoints.
chksymbreak: Abinit input variable: check whether the BZ sampling preserves the symmetry of the crystal.
comment: String comment for Kpoints
.. note::
The default behavior of the constructor is monkhorst.
"""
if mode not in KSampling.modes:
raise ValueError("Unknown kpoint mode %s" % mode)
super(KSampling, self).__init__()
self.mode = mode
self.comment = comment
self.num_kpts = num_kpts
self.kpts = kpts
self.kpt_shifts = kpt_shifts
self.kpts_weights = kpts_weights
self.use_symmetries = use_symmetries
self.use_time_reversal = use_time_reversal
self.chksymbreak = chksymbreak
abivars = {}
if mode in ("monkhorst",):
assert num_kpts == 0
ngkpt = np.reshape(kpts, 3)
shiftk = np.reshape(kpt_shifts, (-1,3))
if use_symmetries and use_time_reversal: kptopt = 1
if not use_symmetries and use_time_reversal: kptopt = 2
if not use_symmetries and not use_time_reversal: kptopt = 3
if use_symmetries and not use_time_reversal: kptopt = 4
abivars.update({
"ngkpt" : ngkpt,
"shiftk" : shiftk,
"nshiftk" : len(shiftk),
"kptopt" : kptopt,
"chksymbreak": chksymbreak,
})
elif mode in ("path",):
if num_kpts <= 0:
raise ValueError("For Path mode, num_kpts must be specified and >0")
kptbounds = np.reshape(kpts, (-1,3))
#print("in path with kptbound: %s " % kptbounds)
abivars.update({
"ndivsm" : num_kpts,
"kptbounds": kptbounds,
"kptopt" : -len(kptbounds)+1,
})
elif mode in ("automatic",):
kpts = np.reshape(kpts, (-1,3))
if len(kpts) != num_kpts:
raise ValueError("For Automatic mode, num_kpts must be specified.")
kptnrm = np.ones(num_kpts)
abivars.update({
"kptopt" : 0,
"kpt" : kpts,
"nkpt" : num_kpts,
"kptnrm" : kptnrm,
"wtk" : kpts_weights, # for iscf/=-2, wtk.
"chksymbreak": chksymbreak,
})
else:
raise ValueError("Unknown mode %s" % mode)
self.abivars = abivars
#self.abivars["#comment"] = comment
@property
def is_homogeneous(self):
return self.mode not in ["path"]
@classmethod
def gamma_only(cls):
"""Gamma-only sampling"""
return cls(kpt_shifts=(0.0,0.0,0.0), comment="Gamma-only sampling")
@classmethod
def gamma_centered(cls, kpts=(1, 1, 1), use_symmetries=True, use_time_reversal=True):
"""
Convenient static constructor for an automatic Gamma centered Kpoint grid.
Args:
kpts: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
use_symmetries: False if spatial symmetries should not be used
to reduce the number of independent k-points.
use_time_reversal: False if time-reversal symmetry should not be used
to reduce the number of independent k-points.
Returns:
:class:`KSampling` object.
"""
return cls(kpts=[kpts], kpt_shifts=(0.0, 0.0, 0.0),
use_symmetries=use_symmetries, use_time_reversal=use_time_reversal,
comment="gamma-centered mode")
@classmethod
def monkhorst(cls, ngkpt, shiftk=(0.5, 0.5, 0.5), chksymbreak=None, use_symmetries=True,
use_time_reversal=True, comment=None):
"""
Convenient static constructor for a Monkhorst-Pack mesh.
Args:
ngkpt: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
shiftk: Shift to be applied to the kpoints.
use_symmetries: Use spatial symmetries to reduce the number of k-points.
use_time_reversal: Use time-reversal symmetry to reduce the number of k-points.
Returns:
:class:`KSampling` object.
"""
return cls(
kpts=[ngkpt], kpt_shifts=shiftk,
use_symmetries=use_symmetries, use_time_reversal=use_time_reversal, chksymbreak=chksymbreak,
comment=comment if comment else "Monkhorst-Pack scheme with user-specified shiftk")
@classmethod
def monkhorst_automatic(cls, structure, ngkpt,
use_symmetries=True, use_time_reversal=True, chksymbreak=None, comment=None):
"""
Convenient static constructor for an automatic Monkhorst-Pack mesh.
Args:
structure: :class:`Structure` object.
ngkpt: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
use_symmetries: Use spatial symmetries to reduce the number of k-points.
use_time_reversal: Use time-reversal symmetry to reduce the number of k-points.
Returns:
:class:`KSampling` object.
"""
sg = SpacegroupAnalyzer(structure)
#sg.get_crystal_system()
#sg.get_point_group()
# TODO
nshiftk = 1
#shiftk = 3*(0.5,) # this is the default
shiftk = 3*(0.5,)
#if lattice.ishexagonal:
#elif lattice.isbcc
#elif lattice.isfcc
return cls.monkhorst(
ngkpt, shiftk=shiftk, use_symmetries=use_symmetries, use_time_reversal=use_time_reversal,
chksymbreak=chksymbreak, comment=comment if comment else "Automatic Monkhorst-Pack scheme")
@classmethod
def _path(cls, ndivsm, structure=None, kpath_bounds=None, comment=None):
"""
Static constructor for path in k-space.
Args:
structure: :class:`Structure` object.
kpath_bounds: List with the reduced coordinates of the k-points defining the path.
ndivsm: Number of division for the smallest segment.
comment: Comment string.
Returns:
:class:`KSampling` object.
"""
if kpath_bounds is None:
# Compute the boundaries from the input structure.
from pymatgen.symmetry.bandstructure import HighSymmKpath
sp = HighSymmKpath(structure)
# Flat the array since "path" is a a list of lists!
kpath_labels = []
for labels in sp.kpath["path"]:
kpath_labels.extend(labels)
kpath_bounds = []
for label in kpath_labels:
red_coord = sp.kpath["kpoints"][label]
#print("label %s, red_coord %s" % (label, red_coord))
kpath_bounds.append(red_coord)
return cls(mode=KSampling.modes.path, num_kpts=ndivsm, kpts=kpath_bounds,
comment=comment if comment else "K-Path scheme")
@classmethod
def path_from_structure(cls, ndivsm, structure):
"""See _path for the meaning of the variables"""
return cls._path(ndivsm, structure=structure, comment="K-path generated automatically from structure")
@classmethod
def explicit_path(cls, ndivsm, kpath_bounds):
"""See _path for the meaning of the variables"""
return cls._path(ndivsm, kpath_bounds=kpath_bounds, comment="Explicit K-path")
@classmethod
def automatic_density(cls, structure, kppa, chksymbreak=None, use_symmetries=True, use_time_reversal=True,
shifts=(0.5, 0.5, 0.5)):
"""
        Returns an automatic KSampling object based on a structure and a kpoint
density. Uses Gamma centered meshes for hexagonal cells and Monkhorst-Pack grids otherwise.
Algorithm:
Uses a simple approach scaling the number of divisions along each
reciprocal lattice vector proportional to its length.
Args:
structure: Input structure
kppa: Grid density
"""
lattice = structure.lattice
lengths = lattice.abc
ngrid = kppa / structure.num_sites
mult = (ngrid * lengths[0] * lengths[1] * lengths[2]) ** (1 / 3.)
num_div = [int(round(1.0 / lengths[i] * mult)) for i in range(3)]
# ensure that num_div[i] > 0
num_div = [i if i > 0 else 1 for i in num_div]
angles = lattice.angles
hex_angle_tol = 5 # in degrees
hex_length_tol = 0.01 # in angstroms
right_angles = [i for i in range(3) if abs(angles[i] - 90) < hex_angle_tol]
hex_angles = [i for i in range(3)
if abs(angles[i] - 60) < hex_angle_tol or
abs(angles[i] - 120) < hex_angle_tol]
is_hexagonal = (len(right_angles) == 2 and len(hex_angles) == 1
and abs(lengths[right_angles[0]] -
lengths[right_angles[1]]) < hex_length_tol)
#style = Kpoints.modes.gamma
#if not is_hexagonal:
# num_div = [i + i % 2 for i in num_div]
# style = Kpoints.modes.monkhorst
comment = "abinitio generated KPOINTS with grid density = " + "{} / atom".format(kppa)
shifts = np.reshape(shifts, (-1, 3))
return cls(
mode="monkhorst", num_kpts=0, kpts=[num_div], kpt_shifts=shifts,
use_symmetries=use_symmetries, use_time_reversal=use_time_reversal, chksymbreak=chksymbreak,
comment=comment)
def to_abivars(self):
return self.abivars
def as_dict(self):
enc = MontyEncoder()
return {'mode': self.mode, 'comment': self.comment, 'num_kpts': self.num_kpts,
'kpts': enc.default(np.array(self.kpts)), 'kpt_shifts': self.kpt_shifts,
'kpts_weights': self.kpts_weights, 'use_symmetries': self.use_symmetries,
'use_time_reversal': self.use_time_reversal, 'chksymbreak': self.chksymbreak,
'@module': self.__class__.__module__, '@class': self.__class__.__name__}
@classmethod
def from_dict(cls, d):
d = d.copy()
d.pop('@module', None)
d.pop('@class', None)
dec = MontyDecoder()
d['kpts'] = dec.process_decoded(d['kpts'])
return cls(**d)
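# Usage sketch (illustrative, not part of the original module): common ways
# to build a KSampling. `structure` is a hypothetical pymatgen Structure.
#
#   mp = KSampling.monkhorst((4, 4, 4), shiftk=(0.5, 0.5, 0.5))
#   path = KSampling.explicit_path(10, [(0, 0, 0), (0.5, 0.5, 0.5)])
#   dens = KSampling.automatic_density(structure, kppa=1000)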
class Constraints(AbivarAble):
"""This object defines the constraints for structural relaxation"""
def to_abivars(self):
raise NotImplementedError("")
class RelaxationMethod(AbivarAble, PMGSONable):
"""
This object stores the variables for the (constrained) structural optimization
ionmov and optcell specify the type of relaxation.
The other variables are optional and their use depend on ionmov and optcell.
A None value indicates that we use abinit default. Default values can
be modified by passing them to the constructor.
The set of variables are constructed in to_abivars depending on ionmov and optcell.
"""
_default_vars = {
"ionmov" : MANDATORY,
"optcell" : MANDATORY,
"ntime" : 80,
"dilatmx" : 1.05,
"ecutsm" : 0.5,
"strfact" : None,
"tolmxf" : None,
"strtarget" : None,
"atoms_constraints": {}, # Constraints are stored in a dictionary. {} means if no constraint is enforced.
}
IONMOV_DEFAULT = 3
OPTCELL_DEFAULT = 2
def __init__(self, *args, **kwargs):
# Initialize abivars with the default values.
self.abivars = self._default_vars
# Overwrite the keys with the args and kwargs passed to constructor.
self.abivars.update(*args, **kwargs)
self.abivars = AttrDict(self.abivars)
for k in self.abivars:
if k not in self._default_vars:
raise ValueError("%s: No default value has been provided for key %s" % (self.__class__.__name__, k))
        for k, v in self.abivars.items():
            if v is MANDATORY:
                raise ValueError("%s: No value has been provided for the mandatory key %s" %
                                 (self.__class__.__name__, k))
@classmethod
def atoms_only(cls, atoms_constraints=None):
if atoms_constraints is None:
return cls(ionmov=cls.IONMOV_DEFAULT, optcell=0)
else:
return cls(ionmov=cls.IONMOV_DEFAULT, optcell=0, atoms_constraints=atoms_constraints)
@classmethod
def atoms_and_cell(cls, atoms_constraints=None):
if atoms_constraints is None:
return cls(ionmov=cls.IONMOV_DEFAULT, optcell=cls.OPTCELL_DEFAULT)
else:
            return cls(ionmov=cls.IONMOV_DEFAULT, optcell=cls.OPTCELL_DEFAULT, atoms_constraints=atoms_constraints)
@property
def move_atoms(self):
"""True if atoms must be moved."""
return self.abivars.ionmov != 0
@property
def move_cell(self):
"""True if lattice parameters must be optimized."""
return self.abivars.optcell != 0
def to_abivars(self):
"""Returns a dictionary with the abinit variables"""
# These variables are always present.
out_vars = {
"ionmov" : self.abivars.ionmov,
"optcell": self.abivars.optcell,
"ntime" : self.abivars.ntime,
}
# Atom relaxation.
if self.move_atoms:
out_vars.update({
"tolmxf": self.abivars.tolmxf,
})
if self.abivars.atoms_constraints:
# Add input variables for constrained relaxation.
raise NotImplementedError("")
out_vars.update(self.abivars.atoms_constraints.to_abivars())
# Cell relaxation.
if self.move_cell:
out_vars.update({
"dilatmx" : self.abivars.dilatmx,
"ecutsm" : self.abivars.ecutsm,
"strfact" : self.abivars.strfact,
"strtarget": self.abivars.strtarget,
})
return out_vars
def as_dict(self):
d = dict(self._default_vars)
d['@module'] = self.__class__.__module__
d['@class'] = self.__class__.__name__
return d
@classmethod
def from_dict(cls, d):
d = d.copy()
d.pop('@module', None)
d.pop('@class', None)
return cls(**d)
class PPModel(AbivarAble, PMGSONable):
"""
Parameters defining the plasmon-pole technique.
    The common way to instantiate a PPModel object is via the class method PPModel.as_ppmodel(string)
"""
_mode2ppmodel = {
"noppmodel": 0,
"godby" : 1,
"hybersten": 2,
"linden" : 3,
"farid" : 4,
}
modes = Enum(k for k in _mode2ppmodel)
@classmethod
def as_ppmodel(cls, obj):
"""
Constructs an instance of PPModel from obj.
Accepts obj in the form:
* PPmodel instance
* string. e.g "godby:12.3 eV", "linden".
"""
if isinstance(obj, cls):
return obj
# obj is a string
if ":" not in obj:
mode, plasmon_freq = obj, None
else:
# Extract mode and plasmon_freq
mode, plasmon_freq = obj.split(":")
try:
plasmon_freq = float(plasmon_freq)
except ValueError:
plasmon_freq, unit = plasmon_freq.split()
plasmon_freq = units.Energy(float(plasmon_freq), unit).to("Ha")
return cls(mode=mode, plasmon_freq=plasmon_freq)
def __init__(self, mode="godby", plasmon_freq=None):
assert mode in PPModel.modes
self.mode = mode
self.plasmon_freq = plasmon_freq
def __eq__(self, other):
if other is None:
return False
else:
if self.mode != other.mode:
return False
if self.plasmon_freq is None:
return other.plasmon_freq is None
else:
return np.allclose(self.plasmon_freq, other.plasmon_freq)
def __ne__(self, other):
return not self == other
def __bool__(self):
return self.mode != "noppmodel"
# py2 old version
__nonzero__ = __bool__
def __repr__(self):
return "<%s at %s, mode = %s>" % (self.__class__.__name__, id(self),
str(self.mode))
def to_abivars(self):
if self:
return {"ppmodel": self._mode2ppmodel[self.mode], "ppmfrq": self.plasmon_freq}
else:
return {}
@classmethod
def noppmodel(cls):
return cls(mode="noppmodel", plasmon_freq=None)
def as_dict(self):
return {"mode": self.mode, "plasmon_freq": self.plasmon_freq,
"@module": self.__class__.__module__,
"@class": self.__class__.__name__}
@staticmethod
def from_dict(d):
return PPModel(mode=d["mode"], plasmon_freq=d["plasmon_freq"])
class HilbertTransform(AbivarAble):
"""
Parameters for the Hilbert-transform method (Screening code)
i.e. the parameters defining the frequency mesh used for the spectral function
and the frequency mesh used for the polarizability
"""
def __init__(self, nomegasf, domegasf=None, spmeth=1, nfreqre=None, freqremax=None, nfreqim=None, freqremin=None):
"""
Args:
nomegasf: Number of points for sampling the spectral function along the real axis.
domegasf: Step in Ha for the linear mesh used for the spectral function.
            spmeth: Algorithm for the representation of the delta function.
nfreqre: Number of points along the real axis (linear mesh).
freqremax: Maximum frequency for W along the real axis (in hartree).
nfreqim: Number of point along the imaginary axis (Gauss-Legendre mesh).
freqremin: Minimum frequency for W along the real axis (in hartree).
"""
# Spectral function
self.nomegasf = nomegasf
self.domegasf = domegasf
self.spmeth = spmeth
# Mesh for the contour-deformation method used for the integration of the self-energy
self.nfreqre = nfreqre
self.freqremax = freqremax
self.freqremin = freqremin
self.nfreqim = nfreqim
def to_abivars(self):
"""Returns a dictionary with the abinit variables"""
return {
# Spectral function
"nomegasf": self.nomegasf,
"domegasf": self.domegasf,
"spmeth" : self.spmeth,
# Frequency mesh for the polarizability
"nfreqre" : self.nfreqre,
"freqremax": self.freqremax,
"nfreqim" : self.nfreqim,
"freqremin": self.freqremin,
}
class ModelDielectricFunction(AbivarAble):
"""Model dielectric function used for BSE calculation"""
def __init__(self, mdf_epsinf):
self.mdf_epsinf = mdf_epsinf
def to_abivars(self):
return {"mdf_epsinf": self.mdf_epsinf}
##########################################################################################
################################# WORK IN PROGRESS ######################################
##########################################################################################
class Screening(AbivarAble):
"""
This object defines the parameters used for the
computation of the screening function.
"""
# Approximations used for W
_WTYPES = {
"RPA": 0,
}
    # Self-consistency modes
_SC_MODES = {
"one_shot" : 0,
"energy_only" : 1,
"wavefunctions": 2,
}
def __init__(self, ecuteps, nband, w_type="RPA", sc_mode="one_shot",
hilbert=None, ecutwfn=None, inclvkb=2):
"""
Args:
ecuteps: Cutoff energy for the screening (Ha units).
            nband: Number of bands for the Green's function.
w_type: Screening type
sc_mode: Self-consistency mode.
hilbert: Instance of :class:`HilbertTransform` defining the parameters for the Hilber transform method.
ecutwfn: Cutoff energy for the wavefunctions (Default: ecutwfn == ecut).
inclvkb: Option for the treatment of the dipole matrix elements (NC pseudos).
"""
if w_type not in self._WTYPES:
raise ValueError("W_TYPE: %s is not supported" % w_type)
if sc_mode not in self._SC_MODES:
raise ValueError("Self-consistecy mode %s is not supported" % sc_mode)
self.ecuteps = ecuteps
self.nband = nband
self.w_type = w_type
self.sc_mode = sc_mode
self.ecutwfn = ecutwfn
self.inclvkb = inclvkb
if hilbert is not None:
raise NotImplementedError("Hilber transform not coded yet")
self.hilbert = hilbert
# Default values
# TODO Change abinit defaults
self.gwpara=2
self.awtr =1
self.symchi=1
self.optdriver = 3
@property
def use_hilbert(self):
return hasattr(self, "hilbert")
#@property
#def gwcalctyp(self):
# "Return the value of the gwcalctyp input variable"
# dig0 = str(self._SIGMA_TYPES[self.type])
# dig1 = str(self._SC_MODES[self.sc_mode]
# return dig1.strip() + dig0.strip()
def to_abivars(self):
"""Returns a dictionary with the abinit variables"""
abivars = {
"ecuteps" : self.ecuteps,
"ecutwfn" : self.ecutwfn,
"inclvkb" : self.inclvkb,
"gwpara" : self.gwpara,
"awtr" : self.awtr,
"symchi" : self.symchi,
#"gwcalctyp": self.gwcalctyp,
#"fftgw" : self.fftgw,
"optdriver" : self.optdriver,
}
# Variables for the Hilber transform.
if self.use_hilbert:
abivars.update(self.hilbert.to_abivars())
return abivars
class SelfEnergy(AbivarAble):
"""
This object defines the parameters used for the computation of the self-energy.
"""
_SIGMA_TYPES = {
"gw" : 0,
"hartree_fock": 5,
"sex" : 6,
"cohsex" : 7,
"model_gw_ppm": 8,
"model_gw_cd" : 9,
}
_SC_MODES = {
"one_shot" : 0,
"energy_only" : 1,
"wavefunctions": 2,
}
def __init__(self, se_type, sc_mode, nband, ecutsigx, screening,
gw_qprange=1, ppmodel=None, ecuteps=None, ecutwfn=None, gwpara=2):
"""
Args:
se_type: Type of self-energy (str)
sc_mode: Self-consistency mode.
nband: Number of bands for the Green's function
ecutsigx: Cutoff energy for the exchange part of the self-energy (Ha units).
screening: :class:`Screening` instance.
gw_qprange: Option for the automatic selection of k-points and bands for GW corrections.
                See Abinit docs for more detail. The default value makes the code compute the
                QP energies for all the points in the IBZ and one band above and one band below the Fermi level.
ppmodel: :class:`PPModel` instance with the parameters used for the plasmon-pole technique.
ecuteps: Cutoff energy for the screening (Ha units).
ecutwfn: Cutoff energy for the wavefunctions (Default: ecutwfn == ecut).
"""
if se_type not in self._SIGMA_TYPES:
raise ValueError("SIGMA_TYPE: %s is not supported" % se_type)
if sc_mode not in self._SC_MODES:
raise ValueError("Self-consistecy mode %s is not supported" % sc_mode)
self.type = se_type
self.sc_mode = sc_mode
self.nband = nband
self.ecutsigx = ecutsigx
self.screening = screening
self.gw_qprange = gw_qprange
self.gwpara = gwpara
if ppmodel is not None:
assert not screening.use_hilbert
self.ppmodel = PPModel.as_ppmodel(ppmodel)
self.ecuteps = ecuteps if ecuteps is not None else screening.ecuteps
self.ecutwfn = ecutwfn
self.optdriver = 4
#band_mode in ["gap", "full"]
#if isinstance(kptgw, str) and kptgw == "all":
# self.kptgw = None
# self.nkptgw = None
#else:
# self.kptgw = np.reshape(kptgw, (-1,3))
# self.nkptgw = len(self.kptgw)
#if bdgw is None:
# raise ValueError("bdgw must be specified")
#if isinstance(bdgw, str):
# # TODO add new variable in Abinit so that we can specify
# # an energy interval around the KS gap.
# homo = float(nele) / 2.0
# #self.bdgw =
#else:
# self.bdgw = np.reshape(bdgw, (-1,2))
#self.freq_int = freq_int
@property
def use_ppmodel(self):
"""True if we are using the plasmon-pole approximation."""
return hasattr(self, "ppmodel")
@property
def gwcalctyp(self):
"""Returns the value of the gwcalctyp input variable."""
dig0 = str(self._SIGMA_TYPES[self.type])
dig1 = str(self._SC_MODES[self.sc_mode])
return dig1.strip() + dig0.strip()
@property
def symsigma(self):
"""1 if symmetries can be used to reduce the number of q-points."""
return 1 if self.sc_mode == "one_shot" else 0
def to_abivars(self):
"""Returns a dictionary with the abinit variables."""
abivars = dict(
gwcalctyp=self.gwcalctyp,
ecuteps=self.ecuteps,
ecutsigx=self.ecutsigx,
symsigma=self.symsigma,
gw_qprange=self.gw_qprange,
gwpara=self.gwpara,
optdriver=self.optdriver,
#"ecutwfn" : self.ecutwfn,
#"kptgw" : self.kptgw,
#"nkptgw" : self.nkptgw,
#"bdgw" : self.bdgw,
)
# FIXME: problem with the spin
#assert len(self.bdgw) == self.nkptgw
# ppmodel variables
if self.use_ppmodel:
abivars.update(self.ppmodel.to_abivars())
return abivars
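    # Worked example (illustrative): gwcalctyp concatenates the sc_mode
    # digit with the sigma-type digit. One-shot GW ("one_shot" + "gw")
    # gives "0" + "0" -> "00"; energy-only self-consistent COHSEX
    # ("energy_only" + "cohsex") gives "1" + "7" -> "17".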
class ExcHamiltonian(AbivarAble):
"""This object contains the parameters for the solution of the Bethe-Salpeter equation."""
# Types of excitonic Hamiltonian.
_EXC_TYPES = {
"TDA": 0, # Tamm-Dancoff approximation.
"coupling": 1, # Calculation with coupling.
}
# Algorithms used to compute the macroscopic dielectric function
# and/or the exciton wavefunctions.
_ALGO2VAR = {
"direct_diago": 1,
"haydock" : 2,
"cg" : 3,
}
# Options specifying the treatment of the Coulomb term.
_COULOMB_MODES = [
"diago",
"full",
"model_df"
]
def __init__(self, bs_loband, nband, soenergy, coulomb_mode, ecuteps, spin_mode="polarized", mdf_epsinf=None,
exc_type="TDA", algo="haydock", with_lf=True, bs_freq_mesh=None, zcut=None, **kwargs):
"""
Args:
bs_loband: Lowest band index (Fortran convention) used in the e-h basis set.
Can be scalar or array of shape (nsppol,). Must be >= 1 and <= nband
nband: Max band index used in the e-h basis set.
soenergy: Scissors energy in Hartree.
coulomb_mode: Treatment of the Coulomb term.
ecuteps: Cutoff energy for W in Hartree.
            mdf_epsinf: Macroscopic dielectric function :math:`\epsilon_\infty` used in
the model dielectric function.
exc_type: Approximation used for the BSE Hamiltonian
with_lf: True if local field effects are included <==> exchange term is included
bs_freq_mesh: Frequency mesh for the macroscopic dielectric function (start, stop, step) in Ha.
zcut: Broadening parameter in Ha.
**kwargs:
Extra keywords
"""
spin_mode = SpinMode.as_spinmode(spin_mode)
# We want an array bs_loband(nsppol).
try:
bs_loband = np.reshape(bs_loband, spin_mode.nsppol)
except ValueError:
bs_loband = np.array(spin_mode.nsppol * [int(bs_loband)])
self.bs_loband = bs_loband
self.nband = nband
self.soenergy = soenergy
self.coulomb_mode = coulomb_mode
assert coulomb_mode in self._COULOMB_MODES
self.ecuteps = ecuteps
self.mdf_epsinf = mdf_epsinf
self.exc_type = exc_type
assert exc_type in self._EXC_TYPES
self.algo = algo
assert algo in self._ALGO2VAR
self.with_lf = with_lf
# if bs_freq_mesh is not given, abinit will select its own mesh.
self.bs_freq_mesh = np.array(bs_freq_mesh) if bs_freq_mesh is not None else bs_freq_mesh
self.zcut = zcut
self.optdriver = 99
# Extra options.
self.kwargs = kwargs
#if "chksymbreak" not in self.kwargs:
# self.kwargs["chksymbreak"] = 0
# Consistency check
        if any(bs_loband <= 0):
            raise ValueError("bs_loband must be >= 1 while it is %s" % bs_loband)
if any(bs_loband >= nband):
raise ValueError("bs_loband (%s) >= nband (%s)" % (bs_loband, nband))
@property
def inclvkb(self):
"""Treatment of the dipole matrix element (NC pseudos, default is 2)"""
return self.kwargs.get("inclvkb", 2)
@property
def use_haydock(self):
"""True if we are using the Haydock iterative technique."""
return self.algo == "haydock"
@property
def use_cg(self):
"""True if we are using the conjugate gradient method."""
return self.algo == "cg"
@property
def use_direct_diago(self):
"""True if we are performing the direct diagonalization of the BSE Hamiltonian."""
return self.algo == "direct_diago"
def to_abivars(self):
"""Returns a dictionary with the abinit variables."""
abivars = dict(
bs_calctype=1,
bs_loband=self.bs_loband,
#nband=self.nband,
soenergy=self.soenergy,
ecuteps=self.ecuteps,
bs_algorithm = self._ALGO2VAR[self.algo],
bs_coulomb_term=21,
mdf_epsinf=self.mdf_epsinf,
bs_exchange_term=1 if self.with_lf else 0,
inclvkb=self.inclvkb,
zcut=self.zcut,
bs_freq_mesh=self.bs_freq_mesh,
bs_coupling=self._EXC_TYPES[self.exc_type],
optdriver=self.optdriver,
)
if self.use_haydock:
# FIXME
abivars.update(
bs_haydock_niter=100, # No. of iterations for Haydock
bs_hayd_term=0, # No terminator
bs_haydock_tol=[0.05, 0], # Stopping criteria
)
elif self.use_direct_diago:
raise NotImplementedError("")
elif self.use_cg:
raise NotImplementedError("")
else:
raise ValueError("Unknown algorithm for EXC: %s" % self.algo)
# Add extra kwargs
abivars.update(self.kwargs)
return abivars
| mit | -4,362,257,695,082,234,400 | 32.958003 | 118 | 0.570389 | false |
CylonicRaider/Instant | script/colorlogs.py | 1 | 5716 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Perform syntax highlighting on Scribe logs.
"""
import sys, os, re
import time
import errno
import instabot
# Hard-coded ANSI escape sequences for coloring.
COLORS = {None: '\033[0m', 'bold': '\033[1m', 'black': '\033[30m',
'red': '\033[31m', 'green': '\033[32m', 'orange': '\033[33m',
'blue': '\033[34m', 'magenta': '\033[35m', 'cyan': '\033[36m',
'gray': '\033[37m'}
def highlight(line, filt=None):
def highlight_scalar(val):
if val in instabot.LOG_CONSTANTS:
return (COLORS['magenta'], val)
elif instabot.INTEGER_RE.match(val) or instabot.FLOAT_RE.match(val):
return (COLORS['cyan'], val)
else:
return (COLORS[None], val)
def highlight_tuple(val):
if val[:1] != '(': return (COLORS['red'], val)
idx, ret = 1, [COLORS['orange'], '(']
m = instabot.WHITESPACE_RE.match(val, idx)
if m:
ret.append(m.group())
idx = m.end()
while idx < len(val):
m = instabot.SCALAR_RE.match(val, idx)
if not m: break
ret.extend(highlight_scalar(m.group()))
idx = m.end()
m = instabot.COMMA_RE.match(val, idx)
if not m: break
ret.extend((COLORS['orange'], m.group()))
idx = m.end()
m = instabot.WHITESPACE_RE.match(val, idx)
if m:
ret.extend((COLORS['orange'], m.group()))
idx = m.end()
if val[idx:] == ')':
ret.extend((COLORS['orange'], ')'))
else:
# Should not happen...
ret.extend((COLORS['red'], val[idx:]))
return ret
def highlight_scalar_or_tuple(val):
if val.startswith('('):
return highlight_tuple(val)
else:
return highlight_scalar(val)
def highlight_dict(val):
if val[:1] != '{': return (COLORS['red'], val)
idx, ret = 1, [COLORS['orange'], '{']
m = instabot.WHITESPACE_RE.match(val, idx)
if m:
ret.append(m.group())
idx = m.end()
while idx < len(val):
m = instabot.DICT_ENTRY_RE.match(val, idx)
if not m: break
ret.extend(highlight_scalar_or_tuple(m.group(1)))
ret.extend((COLORS['orange'], val[m.end(1):m.start(2)]))
ret.extend(highlight_scalar_or_tuple(m.group(2)))
idx = m.end()
m = instabot.COMMA_RE.match(val, idx)
if not m: break
ret.extend((COLORS['orange'], m.group()))
idx = m.end()
m = instabot.WHITESPACE_RE.match(val, idx)
if m:
ret.extend((COLORS['orange'], m.group()))
idx = m.end()
if val[idx:] == '}':
ret.extend((COLORS['orange'], '}'))
else:
# Should not happen...
ret.extend((COLORS['red'], val[idx:]))
return ret
def highlight_any(val):
if val.startswith('{'):
return highlight_dict(val)
elif val.startswith('('):
return highlight_tuple(val)
else:
return highlight_scalar(val)
m = instabot.LOGLINE_RE.match(line)
if not m: return line
if filt and not filt(m.group(2)): return None
ret = [line[:m.start(2)], COLORS['bold'], m.group(2), COLORS[None],
line[m.end(2):m.start(3)]]
idx = m.start(3)
if idx != -1:
while idx < len(line):
# Skip whitespace
m = instabot.WHITESPACE_RE.match(line, idx)
if m:
ret.extend((COLORS[None], m.group()))
idx = m.end()
if idx == len(line): break
# Match the next parameter; output name
m = instabot.PARAM_RE.match(line, idx)
if not m: break
name, val = m.group(1, 2)
ret.extend((COLORS['green'], name, '='))
# Output value
ret.extend(highlight_any(val))
idx = m.end()
ret.extend((COLORS[None], line[idx:]))
return ''.join(ret)
def highlight_stream(it, newlines=False, filt=None):
if not newlines:
for line in it:
hl = highlight(line, filt)
if hl is not None: yield hl
else:
for line in it:
hl = highlight(line.rstrip('\n'), filt)
if hl is not None: yield hl + '\n'
def main():
p = instabot.OptionParser(sys.argv[0])
p.help_action(desc='A syntax highlighter for Scribe logs.')
p.option('exclude', short='x', default=[], accum=True,
help='Filter out lines of this type (may be repeated)')
p.option('out', short='o',
help='File to write output to (- is standard output and '
'the default)')
p.flag_ex('append', short='a', varname='outmode', value='a', default='w',
help='Append to output file instead of overwriting it')
p.flag('line-buffered', short='u',
help='Flush output after each input line')
p.argument('in', default='-',
help='File to read from (- is standard input and '
'the default)')
p.parse(sys.argv[1:])
ignore, inpath, outpath = p.get('exclude', 'in', 'out')
outmode, linebuf = p.get('outmode', 'line-buffered')
try:
filt = (lambda t: t not in ignore) if ignore else None
of = instabot.open_file
with of(inpath, 'r') as fi, of(outpath, outmode) as fo:
for l in highlight_stream(fi, True, filt):
fo.write(l)
if linebuf: fo.flush()
except KeyboardInterrupt:
# Suppress noisy stack traces.
pass
if __name__ == '__main__': main()
| mit | 5,965,448,285,206,380,000 | 35.177215 | 77 | 0.520469 | false |
openstack/senlin | contrib/kubernetes/kube/master.py | 1 | 10788 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import jinja2
from oslo_log import log as logging
from oslo_utils import encodeutils
from kube import base
from senlin.common import consts
from senlin.common import exception as exc
from senlin.common.i18n import _
from senlin.common import schema
LOG = logging.getLogger(__name__)
class ServerProfile(base.KubeBaseProfile):
"""Profile for an kubernetes master server."""
VERSIONS = {
'1.0': [
{'status': consts.EXPERIMENTAL, 'since': '2017.10'}
]
}
KEYS = (
CONTEXT, FLAVOR, IMAGE, KEY_NAME,
PUBLIC_NETWORK, BLOCK_DEVICE_MAPPING_V2,
) = (
'context', 'flavor', 'image', 'key_name',
'public_network', 'block_device_mapping_v2',
)
INTERNAL_KEYS = (
KUBEADM_TOKEN, KUBE_MASTER_IP, SECURITY_GROUP,
PRIVATE_NETWORK, PRIVATE_SUBNET, PRIVATE_ROUTER,
KUBE_MASTER_FLOATINGIP, KUBE_MASTER_FLOATINGIP_ID,
SCALE_OUT_RECV_ID, SCALE_OUT_URL,
) = (
'kubeadm_token', 'kube_master_ip', 'security_group',
'private_network', 'private_subnet', 'private_router',
'kube_master_floatingip', 'kube_master_floatingip_id',
'scale_out_recv_id', 'scale_out_url',
)
NETWORK_KEYS = (
PORT, FIXED_IP, NETWORK, PORT_SECURITY_GROUPS,
FLOATING_NETWORK, FLOATING_IP,
) = (
'port', 'fixed_ip', 'network', 'security_groups',
'floating_network', 'floating_ip',
)
BDM2_KEYS = (
BDM2_UUID, BDM2_SOURCE_TYPE, BDM2_DESTINATION_TYPE,
BDM2_DISK_BUS, BDM2_DEVICE_NAME, BDM2_VOLUME_SIZE,
BDM2_GUEST_FORMAT, BDM2_BOOT_INDEX, BDM2_DEVICE_TYPE,
BDM2_DELETE_ON_TERMINATION,
) = (
'uuid', 'source_type', 'destination_type', 'disk_bus',
'device_name', 'volume_size', 'guest_format', 'boot_index',
'device_type', 'delete_on_termination',
)
properties_schema = {
CONTEXT: schema.Map(
_('Customized security context for operating servers.'),
),
FLAVOR: schema.String(
_('ID of flavor used for the server.'),
required=True,
updatable=True,
),
IMAGE: schema.String(
# IMAGE is not required, because there could be BDM or BDMv2
# support and the corresponding settings effective
_('ID of image to be used for the new server.'),
updatable=True,
),
KEY_NAME: schema.String(
_('Name of Nova keypair to be injected to server.'),
),
PUBLIC_NETWORK: schema.String(
_('Public network for kubernetes.'),
required=True,
),
BLOCK_DEVICE_MAPPING_V2: schema.List(
_('A list specifying the properties of block devices to be used '
'for this server.'),
schema=schema.Map(
_('A map specifying the properties of a block device to be '
'used by the server.'),
schema={
BDM2_UUID: schema.String(
_('ID of the source image, snapshot or volume'),
),
BDM2_SOURCE_TYPE: schema.String(
_("Volume source type, must be one of 'image', "
"'snapshot', 'volume' or 'blank'"),
required=True,
),
BDM2_DESTINATION_TYPE: schema.String(
_("Volume destination type, must be 'volume' or "
"'local'"),
required=True,
),
BDM2_DISK_BUS: schema.String(
_('Bus of the device.'),
),
BDM2_DEVICE_NAME: schema.String(
_('Name of the device(e.g. vda, xda, ....).'),
),
BDM2_VOLUME_SIZE: schema.Integer(
_('Size of the block device in MB(for swap) and '
'in GB(for other formats)'),
required=True,
),
BDM2_GUEST_FORMAT: schema.String(
_('Specifies the disk file system format(e.g. swap, '
'ephemeral, ...).'),
),
BDM2_BOOT_INDEX: schema.Integer(
_('Define the boot order of the device'),
),
BDM2_DEVICE_TYPE: schema.String(
_('Type of the device(e.g. disk, cdrom, ...).'),
),
BDM2_DELETE_ON_TERMINATION: schema.Boolean(
_('Whether to delete the volume when the server '
'stops.'),
),
}
),
),
}
def __init__(self, type_name, name, **kwargs):
super(ServerProfile, self).__init__(type_name, name, **kwargs)
self.server_id = None
def do_cluster_create(self, obj):
self._generate_kubeadm_token(obj)
self._create_security_group(obj)
self._create_network(obj)
def do_cluster_delete(self, obj):
if obj.dependents and 'kube-node' in obj.dependents:
msg = ("Cluster %s delete failed, "
"Node clusters %s must be deleted first." %
(obj.id, obj.dependents['kube-node']))
raise exc.EResourceDeletion(type='kubernetes.master',
id=obj.id,
message=msg)
self._delete_network(obj)
self._delete_security_group(obj)
def do_create(self, obj):
"""Create a server for the node object.
:param obj: The node object for which a server will be created.
"""
kwargs = {}
for key in self.KEYS:
if self.properties[key] is not None:
kwargs[key] = self.properties[key]
image_ident = self.properties[self.IMAGE]
if image_ident is not None:
image = self._validate_image(obj, image_ident, 'create')
kwargs.pop(self.IMAGE)
kwargs['imageRef'] = image.id
flavor_ident = self.properties[self.FLAVOR]
flavor = self._validate_flavor(obj, flavor_ident, 'create')
kwargs.pop(self.FLAVOR)
kwargs['flavorRef'] = flavor.id
keypair_name = self.properties[self.KEY_NAME]
if keypair_name:
keypair = self._validate_keypair(obj, keypair_name, 'create')
kwargs['key_name'] = keypair.name
kwargs['name'] = obj.name
metadata = self._build_metadata(obj, {})
kwargs['metadata'] = metadata
jj_vars = {}
cluster_data = self._get_cluster_data(obj)
kwargs['networks'] = [{'uuid': cluster_data[self.PRIVATE_NETWORK]}]
# Get user_data parameters from metadata
jj_vars['KUBETOKEN'] = cluster_data[self.KUBEADM_TOKEN]
jj_vars['MASTER_FLOATINGIP'] = cluster_data[
self.KUBE_MASTER_FLOATINGIP]
block_device_mapping_v2 = self.properties[self.BLOCK_DEVICE_MAPPING_V2]
if block_device_mapping_v2 is not None:
kwargs['block_device_mapping_v2'] = self._resolve_bdm(
obj, block_device_mapping_v2, 'create')
# user_data = self.properties[self.USER_DATA]
user_data = base.loadScript('./scripts/master.sh')
if user_data is not None:
# Use jinja2 to replace variables defined in user_data
try:
jj_t = jinja2.Template(user_data)
user_data = jj_t.render(**jj_vars)
            except (jinja2.exceptions.UndefinedError, ValueError):
                # TODO(anyone): surface template rendering errors instead of
                # silently falling back to the unrendered user_data
                pass
ud = encodeutils.safe_encode(user_data)
kwargs['user_data'] = encodeutils.safe_decode(base64.b64encode(ud))
sgid = self._get_security_group(obj)
kwargs['security_groups'] = [{'name': sgid}]
server = None
resource_id = None
try:
server = self.compute(obj).server_create(**kwargs)
self.compute(obj).wait_for_server(server.id)
server = self.compute(obj).server_get(server.id)
self._update_master_ip(obj, server.addresses[''][0]['addr'])
self._associate_floatingip(obj, server)
LOG.info("Created master node: %s" % server.id)
return server.id
except exc.InternalError as ex:
if server and server.id:
resource_id = server.id
raise exc.EResourceCreation(type='server',
message=str(ex),
resource_id=resource_id)
def do_delete(self, obj, **params):
"""Delete the physical resource associated with the specified node.
:param obj: The node object to operate on.
:param kwargs params: Optional keyword arguments for the delete
operation.
        :returns: This operation always returns True unless an exception is
caught.
:raises: `EResourceDeletion` if interaction with compute service fails.
"""
if not obj.physical_id:
return True
server_id = obj.physical_id
ignore_missing = params.get('ignore_missing', True)
internal_ports = obj.data.get('internal_ports', [])
force = params.get('force', False)
try:
self._disassociate_floatingip(obj, server_id)
driver = self.compute(obj)
if force:
driver.server_force_delete(server_id, ignore_missing)
else:
driver.server_delete(server_id, ignore_missing)
driver.wait_for_server_delete(server_id)
if internal_ports:
ex = self._delete_ports(obj, internal_ports)
if ex:
raise ex
return True
except exc.InternalError as ex:
raise exc.EResourceDeletion(type='server', id=server_id,
message=str(ex))
| apache-2.0 | 4,121,959,655,715,227,600 | 37.666667 | 79 | 0.540786 | false |
richardkiss/pycoin | pycoin/key/electrum.py | 1 | 3426 | import hashlib
from .subpaths import subpaths_for_path_range
from pycoin.encoding.bytes32 import from_bytes_32, to_bytes_32
from pycoin.encoding.hash import double_sha256
from pycoin.encoding.hexbytes import b2h
from pycoin.key.Key import Key
def initial_key_to_master_key(initial_key):
"""
initial_key:
a hex string of length 32
"""
b = initial_key.encode("utf8")
orig_input = b
for i in range(100000):
b = hashlib.sha256(b + orig_input).digest()
return from_bytes_32(b)
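# Illustrative example (hypothetical seed value; the loop above is Electrum's
# legacy 100,000-round SHA-256 key stretch):
#   initial_key_to_master_key("00" * 16)  # 32-char hex seed -> large integer
# The result is used directly as the wallet's master secret exponent.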
class ElectrumWallet(Key):
def __init__(self, initial_key=None, master_private_key=None, public_pair=None, master_public_key=None):
if [initial_key, public_pair, master_private_key, master_public_key].count(None) != 3:
            raise ValueError(
                "exactly one of initial_key, public_pair, master_private_key,"
                " master_public_key must be non-None")
self._initial_key = initial_key
if initial_key is not None:
master_private_key = initial_key_to_master_key(initial_key)
if master_public_key:
public_pair = tuple(from_bytes_32(master_public_key[idx:idx+32]) for idx in (0, 32))
super(ElectrumWallet, self).__init__(
secret_exponent=master_private_key, public_pair=public_pair, is_compressed=False)
@classmethod
def deserialize(class_, blob):
if len(blob) == 32:
return class_(master_private_key=from_bytes_32(blob))
if len(blob) == 64:
return class_(master_public_key=blob)
def serialize(self):
if self._secret_exponent:
return to_bytes_32(self._secret_exponent)
return self.master_public_key()
def secret_exponent(self):
if self._secret_exponent is None and self._initial_key:
self._secret_exponent = initial_key_to_master_key(b2h(self._initial_key))
return self._secret_exponent
def master_private_key(self):
return self.secret_exponent()
def master_public_key(self):
return self.sec()[1:]
def public_copy(self):
if self.secret_exponent() is None:
return self
return self.__class__(public_pair=self.public_pair())
def subkey_for_path(self, path):
return self.subkey(path)
def subkey(self, path):
"""
path:
of the form "K" where K is an integer index, or "K/N" where N is usually
a 0 (deposit address) or 1 (change address)
"""
t = path.split("/")
if len(t) == 2:
n, for_change = t
else:
n, = t
for_change = 0
b = (str(n) + ':' + str(for_change) + ':').encode("utf8") + self.master_public_key()
offset = from_bytes_32(double_sha256(b))
if self.secret_exponent():
return self.__class__(
master_private_key=((self.master_private_key() + offset) % self._generator.order())
)
p1 = offset * self._generator
x, y = self.public_pair()
p2 = self._generator.Point(x, y)
p = p1 + p2
return self.__class__(public_pair=p)
def subkeys(self, path):
"""
A generalized form that can return multiple subkeys.
"""
for _ in subpaths_for_path_range(path, hardening_chars="'pH"):
yield self.subkey(_)
def __repr__(self):
return "Electrum<E:%s>" % b2h(self.master_public_key())
| mit | -3,823,016,807,712,825,300 | 33.26 | 108 | 0.593695 | false |
rmartinho/yajna | tools/bootstrap.py | 1 | 3845 | #!/usr/bin/python
import itertools
import os
import fnmatch
import re
import sys
import argparse
import ninja_syntax
import gcc
import msvc
# --- util functions
def flags(*iterables):
return ' '.join(itertools.chain(*iterables))
def get_files(root, pattern):
pattern = fnmatch.translate(pattern)
for dir, dirs, files in os.walk(root):
for f in files:
if re.match(pattern, f):
yield os.path.join(dir, f)
def object_file(fn):
return os.path.join('obj', re.sub(r'\.c\+\+$', '.o', fn))
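# e.g. object_file('src/main.c++') -> 'obj/src/main.o' (with POSIX separators)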
# --- arguments
parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true', help='compile with debug information')
parser.add_argument('--cxx', default=None, metavar='executable', help='compiler name to use')
parser.add_argument('--msvc', action='store_true', help='use the MSVC++ toolchain')
parser.add_argument('--boost-dir', default=None, metavar='path', help='path of boost folder (i.e. the folder with include/ and lib/ subfolders)')
parser.add_argument('--no-lto', action='store_true', help='do not perform link-time optimisation')
args = parser.parse_args()
tools = msvc.Toolchain() if args.msvc else gcc.Toolchain()
compiler = args.cxx if args.cxx else tools.compiler()
linker = args.cxx if args.cxx else tools.linker()
# --- variables
dependencies = []
include_flags = flags([tools.include('include')],
(tools.dependency_include(os.path.join('deps', d, 'include')) for d in dependencies))
if args.boost_dir:
include_flags += ' ' + tools.dependency_include(args.boost_dir)
cxx_flags = flags(tools.cxx_flags(),
tools.debug_flags() if args.debug else tools.optimisation_flags(),
[] if args.no_lto or args.debug else tools.linker_lto_flags())
warning_flags = flags(tools.max_warnings())
define_flags = ''
lib_flags = ''
ld_flags = flags(tools.link_flags(),
[] if args.no_lto or args.debug else tools.linker_lto_flags())
stringize_tool = 'tools/stringize.py'
single_header_tool = 'tools/single_header.py'
# --- preamble
ninja = ninja_syntax.Writer(open('build.ninja', 'w'))
ninja.variable('ninja_required_version', '1.3')
ninja.variable('builddir', 'obj' + os.sep)
ninja.variable('msvc_deps_prefix', 'Note: including file:')
# --- rules
ninja.rule('bootstrap',
command = ' '.join(['python'] + sys.argv),
generator = True,
description = 'BOOTSTRAP')
ninja.rule('cxx',
command = ' '.join([compiler, flags(tools.dependencies_output('$out.d')), cxx_flags, warning_flags, include_flags, define_flags, '$extraflags', '$in', flags(tools.compiler_output('$out'))]),
deps = tools.ninja_deps_style(),
depfile = '$out.d',
description = 'C++ $in')
ninja.rule('link',
command = ' '.join([linker, ld_flags, '$in', flags(tools.linker_output('$out')), lib_flags]),
description = 'LINK $out')
# --- build edges
ninja.build('build.ninja', 'bootstrap',
implicit = sys.argv[0])
hdr_files = list(get_files('include', '*.h++'))
src_files = list(get_files('src', '*.c++'))
obj_files = [object_file(fn) for fn in src_files]
for fn in src_files:
ninja.build(object_file(fn), 'cxx',
inputs = fn)
program = os.path.join('bin', 'yajna') + tools.executable_extension()
ninja.build(program, 'link',
inputs = obj_files)
ninja.build('yajna', 'phony',
inputs = program)
test_src_files = list(get_files('test', '*.c++'))
test_obj_files = [object_file(fn) for fn in test_src_files]
for fn in test_src_files:
ninja.build(object_file(fn), 'cxx',
inputs = fn)
test_runner = os.path.join('bin', 'test') + tools.executable_extension()
ninja.build(test_runner, 'link',
inputs = test_obj_files)
ninja.build('test', 'phony',
inputs = test_runner)
ninja.default('yajna')
| cc0-1.0 | -8,401,330,749,942,638,000 | 31.584746 | 198 | 0.645514 | false |
Xilef11/runesofwizardry-classics | createrune.py | 1 | 1958 | import sys
rune_Path = "./src/main/java/xilef11/mc/runesofwizardry_classics/runes/Rune"
lang_file = "src/main/resources/assets/runesofwizardry_classics/lang/en_US.lang"
runes_file = "src/main/java/xilef11/mc/runesofwizardry_classics/ModRunes.java"
shortName = sys.argv[1]
locName = sys.argv[2]
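# Expected invocation (assumed from the sys.argv usage above):
#   python createrune.py <ShortName> "<Localized Name>"
# e.g. python createrune.py Fire "Rune of Fire"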
clas = open(rune_Path+shortName+".java","w")
clas.write('''
package xilef11.mc.runesofwizardry_classics.runes;
import java.io.IOException;
import java.util.Set;
import xilef11.mc.runesofwizardry_classics.Refs;
import xilef11.mc.runesofwizardry_classics.runes.entity.RuneEntityUnimplemented;
import net.minecraft.item.ItemStack;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.Vec3i;
import com.zpig333.runesofwizardry.api.RuneEntity;
import com.zpig333.runesofwizardry.core.rune.PatternUtils;
import com.zpig333.runesofwizardry.tileentity.TileEntityDustActive;
public class Rune'''+shortName+''' extends ClassicRune {
@Override
protected ItemStack[][] setupPattern() throws IOException {
return PatternUtils.importFromJson(Refs.PATTERN_PATH+"rune'''+shortName+'''.json");
}
@Override
protected Vec3i setupEntityPos() {
return new Vec3i(0,0,0);
}
@Override
protected ItemStack[][] setupSacrifice() {
return new ItemStack[][]{
{}
};
}
@Override
public String getName() {
return Refs.Lang.RUNE+".'''+shortName.lower()+'''";
}
@Override
public RuneEntity createRune(ItemStack[][] actualPattern, EnumFacing front,
Set<BlockPos> dusts, TileEntityDustActive entity) {
return new RuneEntityUnimplemented(actualPattern, front, dusts, entity, this);
}
}
''')
clas.close()
lang = open(lang_file,"a")
lang.write('runesofwizardry_classics.rune.'+shortName.lower()+'='+locName+'\n')
lang.close()
# Note: this will always append at the very end of the file
runes = open(runes_file,"a")
runes.write(' DustRegistry.registerRune(new Rune'+shortName+'());\n')
runes.close()
| gpl-3.0 | 5,626,017,188,564,619,000 | 28.223881 | 85 | 0.752809 | false |
eudaq/eudaq-configuration | jtag_generation/others/plume/okf7/chip3/jtag_generator.py | 1 | 4579 | # JTAG files generator
# Calculates DAC values for different S/N cuts (3 to 12), generates JTAG (txt)
# files, and updates the creation date
# by Jan Dreyling-Eschweiler, [email protected]
# First version: 4. September 2015
# -----------------------
# modules
import re
import math
import numpy as np
import time
##################################################
# hard code data
input_file = "../default_jtag.txt"
sensor_name = "3"
# Middlepoints in DAC
IVDREF2 = 98
IVDREF1A = 191
IVDREF1B = 145
IVDREF1C = 95
IVDREF1D = 73
# Thermal noise: TN
THN_matA = 0.9869
THN_matB = 0.9571
THN_matC = 1.044
THN_matD = 1.065
# Fixed pattern noise: FPN
FPN_matA = 0.45
FPN_matB = 0.2892
FPN_matC = 0.5206
FPN_matD = 0.4351
# Offset
OFF_matA = -0.0354
OFF_matB = 0.1257
OFF_matC = 0.2244
OFF_matD = -0.005987
# slope stays constant
DAC_slope = 0.25
##################################################
# Calculations
# Offset in DAC units
IVDREF1A_offset = -(IVDREF1A * DAC_slope)
IVDREF1B_offset = -(IVDREF1B * DAC_slope)
IVDREF1C_offset = -(IVDREF1C * DAC_slope)
IVDREF1D_offset = -(IVDREF1D * DAC_slope)
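# e.g. IVDREF1A_offset = -(191 * 0.25) = -47.75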
# total noise
TON_matA = math.sqrt(THN_matA**2 + FPN_matA**2)
TON_matB = math.sqrt(THN_matB**2 + FPN_matB**2)
TON_matC = math.sqrt(THN_matC**2 + FPN_matC**2)
TON_matD = math.sqrt(THN_matD**2 + FPN_matD**2)
TON_avg = (TON_matA + TON_matB + TON_matC + TON_matD) / 4
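# e.g. TON_matA = sqrt(0.9869**2 + 0.45**2) ~ 1.085 (thermal and fixed-pattern
# noise added in quadrature)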
#print TON_matA, TON_matB, TON_matC, TON_matD
# Sigma to noise cut
SN = np.array([3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
# mV value
VmV_matA = (TON_matA * SN) + OFF_matA
VmV_matB = (TON_matB * SN) + OFF_matB
VmV_matC = (TON_matC * SN) + OFF_matC
VmV_matD = (TON_matD * SN) + OFF_matD
#print VmV_matA, VmV_matB, VmV_matC, VmV_matD
# DAC value
DAC_matA = (VmV_matA - IVDREF1A_offset) / DAC_slope # np.rint
DAC_matB = (VmV_matB - IVDREF1B_offset) / DAC_slope
DAC_matC = (VmV_matC - IVDREF1C_offset) / DAC_slope
DAC_matD = (VmV_matD - IVDREF1D_offset) / DAC_slope
#print DAC_matA, DAC_matB, DAC_matC, DAC_matD
# set 255 as highest value
# print np.where(DAC_matD > 255)
DAC_matA[np.where(DAC_matA > 255)] = 255.
DAC_matB[np.where(DAC_matB > 255)] = 255.
DAC_matC[np.where(DAC_matC > 255)] = 255.
DAC_matD[np.where(DAC_matD > 255)] = 255.
#print DAC_matA, DAC_matB, DAC_matC, DAC_matD
#print str(int(round(DAC_matA[i]))), str(int(round(DAC_matB[i]))), str(int(round(DAC_matC[i]))), str(int(round(DAC_matD[i])))
# Adjust DAC values
# -----------------
# e.g. DAC-vlaues (XXX) of plane 0
# line 26: XXX ; :BIAS_DAC[0][10] --> IVDREF1D
# line 27: XXX ; :BIAS_DAC[0][11] --> IVDREF1C
# line 28: XXX ; :BIAS_DAC[0][12] --> IVDREF1B
# line 29: XXX ; :BIAS_DAC[0][13] --> IVDREF1A
# line 30: XXX ; :BIAS_DAC[0][14] --> IVDREF2
for i, n in enumerate(SN):
#print i, n
output_file = "chip" + str(sensor_name) + "_thresh" + str(SN[i]) + ".txt"
print "Write file:", output_file
# IVDREF2
with open(input_file, "r") as sources:
lines = sources.readlines()
with open(output_file, "w") as sources:
for line in lines:
sources.write(re.sub(r'^(.*?)BIAS_DAC\[.\]\[14\]', str(IVDREF2) + ' ; :BIAS_DAC[0][14]', line))
# IVDREF1A
with open(output_file, "r") as sources:
lines = sources.readlines()
with open(output_file, "w") as sources:
for line in lines:
sources.write(re.sub(r'^(.*?)BIAS_DAC\[.\]\[13\]', str(int(round(DAC_matA[i]))) + ' ; :BIAS_DAC[0][13]', line))
# IVDREF1B
with open(output_file, "r") as sources:
lines = sources.readlines()
with open(output_file, "w") as sources:
for line in lines:
sources.write(re.sub(r'^(.*?)BIAS_DAC\[.\]\[12\]', str(int(round(DAC_matB[i]))) + ' ; :BIAS_DAC[0][12]', line))
# IVDREF1C
with open(output_file, "r") as sources:
lines = sources.readlines()
with open(output_file, "w") as sources:
for line in lines:
sources.write(re.sub(r'^(.*?)BIAS_DAC\[.\]\[11\]', str(int(round(DAC_matC[i]))) + ' ; :BIAS_DAC[0][11]', line))
# IVDREF1D
with open(output_file, "r") as sources:
lines = sources.readlines()
with open(output_file, "w") as sources:
for line in lines:
sources.write(re.sub(r'^(.*?)BIAS_DAC\[.\]\[10\]', str(int(round(DAC_matD[i]))) + ' ; :BIAS_DAC[0][10]', line))
# date and time
with open(output_file, "r") as sources:
lines = sources.readlines()
with open(output_file, "w") as sources:
for line in lines:
sources.write(re.sub(r'^\#JTAG\_MS(.*?)$', '#JTAG_MS ' + time.strftime("%c"), line))
# summary
print "Total noise average of sensor", str(sensor_name), "-->", TON_avg
exit()
| lgpl-3.0 | -1,253,853,782,808,532,000 | 28.928105 | 125 | 0.605809 | false |
bpsinc-native/src_third_party_chromite | lib/perf_uploader_unittest.py | 1 | 7613 | #!/usr/bin/python
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for perf_uploader module."""
import json
import os
import sys
import tempfile
import urllib2
import urlparse
sys.path.insert(0, os.path.abspath('%s/../..' % os.path.dirname(__file__)))
from chromite.lib import cros_test_lib
from chromite.lib import perf_uploader
from chromite.lib import osutils
class PerfUploadTestCase(cros_test_lib.MockTestCase):
"""Base utility class to setup mock objects and temp file for tests."""
def setUp(self):
presentation_info = perf_uploader.PresentationInfo(
master_name='ChromeOSPerf',
test_name='TestName',
)
self.PatchObject(perf_uploader, '_GetPresentationInfo',
return_value=presentation_info)
self.file_name = tempfile.NamedTemporaryFile().name
def tearDown(self):
osutils.SafeUnlink(self.file_name)
class OutputPerfValueTest(PerfUploadTestCase):
"""Test function OutputPerfValue."""
def testInvalidDescription(self):
self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
'ignored', 'a' * 257, 0, 'ignored')
self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
'ignored', 'a\x00c', 0, 'ignored')
def testInvalidUnits(self):
self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
'ignored', 'ignored', 0, 'a' * 257)
self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
'ignored', 'ignored', 0, 'a\x00c')
def testValidJson(self):
perf_uploader.OutputPerfValue(self.file_name, 'desc', 42, 'units')
data = osutils.ReadFile(self.file_name)
entry = json.loads(data)
self.assertTrue(isinstance(entry, dict))
class LoadPerfValuesTest(PerfUploadTestCase):
"""Test function LoadPerfValues."""
def testEmptyFile(self):
osutils.WriteFile(self.file_name, '')
entries = perf_uploader.LoadPerfValues(self.file_name)
self.assertEqual(0, len(entries))
def testLoadOneValue(self):
perf_uploader.OutputPerfValue(self.file_name, 'desc', 41, 'units')
entries = perf_uploader.LoadPerfValues(self.file_name)
self.assertEqual(1, len(entries))
self.assertEqual(41, entries[0].value)
self.assertEqual('desc', entries[0].description)
self.assertEqual(True, entries[0].higher_is_better)
def testLoadTwoValues(self):
perf_uploader.OutputPerfValue(self.file_name, 'desc', 41, 'units')
perf_uploader.OutputPerfValue(self.file_name, 'desc2', 42, 'units2')
entries = perf_uploader.LoadPerfValues(self.file_name)
self.assertEqual(2, len(entries))
self.assertEqual(41, entries[0].value)
self.assertEqual(42, entries[1].value)
self.assertEqual('desc2', entries[1].description)
self.assertEqual(None, entries[1].graph)
class SendToDashboardTest(PerfUploadTestCase):
"""Ensure perf values are sent to chromeperf via HTTP."""
def setUp(self):
self.urlopen = self.PatchObject(urllib2, 'urlopen')
def testOneEntry(self):
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
perf_values = perf_uploader.LoadPerfValues(self.file_name)
perf_uploader.UploadPerfValues(perf_values, 'platform', 'cros', 'chrome',
'TestName')
request = self.urlopen.call_args[0][0]
# pylint: disable=W0212
self.assertEqual(perf_uploader._DASHBOARD_UPLOAD_URL,
request.get_full_url())
# pylint: enable=W0212
data = request.get_data()
data = urlparse.parse_qs(data)['data']
entries = [json.loads(x) for x in data]
entry = entries[0][0]
self.assertEqual('cros', entry['supplemental_columns']['r_cros_version'])
self.assertEqual(42, entry['value'])
self.assertEqual('cbuildbot.TestName/desc1', entry['test'])
self.assertEqual('unit', entry['units'])
class UploadPerfValuesTest(PerfUploadTestCase):
"""Test UploadPerfValues function."""
def setUp(self):
self.send_func = self.PatchObject(perf_uploader, '_SendToDashboard')
def testOneEntry(self):
"""Upload one perf value."""
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
perf_values = perf_uploader.LoadPerfValues(self.file_name)
perf_uploader.UploadPerfValues(perf_values, 'platform', 'cros', 'chrome',
'TestName')
positional_args, _ = self.send_func.call_args
first_param = positional_args[0]
data = json.loads(first_param['data'])
self.assertEqual(1, len(data))
entry = data[0]
self.assertEqual('unit', entry['units'])
self.assertEqual('cros',
entry['supplemental_columns']['r_cros_version'])
self.assertEqual('chrome',
entry['supplemental_columns']['r_chrome_version'])
self.assertEqual('cros-platform', entry['bot'])
self.assertEqual(42, entry['value'])
self.assertEqual(0, entry['error'])
def testTwoEntriesOfSameTest(self):
"""Upload one test, two perf values."""
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
perf_values = perf_uploader.LoadPerfValues(self.file_name)
perf_uploader.UploadPerfValues(perf_values, 'platform', 'cros', 'chrome',
'TestName')
positional_args, _ = self.send_func.call_args
first_param = positional_args[0]
data = json.loads(first_param['data'])
self.assertEqual(1, len(data))
entry = data[0]
self.assertEqual('unit', entry['units'])
# Average of 40 and 42
self.assertEqual(41, entry['value'])
# Standard deviation sqrt(2)
self.assertEqual(1.4142, entry['error'])
def testTwoTests(self):
"""Upload two tests, one perf value each."""
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
perf_uploader.OutputPerfValue(self.file_name, 'desc2', 42, 'unit')
perf_values = perf_uploader.LoadPerfValues(self.file_name)
perf_uploader.UploadPerfValues(perf_values, 'platform', 'cros', 'chrome',
'TestName')
positional_args, _ = self.send_func.call_args
first_param = positional_args[0]
data = json.loads(first_param['data'])
self.assertEqual(2, len(data))
data = sorted(data, key=lambda x: x['test'])
entry = data[0]
self.assertEqual(40, entry['value'])
self.assertEqual(0, entry['error'])
entry = data[1]
self.assertEqual(42, entry['value'])
self.assertEqual(0, entry['error'])
def testTwoTestsThreeEntries(self):
"""Upload two tests, one perf value each."""
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
perf_uploader.OutputPerfValue(self.file_name, 'desc2', 42, 'unit')
perf_values = perf_uploader.LoadPerfValues(self.file_name)
perf_uploader.UploadPerfValues(perf_values, 'platform', 'cros', 'chrome',
'TestName')
positional_args, _ = self.send_func.call_args
first_param = positional_args[0]
data = json.loads(first_param['data'])
self.assertEqual(2, len(data))
data = sorted(data, key=lambda x: x['test'])
entry = data[0]
self.assertEqual(41, entry['value'])
self.assertEqual(1.4142, entry['error'])
entry = data[1]
self.assertEqual(42, entry['value'])
self.assertEqual(0, entry['error'])
if __name__ == '__main__':
cros_test_lib.main()
| bsd-3-clause | 792,030,863,317,914,600 | 37.64467 | 77 | 0.667411 | false |
distributed-system-analysis/pbench | lib/pbench/test/unit/server/test_state_tracker.py | 1 | 10738 | import pytest
from pbench.server.database.models.tracker import (
Dataset,
States,
Metadata,
DatasetBadParameterType,
DatasetBadStateTransition,
DatasetTerminalStateViolation,
DatasetNotFound,
MetadataNotFound,
MetadataBadKey,
MetadataMissingKeyValue,
MetadataDuplicateKey,
)
from pbench.server.database.models.users import User
@pytest.fixture()
def create_user() -> User:
user = User(
email="[email protected]",
password="12345",
username="test",
first_name="Test",
last_name="Account",
)
user.add()
return user
class TestStateTracker:
def test_state_enum(self):
""" Test the States ENUM properties
"""
assert len(States.__members__) == 9
for n, s in States.__members__.items():
assert str(s) == s.friendly
assert s.mutating == (
"ing" in s.friendly
), f"Enum {n} name and state don't match"
def test_construct(self, db_session, create_user):
""" Test dataset contructor
"""
user = create_user
ds = Dataset(owner=user.username, controller="frodo", name="fio")
ds.add()
assert ds.owner == user
assert ds.controller == "frodo"
assert ds.name == "fio"
assert ds.state == States.UPLOADING
assert ds.md5 is None
assert ds.created <= ds.transition
assert ds.id is not None
assert "test(1)|frodo|fio" == str(ds)
def test_dataset_survives_user(self, db_session, create_user):
"""The Dataset isn't automatically removed when the referenced
user is removed.
"""
user = create_user
ds = Dataset(owner=user.username, controller="frodo", name="fio")
ds.add()
User.delete(username=user.username)
ds1 = Dataset.attach(controller="frodo", name="fio")
assert ds1 == ds
def test_construct_bad_owner(self):
"""Test with a non-existent username
"""
with pytest.raises(DatasetBadParameterType):
Dataset(owner="notme", controller="frodo", name="fio")
def test_construct_bad_state(self, db_session, create_user):
"""Test with a non-States state value
"""
with pytest.raises(DatasetBadParameterType):
Dataset(
owner=create_user.username,
controller="frodo",
name="fio",
state="notStates",
)
def test_attach_exists(self, db_session, create_user):
""" Test that we can attach to a dataset
"""
ds1 = Dataset(
owner=create_user.username,
controller="frodo",
name="fio",
state=States.INDEXING,
)
ds1.add()
ds2 = Dataset.attach(controller="frodo", name="fio", state=States.INDEXED)
assert ds2.owner == ds1.owner
assert ds2.controller == ds1.controller
assert ds2.name == ds1.name
assert ds2.state == States.INDEXED
assert ds2.md5 is ds1.md5
assert ds2.id is ds1.id
def test_attach_none(self, db_session):
""" Test expected failure when we try to attach to a dataset that
does not exist.
"""
with pytest.raises(DatasetNotFound):
Dataset.attach(controller="frodo", name="venus", state=States.UPLOADING)
def test_attach_controller_path(self, db_session, create_user):
""" Test that we can attach using controller and name to a
dataset created by file path.
"""
ds1 = Dataset(
owner=create_user.username,
path="/foo/frodo/fio.tar.xz",
state=States.INDEXING,
)
ds1.add()
ds2 = Dataset.attach(controller="frodo", name="fio")
assert ds2.owner == ds1.owner
assert ds2.controller == ds1.controller
assert ds2.name == ds1.name
assert ds2.state == States.INDEXING
assert ds2.md5 is ds1.md5
assert ds2.id is ds1.id
def test_attach_filename(self, db_session, create_user):
""" Test that we can create a dataset using the full tarball
file path.
"""
ds1 = Dataset(
owner="test", path="/foo/bilbo/rover.tar.xz", state=States.QUARANTINED
)
ds1.add()
ds2 = Dataset.attach(controller="bilbo", name="rover")
assert ds2.owner == ds1.owner
assert ds2.controller == ds1.controller
assert ds2.name == ds1.name
assert ds2.state == States.QUARANTINED
assert ds2.md5 is ds1.md5
assert ds2.id is ds1.id
def test_advanced_good(self, db_session, create_user):
""" Test advancing the state of a dataset
"""
ds = Dataset(owner=create_user.username, controller="frodo", name="fio")
ds.add()
ds.advance(States.UPLOADED)
assert ds.state == States.UPLOADED
assert ds.created <= ds.transition
def test_advanced_bad_state(self, db_session, create_user):
"""Test with a non-States state value
"""
ds = Dataset(owner=create_user.username, controller="frodo", name="fio")
ds.add()
with pytest.raises(DatasetBadParameterType):
ds.advance("notStates")
def test_advanced_illegal(self, db_session, create_user):
""" Test that we can't advance to a state that's not a
successor to the initial state.
"""
ds = Dataset(owner=create_user.username, controller="frodo", name="fio")
ds.add()
with pytest.raises(DatasetBadStateTransition):
ds.advance(States.EXPIRED)
def test_advanced_terminal(self, db_session, create_user):
""" Test that we can't advance from a terminal state
"""
ds = Dataset(
owner=create_user.username,
controller="frodo",
name="fio",
state=States.EXPIRED,
)
ds.add()
with pytest.raises(DatasetTerminalStateViolation):
ds.advance(States.UPLOADING)
def test_lifecycle(self, db_session, create_user):
""" Advance a dataset through the entire lifecycle using the state
transition dict.
"""
ds = Dataset(owner=create_user.username, controller="frodo", name="fio")
ds.add()
assert ds.state == States.UPLOADING
beenthere = [ds.state]
while ds.state in Dataset.transitions:
advances = Dataset.transitions[ds.state]
for n in advances:
if n not in beenthere:
next = n
break
else:
break # avoid infinite reindex loop!
beenthere.append(next)
ds.advance(next)
assert ds.state == next
lifecycle = ",".join([s.name for s in beenthere])
assert (
lifecycle
== "UPLOADING,UPLOADED,UNPACKING,UNPACKED,INDEXING,INDEXED,EXPIRING,EXPIRED"
)
def test_metadata(self, db_session, create_user):
""" Various tests on Metadata keys
"""
# See if we can create a metadata row
ds = Dataset.create(owner=create_user.username, controller="frodo", name="fio")
assert ds.metadatas == []
m = Metadata.create(key=Metadata.REINDEX, value="TRUE", dataset=ds)
assert m is not None
assert ds.metadatas == [m]
# Try to get it back
m1 = Metadata.get(ds, Metadata.REINDEX)
assert m1.key == m.key
assert m1.value == m.value
assert m.id == m1.id
assert m.dataset_ref == m1.dataset_ref
# Check the str()
assert "test(1)|frodo|fio>>REINDEX" == str(m)
# Try to get a metadata key that doesn't exist
with pytest.raises(MetadataNotFound) as exc:
Metadata.get(ds, Metadata.TARBALL_PATH)
assert exc.value.dataset == ds
assert exc.value.key == Metadata.TARBALL_PATH
# Try to remove a metadata key that doesn't exist (No-op)
Metadata.remove(ds, Metadata.TARBALL_PATH)
# Try to create a metadata with a bad key
badkey = "THISISNOTTHEKEYYOURELOOKINGFOR"
with pytest.raises(MetadataBadKey) as exc:
Metadata(key=badkey, value=None)
assert exc.value.key == badkey
# Try to create a key without a value
with pytest.raises(MetadataMissingKeyValue):
Metadata(key=Metadata.REINDEX)
# Try to add a duplicate metadata key
with pytest.raises(MetadataDuplicateKey) as exc:
m1 = Metadata(key=Metadata.REINDEX, value="IRRELEVANT")
m1.add(ds)
assert exc.value.key == Metadata.REINDEX
assert exc.value.dataset == ds
assert ds.metadatas == [m]
# Try to add a Metadata key to something that's not a dataset
with pytest.raises(DatasetBadParameterType) as exc:
m1 = Metadata(key=Metadata.TARBALL_PATH, value="DONTCARE")
m1.add("foobar")
assert exc.value.bad_value == "foobar"
assert exc.value.expected_type == Dataset.__name__
# Try to create a Metadata with a bad value for the dataset
with pytest.raises(DatasetBadParameterType) as exc:
m1 = Metadata.create(key=Metadata.REINDEX, value="TRUE", dataset=[ds])
assert exc.value.bad_value == [ds]
assert exc.value.expected_type == Dataset.__name__
# Try to update the metadata key
m.value = "False"
m.update()
m1 = Metadata.get(ds, Metadata.REINDEX)
assert m.id == m1.id
assert m.dataset_ref == m1.dataset_ref
assert m.key == m1.key
assert m.value == "False"
# Delete the key and make sure its gone
m.delete()
with pytest.raises(MetadataNotFound) as exc:
Metadata.get(ds, Metadata.REINDEX)
assert exc.value.dataset == ds
assert exc.value.key == Metadata.REINDEX
assert ds.metadatas == []
def test_metadata_remove(self, db_session, create_user):
""" Test that we can remove a Metadata key
"""
ds = Dataset.create(owner=create_user.username, controller="frodo", name="fio")
assert ds.metadatas == []
m = Metadata(key=Metadata.ARCHIVED, value="TRUE")
m.add(ds)
assert ds.metadatas == [m]
Metadata.remove(ds, Metadata.ARCHIVED)
assert ds.metadatas == []
with pytest.raises(MetadataNotFound) as exc:
Metadata.get(ds, Metadata.ARCHIVED)
assert exc.value.dataset == ds
assert exc.value.key == Metadata.ARCHIVED
Metadata.remove(ds, Metadata.REINDEX)
assert ds.metadatas == []
| gpl-3.0 | -1,718,631,178,785,787,600 | 34.206557 | 88 | 0.59173 | false |
pre-commit/pre-commit | tests/error_handler_test.py | 1 | 6587 | import os.path
import stat
import sys
from unittest import mock
import pytest
import re_assert
from pre_commit import error_handler
from pre_commit.errors import FatalError
from pre_commit.store import Store
from pre_commit.util import CalledProcessError
from testing.util import cmd_output_mocked_pre_commit_home
from testing.util import xfailif_windows
@pytest.fixture
def mocked_log_and_exit():
with mock.patch.object(error_handler, '_log_and_exit') as log_and_exit:
yield log_and_exit
def test_error_handler_no_exception(mocked_log_and_exit):
with error_handler.error_handler():
pass
assert mocked_log_and_exit.call_count == 0
def test_error_handler_fatal_error(mocked_log_and_exit):
exc = FatalError('just a test')
with error_handler.error_handler():
raise exc
mocked_log_and_exit.assert_called_once_with(
'An error has occurred',
1,
exc,
# Tested below
mock.ANY,
)
pattern = re_assert.Matches(
r'Traceback \(most recent call last\):\n'
r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
r' yield\n'
r' File ".+tests.error_handler_test.py", line \d+, '
r'in test_error_handler_fatal_error\n'
r' raise exc\n'
r'(pre_commit\.errors\.)?FatalError: just a test\n',
)
pattern.assert_matches(mocked_log_and_exit.call_args[0][3])
def test_error_handler_uncaught_error(mocked_log_and_exit):
exc = ValueError('another test')
with error_handler.error_handler():
raise exc
mocked_log_and_exit.assert_called_once_with(
'An unexpected error has occurred',
3,
exc,
# Tested below
mock.ANY,
)
pattern = re_assert.Matches(
r'Traceback \(most recent call last\):\n'
r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
r' yield\n'
r' File ".+tests.error_handler_test.py", line \d+, '
r'in test_error_handler_uncaught_error\n'
r' raise exc\n'
r'ValueError: another test\n',
)
pattern.assert_matches(mocked_log_and_exit.call_args[0][3])
def test_error_handler_keyboardinterrupt(mocked_log_and_exit):
exc = KeyboardInterrupt()
with error_handler.error_handler():
raise exc
mocked_log_and_exit.assert_called_once_with(
'Interrupted (^C)',
130,
exc,
# Tested below
mock.ANY,
)
pattern = re_assert.Matches(
r'Traceback \(most recent call last\):\n'
r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
r' yield\n'
r' File ".+tests.error_handler_test.py", line \d+, '
r'in test_error_handler_keyboardinterrupt\n'
r' raise exc\n'
r'KeyboardInterrupt\n',
)
pattern.assert_matches(mocked_log_and_exit.call_args[0][3])
def test_log_and_exit(cap_out, mock_store_dir):
tb = (
'Traceback (most recent call last):\n'
' File "<stdin>", line 2, in <module>\n'
'pre_commit.errors.FatalError: hai\n'
)
with pytest.raises(SystemExit) as excinfo:
error_handler._log_and_exit('msg', 1, FatalError('hai'), tb)
assert excinfo.value.code == 1
printed = cap_out.get()
log_file = os.path.join(mock_store_dir, 'pre-commit.log')
assert printed == f'msg: FatalError: hai\nCheck the log at {log_file}\n'
assert os.path.exists(log_file)
with open(log_file) as f:
logged = f.read()
pattern = re_assert.Matches(
r'^### version information\n'
r'\n'
r'```\n'
r'pre-commit version: \d+\.\d+\.\d+\n'
r'sys.version:\n'
r'( .*\n)*'
r'sys.executable: .*\n'
r'os.name: .*\n'
r'sys.platform: .*\n'
r'```\n'
r'\n'
r'### error information\n'
r'\n'
r'```\n'
r'msg: FatalError: hai\n'
r'```\n'
r'\n'
r'```\n'
r'Traceback \(most recent call last\):\n'
r' File "<stdin>", line 2, in <module>\n'
r'pre_commit\.errors\.FatalError: hai\n'
r'```\n',
)
pattern.assert_matches(logged)
def test_error_handler_non_ascii_exception(mock_store_dir):
with pytest.raises(SystemExit):
with error_handler.error_handler():
raise ValueError('☃')
def test_error_handler_non_utf8_exception(mock_store_dir):
with pytest.raises(SystemExit):
with error_handler.error_handler():
raise CalledProcessError(1, ('exe',), 0, b'error: \xa0\xe1', b'')
def test_error_handler_non_stringable_exception(mock_store_dir):
class C(Exception):
def __str__(self):
raise RuntimeError('not today!')
with pytest.raises(SystemExit):
with error_handler.error_handler():
raise C()
def test_error_handler_no_tty(tempdir_factory):
pre_commit_home = tempdir_factory.get()
ret, out, _ = cmd_output_mocked_pre_commit_home(
sys.executable,
'-c',
'from pre_commit.error_handler import error_handler\n'
'with error_handler():\n'
' raise ValueError("\\u2603")\n',
retcode=3,
tempdir_factory=tempdir_factory,
pre_commit_home=pre_commit_home,
)
log_file = os.path.join(pre_commit_home, 'pre-commit.log')
out_lines = out.splitlines()
assert out_lines[-2] == 'An unexpected error has occurred: ValueError: ☃'
assert out_lines[-1] == f'Check the log at {log_file}'
@xfailif_windows # pragma: win32 no cover
def test_error_handler_read_only_filesystem(mock_store_dir, cap_out, capsys):
    # a better scenario would be for even a Store crash to be handled, but
    # realistically we're only targeting systems where the Store has already
    # been set up
Store()
write = (stat.S_IWGRP | stat.S_IWOTH | stat.S_IWUSR)
os.chmod(mock_store_dir, os.stat(mock_store_dir).st_mode & ~write)
with pytest.raises(SystemExit):
with error_handler.error_handler():
raise ValueError('ohai')
output = cap_out.get()
assert output.startswith(
'An unexpected error has occurred: ValueError: ohai\n'
'Failed to write to log at ',
)
# our cap_out mock is imperfect so the rest of the output goes to capsys
out, _ = capsys.readouterr()
# the things that normally go to the log file will end up here
assert '### version information' in out
| mit | -6,707,430,497,092,425,000 | 30.347619 | 79 | 0.60003 | false |
ericacheong/p4_conference_central | models.py | 1 | 4915 | #!/usr/bin/env python
"""models.py
Udacity conference server-side Python App Engine data & ProtoRPC models
$Id: models.py,v 1.1 2014/05/24 22:01:10 wesc Exp $
created/forked from conferences.py by wesc on 2014 may 24
"""
__author__ = '[email protected] (Wesley Chun)'
import httplib
import endpoints
from protorpc import messages
from google.appengine.ext import ndb
class ConflictException(endpoints.ServiceException):
"""ConflictException -- exception mapped to HTTP 409 response"""
http_status = httplib.CONFLICT
class Profile(ndb.Model):
"""Profile -- User profile object"""
displayName = ndb.StringProperty()
mainEmail = ndb.StringProperty()
teeShirtSize = ndb.StringProperty(default='NOT_SPECIFIED')
conferenceKeysToAttend = ndb.StringProperty(repeated=True)
sessionKeysWishlist = ndb.StringProperty(repeated=True)
class ProfileMiniForm(messages.Message):
"""ProfileMiniForm -- update Profile form message"""
displayName = messages.StringField(1)
teeShirtSize = messages.EnumField('TeeShirtSize', 2)
class ProfileForm(messages.Message):
"""ProfileForm -- Profile outbound form message"""
displayName = messages.StringField(1)
mainEmail = messages.StringField(2)
teeShirtSize = messages.EnumField('TeeShirtSize', 3)
conferenceKeysToAttend = messages.StringField(4, repeated=True)
class StringMessage(messages.Message):
"""StringMessage-- outbound (single) string message"""
data = messages.StringField(1, required=True)
class BooleanMessage(messages.Message):
"""BooleanMessage-- outbound Boolean value message"""
data = messages.BooleanField(1)
class Conference(ndb.Model):
"""Conference -- Conference object"""
name = ndb.StringProperty(required=True)
description = ndb.StringProperty()
organizerUserId = ndb.StringProperty()
topics = ndb.StringProperty(repeated=True)
city = ndb.StringProperty()
startDate = ndb.DateProperty()
month = ndb.IntegerProperty() # TODO: do we need for indexing like Java?
endDate = ndb.DateProperty()
maxAttendees = ndb.IntegerProperty()
seatsAvailable = ndb.IntegerProperty()
class ConferenceForm(messages.Message):
"""ConferenceForm -- Conference outbound form message"""
name = messages.StringField(1)
description = messages.StringField(2)
organizerUserId = messages.StringField(3)
topics = messages.StringField(4, repeated=True)
city = messages.StringField(5)
startDate = messages.StringField(6) #DateTimeField()
month = messages.IntegerField(7)
maxAttendees = messages.IntegerField(8)
seatsAvailable = messages.IntegerField(9)
endDate = messages.StringField(10) #DateTimeField()
websafeKey = messages.StringField(11)
organizerDisplayName = messages.StringField(12)
class ConferenceForms(messages.Message):
"""ConferenceForms -- multiple Conference outbound form message"""
items = messages.MessageField(ConferenceForm, 1, repeated=True)
class TeeShirtSize(messages.Enum):
"""TeeShirtSize -- t-shirt size enumeration value"""
NOT_SPECIFIED = 1
XS_M = 2
XS_W = 3
S_M = 4
S_W = 5
M_M = 6
M_W = 7
L_M = 8
L_W = 9
XL_M = 10
XL_W = 11
XXL_M = 12
XXL_W = 13
XXXL_M = 14
XXXL_W = 15
class ConferenceQueryForm(messages.Message):
"""ConferenceQueryForm -- Conference query inbound form message"""
field = messages.StringField(1)
operator = messages.StringField(2)
value = messages.StringField(3)
class ConferenceQueryForms(messages.Message):
"""ConferenceQueryForms -- multiple ConferenceQueryForm inbound form message"""
filters = messages.MessageField(ConferenceQueryForm, 1, repeated=True)
class Session(ndb.Model):
"""Session -- Session object"""
name = ndb.StringProperty(required=True)
highlights = ndb.StringProperty()
speakers = ndb.StringProperty(repeated=True)
duration = ndb.IntegerProperty() # Duration in minutes
typeOfSession = ndb.StringProperty()
date = ndb.DateProperty()
startTime = ndb.TimeProperty()
class SessionForm(messages.Message):
"""SessionForm -- Session outbound form message"""
name = messages.StringField(1)
highlights = messages.StringField(2)
speakers = messages.StringField(3, repeated=True)
duration = messages.IntegerField(4)
typeOfSession = messages.StringField(5)
date = messages.StringField(6)
startTime = messages.StringField(7)
websafeKey = messages.StringField(8)
class SessionForms(messages.Message):
"""SessionForms -- multiple Session outbound form message"""
items = messages.MessageField(SessionForm, 1, repeated=True)
| apache-2.0 | 8,344,834,078,810,159,000 | 34.359712 | 86 | 0.684842 | false |
mastizada/kuma | kuma/search/models.py | 1 | 15211 | # -*- coding: utf-8 -*-
import operator
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db import models
from django.db.models.signals import post_delete
from django.utils.html import strip_tags
from django.utils import timezone
from django.utils.functional import cached_property
from django.template.defaultfilters import slugify
from elasticutils.contrib.django import MappingType, Indexable
from elasticutils.contrib.django.tasks import index_objects
from kuma.core.managers import PrefetchTaggableManager
from kuma.core.urlresolvers import reverse
from kuma.wiki.models import Document
from .decorators import register_mapping_type
from .queries import DocumentS
from .signals import delete_index
class IndexManager(models.Manager):
"""
The model manager to implement a couple of useful methods for handling
search indexes.
"""
def get_current(self):
try:
return (self.filter(promoted=True, populated=True)
.order_by('-created_at'))[0]
except (self.model.DoesNotExist, IndexError, AttributeError):
fallback_name = settings.ES_INDEXES['default']
return Index(name=fallback_name, populated=True, promoted=True)
class Index(models.Model):
"""
Model to store a bunch of metadata about search indexes including
a way to promote it to be picked up as the "current" one.
"""
created_at = models.DateTimeField(default=timezone.now)
name = models.CharField(max_length=30, blank=True, null=True,
help_text='The search index name, set to '
'the created date when left empty')
promoted = models.BooleanField(default=False)
populated = models.BooleanField(default=False)
objects = IndexManager()
class Meta:
verbose_name = 'Index'
verbose_name_plural = 'Indexes'
ordering = ['-created_at']
def save(self, *args, **kwargs):
if not self.name:
self.name = self.created_at.strftime('%Y-%m-%d-%H-%M-%S')
super(Index, self).save(*args, **kwargs)
def __unicode__(self):
return self.name
@cached_property
def successor(self):
try:
return self.get_next_by_created_at()
except (Index.DoesNotExist, ValueError):
return None
@cached_property
def prefixed_name(self):
"The name to use for the search index in ES"
return '%s-%s' % (settings.ES_INDEX_PREFIX, self.name)
def populate(self):
from .tasks import populate_index
populate_index.delay(self.pk)
def record_outdated(self, instance):
if self.successor:
return OutdatedObject.objects.create(index=self.successor,
content_object=instance)
def promote(self):
rescheduled = []
for outdated_object in self.outdated_objects.all():
instance = outdated_object.content_object
label = ('%s.%s.%s' %
(outdated_object.content_type.natural_key() +
(instance.id,))) # gives us 'wiki.document.12345'
if label in rescheduled:
continue
mappping_type = instance.get_mapping_type()
index_objects.delay(mappping_type, [instance.id])
rescheduled.append(label)
self.outdated_objects.all().delete()
self.promoted = True
self.save()
def demote(self):
self.promoted = False
self.save()
post_delete.connect(delete_index, sender=Index,
dispatch_uid='search.index.delete')
class OutdatedObject(models.Model):
index = models.ForeignKey(Index, related_name='outdated_objects')
created_at = models.DateTimeField(default=timezone.now)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
class FilterGroup(models.Model):
"""
A way to group different kinds of filters from each other.
"""
name = models.CharField(max_length=255)
slug = models.CharField(max_length=255, blank=True, null=True,
help_text='the slug to be used as the name of the '
'query parameter in the search URL')
order = models.IntegerField(default=1,
help_text='An integer defining which order '
'the filter group should show up '
'in the sidebar')
class Meta:
ordering = ('-order', 'name')
unique_together = (
('name', 'slug'),
)
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
super(FilterGroup, self).save(*args, **kwargs)
def __unicode__(self):
return self.name
class FilterManager(models.Manager):
use_for_related_fields = True
def visible_only(self):
return self.filter(visible=True)
class Filter(models.Model):
"""
The model to store custom search filters in the database. This is
used to dynamically tweak the search filters available to users.
"""
OPERATOR_AND = 'AND'
OPERATOR_OR = 'OR'
OPERATOR_CHOICES = (
(OPERATOR_OR, OPERATOR_OR),
(OPERATOR_AND, OPERATOR_AND),
)
OPERATORS = {
OPERATOR_OR: operator.or_,
OPERATOR_AND: operator.and_,
}
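    # Illustrative: when a filter has several tags, the matching Q objects are
    # combined with the configured operator, e.g.
    #   reduce(Filter.OPERATORS['OR'], [Q(tags__name='css'), Q(tags__name='js')])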
name = models.CharField(max_length=255, db_index=True,
help_text='the English name of the filter '
'to be shown in the frontend UI')
slug = models.CharField(max_length=255, db_index=True,
help_text='the slug to be used as a query '
'parameter in the search URL')
shortcut = models.CharField(max_length=255, db_index=True,
null=True, blank=True,
help_text='the name of the shortcut to '
'show in the command and query UI. '
'e.g. fxos')
group = models.ForeignKey(FilterGroup, related_name='filters',
help_text='E.g. "Topic", "Skill level" etc')
tags = PrefetchTaggableManager(help_text='A comma-separated list of tags. '
'If more than one tag given a OR '
'query is executed')
operator = models.CharField(max_length=3, choices=OPERATOR_CHOICES,
default=OPERATOR_OR,
help_text='The logical operator to use '
'if more than one tag is given')
enabled = models.BooleanField(default=True,
help_text='Whether this filter is shown '
'to users or not.')
visible = models.BooleanField(default=True,
help_text='Whether this filter is shown '
'at public places, e.g. the '
'command and query UI')
objects = FilterManager()
class Meta(object):
unique_together = (
('name', 'slug'),
)
def __unicode__(self):
return self.name
def get_absolute_url(self):
path = reverse('search', locale=settings.LANGUAGE_CODE)
return '%s%s?%s=%s' % (settings.SITE_URL, path,
self.group.slug, self.slug)
@register_mapping_type
class DocumentType(MappingType, Indexable):
excerpt_fields = ['summary', 'content']
exclude_slugs = ['Talk:', 'User:', 'User_talk:', 'Template_talk:',
'Project_talk:']
@classmethod
def get_model(cls):
return Document
@classmethod
def get_index(cls):
return Index.objects.get_current().prefixed_name
@classmethod
def search(cls):
"""Returns a typed S for this class.
:returns: an `S` for this DjangoMappingType
"""
return DocumentS(cls)
@classmethod
def get_analysis(cls):
return {
'filter': {
'kuma_word_delimiter': {
'type': 'word_delimiter',
'preserve_original': True, # hi-fi -> hifi, hi-fi
'catenate_words': True, # hi-fi -> hifi
'catenate_numbers': True, # 90-210 -> 90210
}
},
'analyzer': {
'default': {
'tokenizer': 'standard',
'filter': ['standard', 'elision']
},
# a custom analyzer that strips html and uses our own
# word delimiter filter and the elision filter#
# (e.g. L'attribut -> attribut). The rest is the same as
# the snowball analyzer
'kuma_content': {
'type': 'custom',
'tokenizer': 'standard',
'char_filter': ['html_strip'],
'filter': [
'elision',
'kuma_word_delimiter',
'lowercase',
'standard',
'stop',
'snowball',
],
},
'kuma_title': {
'type': 'custom',
'tokenizer': 'standard',
'filter': [
'elision',
'kuma_word_delimiter',
'lowercase',
'standard',
'snowball',
],
},
'case_sensitive': {
'type': 'custom',
'tokenizer': 'keyword'
},
'caseInsensitiveKeyword': {
'type': 'custom',
'tokenizer': 'keyword',
'filter': 'lowercase'
}
},
}
@classmethod
def get_mapping(cls):
return {
# try to not waste so much space
'_all': {'enabled': False},
'_boost': {'name': '_boost', 'null_value': 1.0, 'type': 'float'},
'content': {
'type': 'string',
'analyzer': 'kuma_content',
# faster highlighting
'term_vector': 'with_positions_offsets',
},
'id': {'type': 'long', 'index': 'not_analyzed'},
'locale': {'type': 'string', 'index': 'not_analyzed'},
'modified': {'type': 'date'},
'slug': {'type': 'string', 'index': 'not_analyzed'},
'parent': {
'type': 'nested',
'properties': {
'id': {'type': 'long', 'index': 'not_analyzed'},
'title': {'type': 'string', 'analyzer': 'kuma_title'},
'slug': {'type': 'string', 'index': 'not_analyzed'},
'locale': {'type': 'string', 'index': 'not_analyzed'},
}
},
'summary': {
'type': 'string',
'analyzer': 'kuma_content',
# faster highlighting
'term_vector': 'with_positions_offsets',
},
'tags': {'type': 'string', 'analyzer': 'case_sensitive'},
'title': {
'type': 'string',
'analyzer': 'kuma_title',
'boost': 1.2, # the title usually has the best description
},
'kumascript_macros': {
'type': 'string',
'analyzer': 'caseInsensitiveKeyword'
},
'css_classnames': {
'type': 'string',
'analyzer': 'caseInsensitiveKeyword'
},
'html_attributes': {
'type': 'string',
'analyzer': 'caseInsensitiveKeyword'
},
}
@classmethod
def extract_document(cls, obj_id, obj=None):
if obj is None:
obj = cls.get_model().objects.get(pk=obj_id)
doc = {
'id': obj.id,
'title': obj.title,
'slug': obj.slug,
'summary': obj.get_summary(strip_markup=True),
'locale': obj.locale,
'modified': obj.modified,
'content': strip_tags(obj.rendered_html),
'tags': list(obj.tags.values_list('name', flat=True)),
'kumascript_macros': obj.extract_kumascript_macro_names(),
'css_classnames': obj.extract_css_classnames(),
'html_attributes': obj.extract_html_attributes(),
}
if obj.zones.exists():
# boost all documents that are a zone
doc['_boost'] = 8.0
        elif len(obj.slug.split('/')) == 1:
# a little boost if no zone but still first level
doc['_boost'] = 4.0
else:
doc['_boost'] = 1.0
if obj.parent:
doc['parent'] = {
'id': obj.parent.id,
'title': obj.parent.title,
'locale': obj.parent.locale,
'slug': obj.parent.slug,
}
else:
doc['parent'] = {}
return doc
@classmethod
def get_indexable(cls):
"""
For this mapping type return a list of model IDs that should be
indexed with the management command, in a full reindex.
WARNING: When changing this code make sure to update the
``should_update`` method below, too!
"""
model = cls.get_model()
excludes = []
for exclude in cls.exclude_slugs:
excludes.append(models.Q(slug__icontains=exclude))
return (model.objects
.filter(is_template=False,
is_redirect=False,
deleted=False)
.exclude(reduce(operator.or_, excludes))
.values_list('id', flat=True))
@classmethod
def should_update(cls, obj):
"""
Given a Document instance should return boolean value
whether the instance should be indexed or not.
WARNING: This *must* mirror the logic of the ``get_indexable``
method above!
"""
return (not obj.is_template and
not obj.is_redirect and
not obj.deleted and
not any([exclude in obj.slug
for exclude in cls.exclude_slugs]))
def get_excerpt(self):
for field in self.excerpt_fields:
if field in self.es_meta.highlight:
return u'…'.join(self.es_meta.highlight[field])
return self.summary
| mpl-2.0 | 168,948,031,515,714,620 | 35.125891 | 79 | 0.509369 | false |
deerwalk/voltdb | lib/python/voltcli/voltadmin.d/stop.py | 1 | 3134 | # This file is part of VoltDB.
# Copyright (C) 2008-2017 VoltDB Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
# Stop a node. Written to easily support multiple, but configured for
# a single host for now.
from voltcli.hostinfo import Host
from voltcli.hostinfo import Hosts
from voltcli import utility
import sys
@VOLT.Command(
bundles = VOLT.AdminBundle(),
description = 'Stop one host of a running VoltDB cluster.',
arguments = (
VOLT.StringArgument('target_host', 'the target hostname[:port] or address[:port]. (default port=3021)'),
),
)
def stop(runner):
# Exec @SystemInformation to find out about the cluster.
response = runner.call_proc('@SystemInformation',
[VOLT.FastSerializer.VOLTTYPE_STRING],
['OVERVIEW'])
# Convert @SystemInformation results to objects.
hosts = Hosts(runner.abort)
for tuple in response.table(0).tuples():
hosts.update(tuple[0], tuple[1], tuple[2])
# Connect to an arbitrary host that isn't being stopped.
defaultport = 3021
min_hosts = 1
max_hosts = 1
target_host = utility.parse_hosts(runner.opts.target_host, min_hosts, max_hosts, defaultport)[0]
(thost, chost) = hosts.get_target_and_connection_host(target_host.host, target_host.port)
if thost is None:
runner.abort('Host not found in cluster: %s:%d' % (target_host.host, target_host.port))
if chost is None:
runner.abort('The entire cluster is being stopped, use "shutdown" instead.')
if runner.opts.username:
user_info = ', user: %s' % runner.opts.username
else:
user_info = ''
runner.info('Connecting to %s:%d%s (%s) to issue "stop" command' %
(chost.get_admininterface(), chost.adminport, user_info, chost.hostname))
runner.voltdb_connect(chost.get_admininterface(), chost.adminport,
runner.opts.username, runner.opts.password,
runner.opts.ssl_config)
# Stop the requested host using exec @StopNode HOST_ID
runner.info('Stopping host %d: %s:%s' % (thost.id, thost.hostname, thost.internalport))
if not runner.opts.dryrun:
response = runner.call_proc('@StopNode',
[VOLT.FastSerializer.VOLTTYPE_INTEGER],
[thost.id],
check_status=False)
print response
if response.status() != 1: # not SUCCESS
sys.exit(1)
| agpl-3.0 | 2,137,626,913,957,797,000 | 40.786667 | 112 | 0.64933 | false |
13steinj/praw | praw/objector.py | 1 | 6362 | """Provides the Objector class."""
import re
from .exceptions import APIException
class Objector(object):
"""The objector builds :class:`.RedditBase` objects."""
@staticmethod
def _camel_to_snake(name):
"""Return `name` converted from camelCase to snake_case.
Code from http://stackoverflow.com/a/1176023/.
"""
first_break_replaced = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub(
'([a-z0-9])([A-Z])', r'\1_\2', first_break_replaced).lower()
@classmethod
def _snake_case_keys(cls, dictionary):
"""Return a copy of dictionary with keys converted to snake_case.
:param dictionary: The dict to be corrected.
"""
return {cls._camel_to_snake(k): v for k, v in dictionary.items()}
def __init__(self, reddit):
"""Initialize an Objector instance.
:param reddit: An instance of :class:`~.Reddit`.
"""
self.parsers = {}
self._reddit = reddit
def kind(self, instance):
"""Return the kind from the instance class.
:param instance: An instance of a subclass of RedditBase.
"""
retval = None
for key in self.parsers:
if isinstance(instance, self.parsers[key]):
retval = key
break
return retval
def _objectify_dict(self, data):
"""Create RedditBase objects from dicts.
:param data: The structured data, assumed to be a dict.
:returns: An instance of :class:`~.RedditBase`.
"""
if {'conversation', 'messages', 'modActions'}.issubset(data):
parser = self.parsers['ModmailConversation']
elif {'actionTypeId', 'author', 'date'}.issubset(data):
# Modmail mod action
data = self._snake_case_keys(data)
parser = self.parsers['ModmailAction']
elif {'bodyMarkdown', 'isInternal'}.issubset(data):
# Modmail message
data = self._snake_case_keys(data)
parser = self.parsers['ModmailMessage']
elif {'isAdmin', 'isDeleted'}.issubset(data):
# Modmail author
data = self._snake_case_keys(data)
# Prevent clobbering base-36 id
del data['id']
data['is_subreddit_mod'] = data.pop('is_mod')
parser = self.parsers[self._reddit.config.kinds['redditor']]
elif {'banStatus', 'muteStatus', 'recentComments'}.issubset(data):
# Modmail user
data = self._snake_case_keys(data)
data['created_string'] = data.pop('created')
parser = self.parsers[self._reddit.config.kinds['redditor']]
elif {'displayName', 'id', 'type'}.issubset(data):
# Modmail subreddit
data = self._snake_case_keys(data)
parser = self.parsers[self._reddit.config.kinds[data['type']]]
elif ({'date', 'id', 'name'}.issubset(data)
or {'id', 'name', 'permissions'}.issubset(data)):
parser = self.parsers[self._reddit.config.kinds['redditor']]
elif {'text', 'url'}.issubset(data):
if 'color' in data or 'linkUrl' in data:
parser = self.parsers['Button']
else:
parser = self.parsers['MenuLink']
elif {'children', 'text'}.issubset(data):
parser = self.parsers['Submenu']
elif {'height', 'url', 'width'}.issubset(data):
parser = self.parsers['Image']
elif {'isSubscribed', 'name', 'subscribers'}.issubset(data):
# discards icon and subscribed information
return self._reddit.subreddit(data['name'])
elif {'authorFlairType', 'name'}.issubset(data):
# discards flair information
return self._reddit.redditor(data['name'])
elif {'parent_id'}.issubset(data):
parser = self.parsers[self._reddit.config.kinds['comment']]
else:
if 'user' in data:
parser = self.parsers[self._reddit.config.kinds['redditor']]
data['user'] = parser.parse({'name': data['user']},
self._reddit)
return data
return parser.parse(data, self._reddit)
def objectify(self, data):
"""Create RedditBase objects from data.
:param data: The structured data.
:returns: An instance of :class:`~.RedditBase`, or ``None`` if
given ``data`` is ``None``.
"""
# pylint: disable=too-many-return-statements
if data is None: # 204 no content
return None
if isinstance(data, list):
return [self.objectify(item) for item in data]
if 'kind' in data and ('shortName' in data or data['kind'] in
('menu', 'moderators')):
# This is a widget
parser = self.parsers.get(data['kind'], self.parsers['widget'])
return parser.parse(data, self._reddit)
elif {'kind', 'data'}.issubset(data) and data['kind'] in self.parsers:
parser = self.parsers[data['kind']]
return parser.parse(data['data'], self._reddit)
elif 'json' in data and 'data' in data['json']:
if 'things' in data['json']['data']: # Submission.reply
return self.objectify(data['json']['data']['things'])
if 'url' in data['json']['data']: # Subreddit.submit
# The URL is the URL to the submission, so it's removed.
del data['json']['data']['url']
parser = self.parsers[self._reddit.config.kinds['submission']]
else:
parser = self.parsers['LiveUpdateEvent']
return parser.parse(data['json']['data'], self._reddit)
elif 'json' in data and 'errors' in data['json']:
errors = data['json']['errors']
if len(errors) == 1:
raise APIException(*errors[0])
assert not errors
elif isinstance(data, dict):
return self._objectify_dict(data)
return data
def register(self, kind, cls):
"""Register a class for a given kind.
:param kind: The kind in the parsed data to map to ``cls``.
:param cls: A RedditBase class.
"""
self.parsers[kind] = cls
| bsd-2-clause | 7,777,390,700,536,816,000 | 38.271605 | 78 | 0.553128 | false |
vroomfondle/podi | app/controllers/play_controller.py | 1 | 6940 | """
Podi, a command-line interface for Kodi.
Copyright (C) 2015 Peter Frost <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from cement.core import controller
from lib.podi.rpc.library.movies import list_movies
from lib.podi.rpc.player import play_movie, play_episode,\
enable_subtitles, select_subtitle, list_active_players, select_audio, pause_unpause_player
from app.errors import JSONResponseError, NoMediaError, MissingArgumentError
import argparse
class PlayController(controller.CementBaseController):
"""
Sends RPC calls to Kodi to request playback of media items.
"""
class Meta:
"""
Defines metadata for use by the Cement framework.
"""
label = 'play'
description = "Trigger playback of a given media item "\
"(if no item is specified, any currently-playing media items will be paused or unpaused)."
stacked_on = 'base'
stacked_type = 'nested'
arguments = [(['positional_arguments'], dict(
action='store', nargs='*', help=argparse.SUPPRESS),), ]
@controller.expose(hide=True)
def default(self):
"""
Called when the user uses the 'play' command without arguments. Requests resumption of playback
of the current media item.
"""
players = self.app.send_rpc_request(list_active_players())
for player in players:
self.app.log.info("Pausing/unpausing {0}".format(player['type']))
self.app.send_rpc_request(pause_unpause_player(player['playerid']))
if len(players) == 0:
raise MissingArgumentError(
"No media item was specified for playback, and there are no currently-playing media items to pause/unpause.")
@controller.expose(aliases=['movies', 'film', 'films'],
help='Play a movie. You must provide a movie id number, e.g.: play movie 127')
def movie(self):
"""
Instructs Kodi to play the movie specified by the user.
"""
try:
movie_id = self.app.pargs.positional_arguments[0]
except IndexError:
raise MissingArgumentError(
'You must provide a movie id number, e.g.: play movie 127')
self.app.log.info("Playing movie {0}".format(
movie_id))
try:
self.app.send_rpc_request(play_movie(movie_id))
except JSONResponseError as err:
if err.error_code == -32602:
raise NoMediaError(
"Kodi returned an 'invalid parameters' error; this movie may not exist? "
"Use 'list episodes' to see available episodes.")
else:
raise err
@controller.expose(aliases=['tvepisode', 'tv_episode'],
help='Play a TV episode. You must provide an episode id number, e.g.: play episode 1340')
def episode(self):
"""
Instructs Kodi to play the TV episode specified by the user.
"""
try:
tv_episode_id = self.app.pargs.positional_arguments[0]
        except IndexError:
            raise MissingArgumentError(
                'You must provide an episode id number, e.g.: play episode 1340')
self.app.log.info("Playing episode {0}".format(tv_episode_id))
try:
self.app.send_rpc_request(play_episode(tv_episode_id))
except JSONResponseError as err:
if err.error_code == -32602:
raise NoMediaError(
"Kodi returned an 'invalid parameters' error; this episode may not exist? "
"Use 'list episodes' to see available episodes.")
else:
raise err
@controller.expose(
aliases=['subtitles'],
help="Show subtitles. You must provide a subtitle stream id (e.g. play subtitles 2). Use "\
"\"inspect player\" to see a list of available streams.")
def subtitle(self):
"""
Instructs Kodi to display the subtitle track specified by the user.
"""
try:
subtitle_id = self.app.pargs.positional_arguments[0]
        except IndexError:
raise MissingArgumentError(
"You must provide a subtitle id number, e.g.: play subtitle 2. Use \"inspect player\""
" to see a list of available subtitle streams.")
for player in self.app.send_rpc_request(list_active_players()):
try:
self.app.send_rpc_request(enable_subtitles(player['playerid']))
self.app.send_rpc_request(
select_subtitle(subtitle_id, player['playerid']))
except JSONResponseError as err:
if err.error_code == -32602:
raise NoMediaError(
"Kodi returned an 'invalid parameters' error; this stream may not exist? Use "
"\"inspect player\" to see a list of available streams.")
else:
raise err
@controller.expose(
aliases=['audio_stream'],
help="Select an audio stream for the currently-playing video. You must provide a audio stream "\
"id (e.g. play audio 2). Use \"inspect player\" to see a list of available audio streams.")
def audio(self):
"""
Instructs Kodi to play the audio track specified by the user.
"""
try:
audio_id = self.app.pargs.positional_arguments[0]
        except IndexError:
            raise MissingArgumentError(
                "You must provide an audio id number, e.g.: play audio 2. Use \"inspect player\" to see "
"a list of available audio streams.")
for player in self.app.send_rpc_request(list_active_players()):
try:
self.app.send_rpc_request(
select_audio(audio_id, player['playerid']))
except JSONResponseError as err:
if err.error_code == -32602:
raise NoMediaError(
"Kodi returned an 'invalid parameters' error; this stream may not exist? Use "
"\"inspect player\" to see a list of available streams.")
else:
raise err
| gpl-3.0 | 7,164,990,353,190,180,000 | 42.375 | 125 | 0.598847 | false |
shailcoolboy/Warp-Trinity | ResearchApps/Measurement/examples/TxPower_vs_BER/TxPower_vs_BER.py | 2 | 3070 | from warpnet_client import *
from warpnet_common_params import *
from warpnet_experiment_structs import *
from twisted.internet import reactor
from datetime import *
import time
minTime = 10
pktLen = 1412
pktPeriod = 2000
mod_hdr = 2
mod_payload = 2
txGains = [30, 45, 60];
class ScriptMaster:
def startup(self):
er_log = DataLogger('twoNode_PER_Test_v0.m', flushTime=0)
er_log.log("%%WARPnet PER Test Example - %s\r\n" % datetime.now())
registerWithServer()
nodes = dict()
#WARP Nodes
createNode(nodes, Node(0, NODE_PCAP))
createNode(nodes, Node(1, NODE_PCAP))
#Node entry for the BER processor app
createNode(nodes, Node(99, NODE_PCAP))
connectToServer(nodes)
controlStruct = ControlStruct()
nodes[0].addStruct('controlStruct', controlStruct)
nodes[1].addStruct('controlStruct', controlStruct)
cmdStructBERen = CommandStruct(COMMANDID_ENABLE_BER_TESTING, 0)
nodes[0].addStruct('cmdStructBERen', cmdStructBERen)
nodes[1].addStruct('cmdStructBERen', cmdStructBERen)
cmdStructStart = CommandStruct(COMMANDID_STARTTRIAL, 0)
nodes[0].addStruct('cmdStructStart', cmdStructStart)
cmdStructStop = CommandStruct(COMMANDID_STOPTRIAL, 0)
nodes[0].addStruct('cmdStructStop', cmdStructStop)
berStruct = ObserveBERStruct()
nodes[99].addStruct('berStruct', berStruct, handleUnrequested=True)
sendRegistrations(nodes)
controlStruct.packetGeneratorPeriod = pktPeriod
controlStruct.packetGeneratorLength = pktLen
controlStruct.channel = 9
controlStruct.txPower = 63
controlStruct.modOrderHeader = mod_hdr
controlStruct.modOrderPayload = mod_payload
nodes[0].sendToNode('controlStruct')
nodes[1].sendToNode('controlStruct')
nodes[0].sendToNode('cmdStructBERen')
nodes[1].sendToNode('cmdStructBERen')
#Experiment loop
for ii, txGain in enumerate(txGains):
print("Starting trial %d with TxGain %d at %s" % (ii, txGain, datetime.now()))
#Stop any traffic that might be running
nodes[0].sendToNode('cmdStructStop')
#Update the Tx gain at the Tx node
controlStruct.txPower = txGain
nodes[0].sendToNode('controlStruct')
#Clear the internal BER counters
berStruct.clearBitCounts()
#Let things settle
time.sleep(0.25)
#Start the trial
nodes[0].sendToNode('cmdStructStart')
#Run until minTime elapses
time.sleep(minTime)
nodes[0].sendToNode('cmdStructStop')
#Give the nodes and server time to process any final structs
time.sleep(1)
#Record the results
er_log.log("n0_txGain(%d) = %d;\t" % (ii+1, txGain))
er_log.log("n1_bitsRx(%d) = %d;\t" % (ii+1, berStruct.totalBitsReceived))
er_log.log("n1_bitErrs(%d) = %d;\r\n" % (ii+1, berStruct.totalBitErrors))
print("############################################")
print("############# Experiment Done! #############")
print("############################################")
reactor.callFromThread(reactor.stop)
sm = ScriptMaster()
stdio.StandardIO(CmdReader())
factory = WARPnetClient(sm.startup);
reactor.connectTCP('localhost', 10101, factory)
reactor.run()
| bsd-2-clause | 5,196,952,186,976,665,000 | 27.425926 | 81 | 0.695114 | false |
yangjincai/Xq2EFT | test_eft_calculator.py | 1 | 3471 | #!/usr/bin/env python2
import numpy as np
from time import time
import heapq
from matplotlib import pyplot as plt
from eft_calculator import EFT_calculator, Water
import tools
def load_coordinates(name):
lines = open('test.dat/random/'+name).readlines()[-7:-1]
coors = [[float(item) for item in line.split()[2:5]] for line in lines]
return np.array(coors)
class Classical_calculator:
def __init__(self):
self.eps = [0.12, 0.046, 0.046]
self.sigma = [1.7, 0.2245, 0.2245]
self.charge = [-0.834, 0.417, 0.417]
def eval(self, coors):
mol = Water()
coor0 = coors[:3]
coor1 = coors[3:]
e = 0.
f = np.zeros(3)
t = np.zeros(3)
com1 = mol.getCOM(coor1)
eps, sigma, charge = self.eps, self.sigma, self.charge
for i in range(3):
for j in range(3):
ener, force = self.atomicEF(coor0[i], eps[i], sigma[i], charge[i], coor1[j], eps[j], sigma[j], charge[j])
e += ener
f += force
t += np.cross(coor1[j]-com1, force)
return np.array([e, f[0], f[1], f[2], t[0], t[1], t[2]])
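        # The 7-vector returned above packs [E, Fx, Fy, Fz, Tx, Ty, Tz]:
        # total interaction energy, net force on the second water, and the
        # torque about that molecule's center of mass.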
def atomicEF(self, x0, e0, s0, q0, x1, e1, s1, q1):
k = 138.935456
e = np.sqrt(e0 * e1)
s = s0 + s1
r = np.linalg.norm(x0 - x1)
sor6 = (s/r) ** 6
evdw = e * (sor6**2 - 2 * sor6)
fvdw = e / r**2 * sor6 * (sor6 - 1) * (x1 - x0)
eelec = k * q0 * q1 / r
felec = k * q0 * q1 / r**3 * (x1 - x0)
ener = evdw + eelec
force = fvdw + felec
return ener, force
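        # Spelled out, the pairwise terms above are a 12-6 Lennard-Jones plus
        # Coulomb interaction:
        #     evdw  = sqrt(e0*e1) * ((s/r)**12 - 2*(s/r)**6),  s = s0 + s1
        #     eelec = k * q0 * q1 / r,                         k = 138.935456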
def test_random_set():
e0 = []
e1 = []
fce0 = []
fce1 = []
trq0 = []
trq1 = []
all = []
t1 = time()
for i in range(2, 2000):
# load atomic coor
name = 'test%04d.inp' % i
coors = load_coordinates(name)
# evaluate with analytical function
eft = cc.eval(coors)
e0.append(eft[0])
fce0 += list(eft[1:4])
trq0 += list(eft[4:7])
# convert atomic coor to r, phi, theta...
X0, q0 = calculator.mol.atomic2Xq(coors[:3])
X1, q1 = calculator.mol.atomic2Xq(coors[3:])
# evaluate with calculator
eft = calculator.eval(X0, q0, X1, q1)
e1.append(eft[0])
fce1 += list(eft[1:4])
trq1 += list(eft[4:7])
#all.append((-np.abs(e0[-1]-e1[-1]), name))
all.append((-np.linalg.norm(np.array(fce0) - np.array(fce1)), name))
t2 = time()
print 'took %.1f s to evaluate the random set' % (t2 - t1)
heapq.heapify(all)
#for i in range(3):
# de, name = heapq.heappop(all)
# print -de, name
# make a plot
_, axarr = plt.subplots(1, 3)
p = np.corrcoef(e0, e1)[0, 1]
print "Energy: p =", p
axarr[0].scatter(e0, e1)
axarr[0].text(0, 0, 'p=%.4f'%p)
p = np.corrcoef(fce0, fce1)[0, 1]
print "Force: p =", p
axarr[1].scatter(fce0, fce1)
axarr[1].text(0, 0, 'p=%.4f'%p)
p = np.corrcoef(trq0, trq1)[0, 1]
print "Torque: p =", p
axarr[2].scatter(trq0, trq1)
axarr[2].text(0, 0, 'p=%.4f'%p)
plt.savefig('corr.png')
if __name__ == '__main__':
order = 3
calculator = EFT_calculator(order)
t0 = time()
cc = Classical_calculator()
#calculator.setup('grid_data.txt')
calculator.setup()
calculator.fill_grid(cc)
t1 = time()
print 'took %.1f s to fill the grid' % (t1 - t0)
test_random_set()
| apache-2.0 | 7,625,391,754,920,505,000 | 28.666667 | 121 | 0.518294 | false |
mariocesar/django-startup | startup/accounts/models.py | 1 | 2514 | # coding=utf-8
from django.core.mail import send_mail
from django.db import models
from django.core import validators
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.auth.models import AbstractBaseUser, UserManager, PermissionsMixin
class User(AbstractBaseUser, PermissionsMixin):
"""
Users within the Django authentication system are represented by this
model.
Username, password and email are required. Other fields are optional.
"""
username = models.CharField(
_('username'),
max_length=30,
unique=True,
help_text=_('Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.'),
validators=[validators.RegexValidator(r'^[\w.@+-]+$', _('Enter a valid username.'), 'invalid')]
)
first_name = models.CharField(_('first name'), max_length=30, blank=True)
last_name = models.CharField(_('last name'), max_length=30, blank=True)
email = models.EmailField(_('email address'), blank=True)
photo = models.ImageField(upload_to='users', blank=True, null=True)
is_staff = models.BooleanField(
_('staff status'), default=False,
help_text=_('Designates whether the user can log into this admin '
'site.'))
is_active = models.BooleanField(
_('active'), default=True,
help_text=_('Designates whether this user should be treated as '
'active. Unselect this instead of deleting accounts.'))
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = UserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
@property
def full_name(self):
return self.get_full_name()
def get_absolute_url(self):
return settings.LOGIN_REDIRECT_URL
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name
def email_user(self, subject, message, from_email=None, **kwargs):
"""
Sends an email to this User.
"""
send_mail(subject, message, from_email, [self.email], **kwargs)
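    # Illustrative call (addresses are hypothetical):
    #     user.email_user('Welcome', 'Hello!', from_email='[email protected]')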
| mit | 7,783,932,481,574,590,000 | 31.649351 | 103 | 0.639618 | false |
elentarion/RatticWeb | cred/migrations/0035_auto__add_field_cred_expire_time.py | 1 | 7806 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Cred.expire_time'
db.add_column(u'cred_cred', 'expire_time',
self.gf('django.db.models.fields.PositiveIntegerField')(default=None, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Cred.expire_time'
db.delete_column(u'cred_cred', 'expire_time')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'cred.cred': {
'Meta': {'object_name': 'Cred'},
'attachment': ('cred.fields.SizedFileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'attachment_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'descriptionmarkdown': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'expire_time': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'child_creds'", 'default': 'None', 'to': u"orm['auth.Group']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'iconname': ('django.db.models.fields.CharField', [], {'default': "'Key.png'", 'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'latest': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'history'", 'null': 'True', 'to': u"orm['cred.Cred']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'ssh_key': ('cred.fields.SizedFileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ssh_key_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'child_creds'", 'default': 'None', 'to': u"orm['cred.Tag']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '250', 'null': 'True', 'blank': 'True'})
},
u'cred.credaudit': {
'Meta': {'ordering': "('-time',)", 'object_name': 'CredAudit'},
'audittype': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'cred': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': u"orm['cred.Cred']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credlogs'", 'to': u"orm['auth.User']"})
},
u'cred.credchangeq': {
'Meta': {'object_name': 'CredChangeQ'},
'cred': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cred.Cred']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'cred.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
}
}
complete_apps = ['cred'] | gpl-2.0 | 8,432,225,457,294,191,000 | 74.796117 | 217 | 0.549321 | false |
birkin/rapid_exports | rapid_app/models.py | 1 | 6176 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs, csv, datetime, ftplib, itertools, json, logging, operator, os, pprint, shutil, time, zipfile
import MySQLdb # really pymysql; see config/__init__.py
import requests
from django.conf import settings as project_settings
from django.core.urlresolvers import reverse
from django.db import models
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.encoding import smart_unicode
from django.utils.text import slugify
from rapid_app import settings_app
from sqlalchemy import create_engine as alchemy_create_engine
log = logging.getLogger(__name__)
######################
## django db models ##
######################
class PrintTitleDev( models.Model ):
""" Shows the dev db as it _will_ be populated.
Db will is populated by another admin task. """
key = models.CharField( max_length=20, primary_key=True )
issn = models.CharField( max_length=15 )
start = models.IntegerField()
end = models.IntegerField( blank=True, null=True )
location = models.CharField( 'other--location', max_length=25, blank=True, null=True )
building = models.CharField( max_length=25, blank=True, null=True )
call_number = models.CharField( max_length=50, blank=True, null=True )
date_updated = models.DateTimeField( 'other--date-updated', auto_now=True )
title = models.TextField( 'ss--title', blank=True, null=True )
url = models.TextField( 'ss--url', blank=True, null=True )
def __unicode__(self):
return '%s__%s_to_%s' % ( self.issn, self.start, self.end )
# end class PrintTitleDev
class ProcessorTracker( models.Model ):
""" Tracks current-status and recent-processing. """
current_status = models.CharField( max_length=50, blank=True, null=True )
processing_started = models.DateTimeField( blank=True, null=True )
processing_ended = models.DateTimeField( blank=True, null=True )
recent_processing = models.TextField( blank=True, null=True )
def __unicode__(self):
return '{status}__{started}'.format( status=self.current_status, started=self.processing_started )
class Meta:
verbose_name_plural = "Processor Tracker"
# end class PrintTitleDev
#####################
## regular classes ##
#####################
class RapidFileGrabber( object ):
""" Transfers Rapid's prepared file from remote to local location.
        Non-django class. """
def __init__( self, remote_server_name, remote_server_port, remote_server_username, remote_server_password, remote_filepath, local_destination_filepath, local_destination_extract_directory ):
self.remote_server_name = remote_server_name
self.remote_server_port = remote_server_port
self.remote_server_username = remote_server_username
self.remote_server_password = remote_server_password
self.remote_filepath = remote_filepath
self.local_destination_filepath = local_destination_filepath
self.local_destination_extract_directory = local_destination_extract_directory
def grab_file( self ):
""" Grabs file.
Called by ProcessFileFromRapidHelper.initiate_work(). """
log.debug( 'grab_file() remote_server_name, `%s`; remote_filepath, `%s`; local_destination_filepath, `%s`' % (self.remote_server_name, self.remote_filepath, self.local_destination_filepath) )
client = ftplib.FTP_TLS( timeout=10 )
client.connect( self.remote_server_name, self.remote_server_port )
client.auth()
client.prot_p()
client.login( self.remote_server_username, self.remote_server_password )
f = open( self.local_destination_filepath, 'wb' )
client.retrbinary( "RETR " + self.remote_filepath, f.write )
f.close()
client.quit()
return
# def grab_file( self ):
# """ Grabs file.
# Called by ProcessFileFromRapidHelper.initiate_work(). """
# log.debug( 'grab_file() remote_server_name, `%s`; remote_filepath, `%s`; local_destination_filepath, `%s`' % (self.remote_server_name, self.remote_filepath, self.local_destination_filepath) )
# ( sftp, transport ) = ( None, None )
# try:
# transport = paramiko.Transport( (self.remote_server_name, 22) )
# transport.connect( username=self.remote_server_username, password=self.remote_server_password )
# sftp = paramiko.SFTPClient.from_transport( transport )
# sftp.get( self.remote_filepath, self.local_destination_filepath )
# except Exception as e:
# log.error( 'exception, `%s`' % unicode(repr(e)) ); raise Exception( unicode(repr(e)) )
# return
def unzip_file( self ):
""" Unzips file.
Called by ProcessFileFromRapidHelper.initiate_work(). """
log.debug( 'unzip_file() zipped-filepath, `%s`; unzipped-directory, `%s`' % (self.local_destination_filepath, self.local_destination_extract_directory) )
try:
zip_ref = zipfile.ZipFile( self.local_destination_filepath )
except Exception as e:
log.error( 'exception, `%s`' % unicode(repr(e)) ); raise Exception( unicode(repr(e)) )
zip_ref.extractall( self.local_destination_extract_directory )
return
# end class RapidFileGrabber
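# Illustrative usage sketch -- the host, credentials and paths below are
# hypothetical; real values come from settings_app:
#
#     grabber = RapidFileGrabber(
#         'ftp.example.org', 21, 'user', 'secret',
#         '/remote/rapid.zip', '/tmp/rapid.zip', '/tmp/rapid_extract/')
#     grabber.grab_file()   # FTPS download of the prepared zip
#     grabber.unzip_file()  # extract into the local directory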
class ManualDbHandler( object ):
""" Backs-up and writes to non-rapid-manager print-titles table.
Non-django class. """
def run_sql( self, sql, connection_url ):
""" Executes sql.
Called by UpdateTitlesHelper._make_backup_table() """
time.sleep( .25 )
log.debug( 'sql, ```%s```' % sql )
engine = alchemy_create_engine( connection_url )
try:
return_val = None
result = engine.execute( sql )
if 'fetchall' in dir( result.cursor ):
return_val = result.cursor.fetchall()
result.close()
return return_val
except Exception as e:
log.error( 'exception executing sql, ```{}```'.format(unicode(repr(e))) )
# end class ManualDbHandler
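# Illustrative usage sketch (connection url is hypothetical):
#
#     handler = ManualDbHandler()
#     rows = handler.run_sql(
#         'SELECT COUNT(*) FROM print_titles;',
#         'mysql://user:secret@localhost/rapid')
#     # returns cursor.fetchall() rows for SELECTs, None otherwise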
| mit | -5,328,917,401,375,825,000 | 41.888889 | 201 | 0.650259 | false |
boudinfl/pke | pke/unsupervised/graph_based/textrank.py | 1 | 6828 | # -*- coding: utf-8 -*-
# Authors: Ygor Gallina, Florian Boudin
# Date: 10-18-2018
"""TextRank keyphrase extraction model.
Implementation of the TextRank model for keyword extraction described in:
* Rada Mihalcea and Paul Tarau.
TextRank: Bringing Order into Texts
*In Proceedings of EMNLP*, 2004.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import logging
import networkx as nx
from pke.base import LoadFile
class TextRank(LoadFile):
"""TextRank for keyword extraction.
    This model builds a graph that represents the text. A graph-based ranking
algorithm is then applied to extract the lexical units (here the words) that
are most important in the text.
In this implementation, nodes are words of certain part-of-speech (nouns
and adjectives) and edges represent co-occurrence relation, controlled by
the distance between word occurrences (here a window of 2 words). Nodes
are ranked by the TextRank graph-based ranking algorithm in its unweighted
variant.
Parameterized example::
import pke
# define the set of valid Part-of-Speeches
pos = {'NOUN', 'PROPN', 'ADJ'}
# 1. create a TextRank extractor.
extractor = pke.unsupervised.TextRank()
# 2. load the content of the document.
extractor.load_document(input='path/to/input',
language='en',
normalization=None)
# 3. build the graph representation of the document and rank the words.
# Keyphrase candidates are composed from the 33-percent
# highest-ranked words.
extractor.candidate_weighting(window=2,
pos=pos,
top_percent=0.33)
# 4. get the 10-highest scored candidates as keyphrases
keyphrases = extractor.get_n_best(n=10)
"""
def __init__(self):
"""Redefining initializer for TextRank."""
super(TextRank, self).__init__()
self.graph = nx.Graph()
"""The word graph."""
def candidate_selection(self, pos=None):
"""Candidate selection using longest sequences of PoS.
Args:
pos (set): set of valid POS tags, defaults to ('NOUN', 'PROPN',
'ADJ').
"""
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
# select sequence of adjectives and nouns
self.longest_pos_sequence_selection(valid_pos=pos)
def build_word_graph(self, window=2, pos=None):
"""Build a graph representation of the document in which nodes/vertices
are words and edges represent co-occurrence relation. Syntactic filters
can be applied to select only words of certain Part-of-Speech.
Co-occurrence relations can be controlled using the distance between
word occurrences in the document.
As the original paper does not give precise details on how the word
graph is constructed, we make the following assumptions from the example
given in Figure 2: 1) sentence boundaries **are not** taken into account
and, 2) stopwords and punctuation marks **are** considered as words when
computing the window.
Args:
window (int): the window for connecting two words in the graph,
defaults to 2.
pos (set): the set of valid pos for words to be considered as nodes
in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
"""
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
# flatten document as a sequence of (word, pass_syntactic_filter) tuples
text = [(word, sentence.pos[i] in pos) for sentence in self.sentences
for i, word in enumerate(sentence.stems)]
# add nodes to the graph
self.graph.add_nodes_from([word for word, valid in text if valid])
# add edges to the graph
for i, (node1, is_in_graph1) in enumerate(text):
# speed up things
if not is_in_graph1:
continue
for j in range(i + 1, min(i + window, len(text))):
node2, is_in_graph2 = text[j]
if is_in_graph2 and node1 != node2:
self.graph.add_edge(node1, node2)
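        # Worked example of the loop above: with window=2, j only takes the
        # value i + 1, so an edge is added solely between a word and its
        # immediate successor, and only when both pass the POS filter --
        # filtered-out tokens still occupy positions and can break links.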
def candidate_weighting(self,
window=2,
pos=None,
top_percent=None,
normalized=False):
"""Tailored candidate ranking method for TextRank. Keyphrase candidates
are either composed from the T-percent highest-ranked words as in the
original paper or extracted using the `candidate_selection()` method.
        Candidates are ranked using the sum of their (optionally length-normalized) word scores.
Args:
window (int): the window for connecting two words in the graph,
defaults to 2.
pos (set): the set of valid pos for words to be considered as nodes
in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
top_percent (float): percentage of top vertices to keep for phrase
generation.
normalized (False): normalize keyphrase score by their length,
defaults to False.
"""
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
# build the word graph
self.build_word_graph(window=window, pos=pos)
# compute the word scores using the unweighted PageRank formulae
w = nx.pagerank_scipy(self.graph, alpha=0.85, tol=0.0001, weight=None)
# generate the phrases from the T-percent top ranked words
if top_percent is not None:
# warn user as this is not the pke way of doing it
logging.warning("Candidates are generated using {}-top".format(
top_percent))
# computing the number of top keywords
nb_nodes = self.graph.number_of_nodes()
to_keep = min(math.floor(nb_nodes * top_percent), nb_nodes)
# sorting the nodes by decreasing scores
top_words = sorted(w, key=w.get, reverse=True)
# creating keyphrases from the T-top words
self.longest_keyword_sequence_selection(top_words[:int(to_keep)])
# weight candidates using the sum of their word scores
for k in self.candidates.keys():
tokens = self.candidates[k].lexical_form
self.weights[k] = sum([w[t] for t in tokens])
if normalized:
self.weights[k] /= len(tokens)
# use position to break ties
self.weights[k] += (self.candidates[k].offsets[0]*1e-8)
| gpl-3.0 | 159,721,785,282,820,700 | 35.908108 | 80 | 0.60208 | false |
qedsoftware/commcare-hq | corehq/apps/dashboard/models.py | 1 | 13093 | from corehq.apps.export.views import ExportsPermissionsMixin
from django.core.urlresolvers import reverse
from corehq.apps.app_manager.dbaccessors import get_brief_apps_in_domain
from corehq.apps.reports.models import ReportConfig, FormExportSchema, CaseExportSchema
from dimagi.utils.decorators.memoized import memoized
class TileConfigurationError(Exception):
pass
class TileType(object):
ICON = 'icon'
PAGINATE = 'paginate'
class Tile(object):
"""This class creates the tile and its context
when it's called by Django Angular's Remote Method Invocation.
"""
def __init__(self, tile_config, request, in_data):
if not isinstance(tile_config, TileConfiguration):
raise TileConfigurationError(
"tile_config must be an instance of TileConfiguration"
)
self.tile_config = tile_config
self.request = request
# this is the data provided by Django Angular's Remote Method Invocation
self.in_data = in_data
@property
def is_visible(self):
"""Whether or not the tile is visible on the dashboard (permissions).
:return: Boolean
"""
return bool(self.tile_config.visibility_check(self.request))
@property
@memoized
def context_processor(self):
return self.tile_config.context_processor_class(
self.tile_config, self.request, self.in_data
)
@property
def context(self):
"""This is sent back to the Angular JS controller created the remote
Remote Method Invocation of the Dashboard view.
:return: dict
"""
tile_context = {
'slug': self.tile_config.slug,
'helpText': self.tile_config.help_text,
'analytics': {
'usage_label': self.tile_config.analytics_usage_label,
'workflow_labels': self.tile_config.analytics_workflow_labels,
}
}
tile_context.update(self.context_processor.context)
return tile_context
class TileConfiguration(object):
def __init__(self, title, slug, icon, context_processor_class,
url=None, urlname=None, is_external_link=False,
visibility_check=None, url_generator=None,
help_text=None, analytics_usage_label=None,
analytics_workflow_labels=None):
"""
:param title: The title of the tile
:param slug: The tile's slug
:param icon: The class of the icon
        :param context_processor_class: A subclass of BaseTileContextProcessor
:param url: the url that the icon will link to
:param urlname: the urlname of the view that the icon will link to
:param is_external_link: True if the tile opens links in new window/tab
:param visibility_check: (optional) a lambda that accepts a request
and urlname and returns a boolean value if the tile is visible to the
user.
:param url_generator: a lambda that accepts a request and returns
a string that is the url the tile will take the user to if it's clicked
:param help_text: (optional) text that will appear on hover of tile
:param analytics_usage_label: (optional) label to be used in usage
analytics event tracking.
:param analytics_workflow_labels: (optional) label to be used in workflow
analytics event tracking.
"""
if not issubclass(context_processor_class, BaseTileContextProcessor):
raise TileConfigurationError(
"context processor must be subclass of BaseTileContextProcessor"
)
self.context_processor_class = context_processor_class
self.title = title
self.slug = slug
self.icon = icon
self.url = url
self.urlname = urlname
self.is_external_link = is_external_link
self.visibility_check = (visibility_check
or self._default_visibility_check)
self.url_generator = url_generator or self._default_url_generator
self.help_text = help_text
self.analytics_usage_label = analytics_usage_label
self.analytics_workflow_labels = analytics_workflow_labels if analytics_workflow_labels is not None else []
@property
def ng_directive(self):
return self.context_processor_class.tile_type
def get_url(self, request):
if self.urlname is not None:
return self.url_generator(self.urlname, request)
return self.url
@staticmethod
def _default_url_generator(urlname, request):
return reverse(urlname, args=[request.domain])
@staticmethod
def _default_visibility_check(request):
return True
class BaseTileContextProcessor(object):
tile_type = None
def __init__(self, tile_config, request, in_data):
"""
:param tile_config: An instance of TileConfiguration
:param request: An instance of HttpRequest
:param in_data: A dictionary provided by Django Angular's
Remote Method Invocation
"""
self.request = request
self.tile_config = tile_config
self.in_data = in_data
@property
def context(self):
"""This is the context specific to the type of tile we're creating.
:return: dict
"""
raise NotImplementedError('context must be overridden')
class IconContext(BaseTileContextProcessor):
"""This type of tile is just an icon with a link to another page on HQ
or an external link (like the help site).
"""
tile_type = TileType.ICON
@property
def context(self):
return {
'url': self.tile_config.get_url(self.request),
'icon': self.tile_config.icon,
'isExternal': self.tile_config.is_external_link,
}
class BasePaginatedTileContextProcessor(BaseTileContextProcessor):
"""A resource for serving data to the Angularjs PaginatedTileController
for the hq.dashboard Angular JS module.
To use, subclass this and override :total: and :paginated_items: properties.
"""
tile_type = TileType.PAGINATE
@property
def context(self):
return {
'pagination': self.pagination_context,
'default': {
'show': self.tile_config.icon is not None,
'icon': self.tile_config.icon,
'url': self.tile_config.get_url(self.request),
},
}
@property
def pagination_data(self):
"""The data we READ to figure out the current pagination state.
:return: dict
"""
return self.in_data['pagination']
@property
def limit(self):
"""The maximum number of items for this page.
:return: integer
"""
return self.pagination_data.get('limit', 5)
@property
def current_page(self):
"""The current page that the paginator is on.
:return: integer
"""
return self.pagination_data.get('currentPage', 1)
@property
def skip(self):
"""The number of items to skip over to get to the current page in
the list of paginated items (or in the queryset).
:return: integer
"""
return (self.current_page - 1) * self.limit
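        # e.g. limit=5, currentPage=3 -> skip = (3 - 1) * 5 = 10, so the
        # paginator serves items 11-15 of the full list for page three.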
@property
def pagination_context(self):
return {
'total': self.total,
'limit': self.limit,
'currentPage': self.current_page,
'paginatedItems': list(self.paginated_items),
}
@staticmethod
def _fmt_item(name,
url,
description=None,
full_name=None,
secondary_url=None,
secondary_url_icon=None):
"""This is the format that the paginator expects items to be in
so that the template can be fully rendered.
:param name: string
:param url: string
:param description: string. optional.
If present, a popover will appear to the left of the list item.
:param full_name: string. optional.
If present, set the popover title.
:param secondary_url: string. optional.
:param secondary_url_icon: string. optional.
        If these two values are present, display an icon that links to a secondary url when the line is hovered.
:return:
"""
return {
'name_full': full_name or name,
'name': name,
'description': description,
'url': url,
'secondary_url': secondary_url,
'secondary_url_icon': secondary_url_icon
}
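        # Example with illustrative values:
        #     _fmt_item('Report', '/r/1', description='Weekly')
        #     -> {'name_full': 'Report', 'name': 'Report',
        #         'description': 'Weekly', 'url': '/r/1',
        #         'secondary_url': None, 'secondary_url_icon': None}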
@property
def total(self):
"""The total number of objects being paginated over.
:return: integer
"""
raise NotImplementedError('total must return an int')
@property
def paginated_items(self):
"""The items (as dictionaries/objects) to be passed to the angularjs
template for rendering. It's recommended that you use the
_fmt_item() helper function to return the correctly formatted dict
for each item.
:return: list of dicts formatted with _fmt_item
"""
raise NotImplementedError('pagination must be overridden')
class ReportsPaginatedContext(BasePaginatedTileContextProcessor):
"""Generates the Paginated context for the Reports Tile.
"""
@property
def total(self):
key = ["name", self.request.domain, self.request.couch_user._id]
results = ReportConfig.get_db().view(
'reportconfig/configs_by_domain',
include_docs=False,
startkey=key,
endkey=key+[{}],
reduce=True,
).all()
return results[0]['value'] if results else 0
@property
def paginated_items(self):
reports = ReportConfig.by_domain_and_owner(
self.request.domain, self.request.couch_user._id,
limit=self.limit, skip=self.skip
)
for report in reports:
yield self._fmt_item(
report.name,
report.url,
description="%(desc)s (%(date)s)" % {
'desc': report.description,
'date': report.date_description,
},
full_name=report.full_name
)
class AppsPaginatedContext(BasePaginatedTileContextProcessor):
"""Generates the Paginated context for the Applications Tile.
"""
secondary_url_icon = "fa fa-download"
@property
def total(self):
# todo: optimize this at some point. unfortunately applications_brief
# doesn't have a reduce view and for now we'll avoid refactoring.
return len(self.applications)
@property
@memoized
def applications(self):
return get_brief_apps_in_domain(self.request.domain)
@property
def paginated_items(self):
def _get_app_url(app):
return (
_get_view_app_url(app)
if self.request.couch_user.can_edit_apps()
else _get_release_manager_url(app)
)
def _get_view_app_url(app):
return reverse('view_app', args=[self.request.domain, app.get_id])
def _get_release_manager_url(app):
return reverse('release_manager', args=[self.request.domain, app.get_id])
apps = self.applications[self.skip:self.skip + self.limit]
return [self._fmt_item(a.name,
_get_app_url(a),
None, # description
None, # full_name
_get_release_manager_url(a),
self.secondary_url_icon) for a in apps]
class DataPaginatedContext(BasePaginatedTileContextProcessor, ExportsPermissionsMixin):
"""Generates the Paginated context for the Data Tile."""
domain = None
def __init__(self, tile_config, request, in_data):
self.domain = request.domain
super(DataPaginatedContext, self).__init__(tile_config, request, in_data)
@property
def total(self):
return len(self.form_exports) + len(self.case_exports)
@property
@memoized
def form_exports(self):
exports = []
if self.has_edit_permissions:
exports = FormExportSchema.get_stale_exports(self.request.domain)
return exports
@property
@memoized
def case_exports(self):
exports = []
if self.has_edit_permissions:
exports = CaseExportSchema.get_stale_exports(self.domain)
return exports
@property
def paginated_items(self):
exports = (self.form_exports + self.case_exports)[self.skip:self.skip + self.limit]
for export in exports:
urlname = 'export_download_forms' if isinstance(export, FormExportSchema) else 'export_download_cases'
yield self._fmt_item(
export.name,
reverse(urlname, args=(self.request.domain, export.get_id))
)
| bsd-3-clause | -4,757,006,343,195,167,000 | 33.546174 | 115 | 0.610326 | false |
faassen/morepath | morepath/tests/test_path_directive.py | 1 | 48159 | # -*- coding: utf-8 -*-
import dectate
import morepath
from morepath.converter import Converter
from morepath.error import (
DirectiveReportError, ConfigError, LinkError, TrajectError)
from morepath.compat import text_type
from webtest import TestApp as Client
import pytest
def test_simple_path_one_step():
class app(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@app.path(model=Model, path='simple')
def get_model():
return Model()
@app.view(model=Model)
def default(self, request):
return "View"
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/simple')
assert response.body == b'View'
response = c.get('/simple/link')
assert response.body == b'http://localhost/simple'
def test_simple_path_two_steps():
class app(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@app.path(model=Model, path='one/two')
def get_model():
return Model()
@app.view(model=Model)
def default(self, request):
return "View"
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/one/two')
assert response.body == b'View'
response = c.get('/one/two/link')
assert response.body == b'http://localhost/one/two'
def test_variable_path_one_step():
class app(morepath.App):
pass
class Model(object):
def __init__(self, name):
self.name = name
@app.path(model=Model, path='{name}')
def get_model(name):
return Model(name)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.name
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/foo')
assert response.body == b'View: foo'
response = c.get('/foo/link')
assert response.body == b'http://localhost/foo'
def test_variable_path_two_steps():
class app(morepath.App):
pass
class Model(object):
def __init__(self, name):
self.name = name
@app.path(model=Model, path='document/{name}')
def get_model(name):
return Model(name)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.name
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/document/foo')
assert response.body == b'View: foo'
response = c.get('/document/foo/link')
assert response.body == b'http://localhost/document/foo'
def test_variable_path_two_variables():
class app(morepath.App):
pass
class Model(object):
def __init__(self, name, version):
self.name = name
self.version = version
@app.path(model=Model, path='{name}-{version}')
def get_model(name, version):
return Model(name, version)
@app.view(model=Model)
def default(self, request):
return "View: %s %s" % (self.name, self.version)
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/foo-one')
assert response.body == b'View: foo one'
response = c.get('/foo-one/link')
assert response.body == b'http://localhost/foo-one'
def test_variable_path_explicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='{id}',
converters=dict(id=Converter(int)))
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/1/link')
assert response.body == b'http://localhost/1'
response = c.get('/broken', status=404)
def test_variable_path_implicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='{id}')
def get_model(id=0):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/1/link')
assert response.body == b'http://localhost/1'
response = c.get('/broken', status=404)
def test_variable_path_explicit_trumps_implicit():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='{id}',
converters=dict(id=Converter(int)))
def get_model(id='foo'):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/1/link')
assert response.body == b'http://localhost/1'
response = c.get('/broken', status=404)
def test_url_parameter_explicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='/',
converters=dict(id=Converter(int)))
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/link?id=1')
assert response.body == b'http://localhost/?id=1'
response = c.get('/?id=broken', status=400)
response = c.get('/')
assert response.body in \
(b"View: None (<type 'NoneType'>)", b"View: None (<class 'NoneType'>)")
def test_url_parameter_explicit_converter_get_converters():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
def get_converters():
return dict(id=Converter(int))
@app.path(model=Model, path='/', get_converters=get_converters)
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/link?id=1')
assert response.body == b'http://localhost/?id=1'
response = c.get('/?id=broken', status=400)
response = c.get('/')
assert response.body in \
(b"View: None (<type 'NoneType'>)", b"View: None (<class 'NoneType'>)")
def test_url_parameter_get_converters_overrides_converters():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
def get_converters():
return dict(id=Converter(int))
    @app.path(model=Model, path='/', converters={'id': type(u"")},
get_converters=get_converters)
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/link?id=1')
assert response.body == b'http://localhost/?id=1'
response = c.get('/?id=broken', status=400)
response = c.get('/')
assert response.body in \
(b"View: None (<type 'NoneType'>)", b"View: None (<class 'NoneType'>)")
def test_url_parameter_implicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='/')
def get_model(id=0):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/link?id=1')
assert response.body == b'http://localhost/?id=1'
response = c.get('/?id=broken', status=400)
response = c.get('/')
assert response.body in \
(b"View: 0 (<type 'int'>)", b"View: 0 (<class 'int'>)")
def test_url_parameter_explicit_trumps_implicit():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='/',
converters=dict(id=Converter(int)))
def get_model(id='foo'):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s (%s)" % (self.id, type(self.id))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=1')
assert response.body in \
(b"View: 1 (<type 'int'>)", b"View: 1 (<class 'int'>)")
response = c.get('/link?id=1')
assert response.body == b'http://localhost/?id=1'
response = c.get('/?id=broken', status=400)
response = c.get('/')
assert response.body in \
(b"View: foo (<type 'str'>)", b"View: foo (<class 'str'>)")
def test_decode_encode():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
def my_decode(s):
return s + 'ADD'
def my_encode(s):
return s[:-len('ADD')]
@app.path(model=Model, path='/',
converters=dict(id=Converter(my_decode, my_encode)))
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.id
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=foo')
assert response.body == b"View: fooADD"
response = c.get('/link?id=foo')
assert response.body == b'http://localhost/?id=foo'
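# Note: the pair above round-trips -- my_encode(my_decode('foo')) == 'foo' --
# which is what lets request.link() reproduce the original '?id=foo' query.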
def test_unknown_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, d):
self.d = d
class Unknown(object):
pass
@app.path(model=Model, path='/')
def get_model(d=Unknown()):
return Model(d)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.d
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
with pytest.raises(DirectiveReportError):
app.commit()
def test_not_all_path_variables_arguments_of_model_factory():
class App(morepath.App):
pass
class Model(object):
def __init__(self, foo):
self.foo = foo
class Unknown(object):
pass
@App.path(model=Model, path='/{foo}/{bar}')
def get_model(foo):
return Model(foo)
with pytest.raises(DirectiveReportError) as e:
App.commit()
assert str(e.value).startswith('Variable in path not found in function '
'signature: bar')
def test_unknown_explicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, d):
self.d = d
class Unknown(object):
pass
@app.path(model=Model, path='/', converters={'d': Unknown})
def get_model(d):
return Model(d)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.d
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
with pytest.raises(DirectiveReportError):
app.commit()
def test_default_date_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, d):
self.d = d
from datetime import date
@app.path(model=Model, path='/')
def get_model(d=date(2011, 1, 1)):
return Model(d)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.d
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?d=20121110')
assert response.body == b"View: 2012-11-10"
response = c.get('/')
assert response.body == b"View: 2011-01-01"
response = c.get('/link?d=20121110')
assert response.body == b'http://localhost/?d=20121110'
response = c.get('/link')
assert response.body == b'http://localhost/?d=20110101'
response = c.get('/?d=broken', status=400)
def test_default_datetime_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, d):
self.d = d
from datetime import datetime
@app.path(model=Model, path='/')
def get_model(d=datetime(2011, 1, 1, 10, 30)):
return Model(d)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.d
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?d=20121110T144530')
assert response.body == b"View: 2012-11-10 14:45:30"
response = c.get('/')
assert response.body == b"View: 2011-01-01 10:30:00"
response = c.get('/link?d=20121110T144500')
assert response.body == b'http://localhost/?d=20121110T144500'
response = c.get('/link')
assert response.body == b'http://localhost/?d=20110101T103000'
c.get('/?d=broken', status=400)
def test_custom_date_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, d):
self.d = d
from datetime import date
from time import strptime, mktime
def date_decode(s):
return date.fromtimestamp(mktime(strptime(s, '%d-%m-%Y')))
def date_encode(d):
return d.strftime('%d-%m-%Y')
@app.converter(type=date)
def date_converter():
return Converter(date_decode, date_encode)
@app.path(model=Model, path='/')
def get_model(d=date(2011, 1, 1)):
return Model(d)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.d
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?d=10-11-2012')
assert response.body == b"View: 2012-11-10"
response = c.get('/')
assert response.body == b"View: 2011-01-01"
response = c.get('/link?d=10-11-2012')
assert response.body == b'http://localhost/?d=10-11-2012'
response = c.get('/link')
assert response.body == b'http://localhost/?d=01-01-2011'
response = c.get('/?d=broken', status=400)
def test_variable_path_parameter_required_no_default():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='', required=['id'])
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.id
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=a')
assert response.body == b"View: a"
response = c.get('/', status=400)
def test_variable_path_parameter_required_with_default():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='', required=['id'])
def get_model(id='b'):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.id
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?id=a')
assert response.body == b"View: a"
response = c.get('/', status=400)
def test_type_hints_and_converters():
class app(morepath.App):
pass
class Model(object):
def __init__(self, d):
self.d = d
from datetime import date
@app.path(model=Model, path='', converters=dict(d=date))
def get_model(d):
return Model(d)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.d
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?d=20140120')
assert response.body == b"View: 2014-01-20"
response = c.get('/link?d=20140120')
assert response.body == b'http://localhost/?d=20140120'
def test_link_for_none_means_no_parameter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Model, path='')
def get_model(id):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "View: %s" % self.id
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/')
assert response.body == b"View: None"
response = c.get('/link')
assert response.body == b'http://localhost/'
def test_path_and_url_parameter_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, id, param):
self.id = id
self.param = param
from datetime import date
@app.path(model=Model, path='/{id}', converters=dict(param=date))
def get_model(id=0, param=None):
return Model(id, param)
@app.view(model=Model)
def default(self, request):
return "View: %s %s" % (self.id, self.param)
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/1/link')
assert response.body == b'http://localhost/1'
def test_path_converter_fallback_on_view():
class app(morepath.App):
pass
class Root(object):
pass
class Model(object):
def __init__(self, id):
self.id = id
@app.path(model=Root, path='')
def get_root():
return Root()
@app.path(model=Model, path='/{id}')
def get_model(id=0):
return Model(id)
@app.view(model=Model)
def default(self, request):
return "Default view for %s" % self.id
@app.view(model=Root, name='named')
def named(self, request):
return "Named view on root"
c = Client(app())
response = c.get('/1')
assert response.body == b'Default view for 1'
response = c.get('/named')
assert response.body == b'Named view on root'
def test_root_named_link():
class app(morepath.App):
pass
@app.path(path='')
class Root(object):
pass
@app.view(model=Root)
def default(self, request):
return request.link(self, 'foo')
c = Client(app())
response = c.get('/')
assert response.body == b'http://localhost/foo'
def test_path_class_and_model_argument():
class app(morepath.App):
pass
class Foo(object):
pass
@app.path(path='', model=Foo)
class Root(object):
pass
with pytest.raises(ConfigError):
app.commit()
def test_path_no_class_and_no_model_argument():
class app(morepath.App):
pass
@app.path(path='')
def get_foo():
return None
with pytest.raises(ConfigError):
app.commit()
def test_url_parameter_list():
class app(morepath.App):
pass
class Model(object):
def __init__(self, item):
self.item = item
@app.path(model=Model, path='/', converters={'item': [int]})
def get_model(item):
return Model(item)
@app.view(model=Model)
def default(self, request):
return repr(self.item)
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?item=1&item=2')
assert response.body == b"[1, 2]"
response = c.get('/link?item=1&item=2')
assert response.body == b'http://localhost/?item=1&item=2'
response = c.get('/link')
assert response.body == b'http://localhost/'
response = c.get('/?item=broken&item=1', status=400)
response = c.get('/')
assert response.body == b"[]"
def test_url_parameter_list_empty():
class app(morepath.App):
pass
class Model(object):
def __init__(self, item):
self.item = item
@app.path(model=Model, path='/', converters={'item': []})
def get_model(item):
return Model(item)
@app.view(model=Model)
def default(self, request):
return repr(self.item)
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?item=a&item=b')
assert response.body in (b"[u'a', u'b']", b"['a', 'b']")
response = c.get('/link?item=a&item=b')
assert response.body == b'http://localhost/?item=a&item=b'
response = c.get('/link')
assert response.body == b'http://localhost/'
response = c.get('/')
assert response.body == b"[]"
def test_url_parameter_list_explicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, item):
self.item = item
@app.path(model=Model, path='/', converters={'item': [Converter(int)]})
def get_model(item):
return Model(item)
@app.view(model=Model)
def default(self, request):
return repr(self.item)
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?item=1&item=2')
assert response.body == b"[1, 2]"
response = c.get('/link?item=1&item=2')
assert response.body == b'http://localhost/?item=1&item=2'
response = c.get('/link')
assert response.body == b'http://localhost/'
response = c.get('/?item=broken&item=1', status=400)
response = c.get('/')
assert response.body == b"[]"
def test_url_parameter_list_unknown_explicit_converter():
class app(morepath.App):
pass
class Model(object):
def __init__(self, item):
self.item = item
class Unknown(object):
pass
@app.path(model=Model, path='/', converters={'item': [Unknown]})
def get_model(item):
return Model(item)
with pytest.raises(DirectiveReportError):
app.commit()
def test_url_parameter_list_but_only_one_allowed():
class app(morepath.App):
pass
class Model(object):
def __init__(self, item):
self.item = item
@app.path(model=Model, path='/', converters={'item': int})
def get_model(item):
return Model(item)
@app.view(model=Model)
def default(self, request):
return repr(self.item)
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
c.get('/?item=1&item=2', status=400)
c.get('/link?item=1&item=2', status=400)
def test_extra_parameters():
class app(morepath.App):
pass
class Model(object):
def __init__(self, extra_parameters):
self.extra_parameters = extra_parameters
@app.path(model=Model, path='/')
def get_model(extra_parameters):
return Model(extra_parameters)
@app.view(model=Model)
def default(self, request):
return repr(sorted(self.extra_parameters.items()))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?a=A&b=B')
assert response.body in \
(b"[(u'a', u'A'), (u'b', u'B')]", b"[('a', 'A'), ('b', 'B')]")
response = c.get('/link?a=A&b=B')
assert sorted(response.body[len('http://localhost/?'):].split(b"&")) == [
b'a=A', b'b=B']
def test_extra_parameters_with_get_converters():
class app(morepath.App):
pass
class Model(object):
def __init__(self, extra_parameters):
self.extra_parameters = extra_parameters
def get_converters():
return {
'a': int,
'b': type(u""),
}
@app.path(model=Model, path='/', get_converters=get_converters)
def get_model(extra_parameters):
return Model(extra_parameters)
@app.view(model=Model)
def default(self, request):
return repr(sorted(self.extra_parameters.items()))
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/?a=1&b=B')
assert response.body in \
(b"[(u'a', 1), (u'b', u'B')]", b"[('a', 1), ('b', 'B')]")
response = c.get('/link?a=1&b=B')
assert sorted(response.body[len('http://localhost/?'):].split(b"&")) == [
b'a=1', b'b=B']
c.get('/?a=broken&b=B', status=400)
def test_script_name():
class app(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@app.path(model=Model, path='simple')
def get_model():
return Model()
@app.view(model=Model)
def default(self, request):
return "View"
@app.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(app())
response = c.get('/prefix/simple',
extra_environ=dict(SCRIPT_NAME='/prefix'))
assert response.body == b'View'
response = c.get('/prefix/simple/link',
extra_environ=dict(SCRIPT_NAME='/prefix'))
assert response.body == b'http://localhost/prefix/simple'
def test_sub_path_different_variable():
# See discussion in https://github.com/morepath/morepath/issues/155
class App(morepath.App):
pass
class Foo(object):
def __init__(self, id):
self.id = id
class Bar(object):
def __init__(self, id, foo):
self.id = id
self.foo = foo
@App.path(model=Foo, path='{id}')
def get_foo(id):
return Foo(id)
@App.path(model=Bar, path='{foo_id}/{bar_id}')
def get_client(foo_id, bar_id):
return Bar(bar_id, Foo(foo_id))
@App.view(model=Foo)
def default_sbar(self, request):
return "M: %s" % self.id
@App.view(model=Bar)
def default_bar(self, request):
return "S: %s %s" % (self.id, self.foo.id)
c = Client(App())
with pytest.raises(TrajectError) as ex:
response = c.get('/a')
assert response.body == b'M: a'
response = c.get('/a/b')
assert response.body == b'S: b a'
assert str(ex.value) == 'step {id} and {foo_id} are in conflict'
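# NOTE on test_sub_path_different_variable (commentary added, not from the
# original suite): registering '{id}' and '{foo_id}/{bar_id}' makes the first
# path step ambiguous, so TrajectError is raised while resolving '/a' and the
# asserts inside the pytest.raises block above never execute; only the
# conflict-message check does.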
def test_absorb_path():
class app(morepath.App):
pass
class Root(object):
pass
class Model(object):
def __init__(self, absorb):
self.absorb = absorb
@app.path(model=Root, path='')
def get_root():
return Root()
@app.path(model=Model, path='foo', absorb=True)
def get_model(absorb):
return Model(absorb)
@app.view(model=Model)
def default(self, request):
return "%s" % self.absorb
@app.view(model=Root)
def default_root(self, request):
return request.link(Model('a/b'))
c = Client(app())
response = c.get('/foo/a')
assert response.body == b'a'
response = c.get('/foo')
assert response.body == b''
response = c.get('/foo/a/b')
assert response.body == b'a/b'
# link to a/b absorb
response = c.get('/')
assert response.body == b'http://localhost/foo/a/b'
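# NOTE (behaviour exercised above; commentary added for readability): with
# absorb=True the path directive consumes any remaining path segments into the
# `absorb` argument ('' when nothing follows), and request.link() reproduces
# the absorbed remainder verbatim.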
def test_absorb_path_with_variables():
class app(morepath.App):
pass
class Root(object):
pass
class Model(object):
def __init__(self, id, absorb):
self.id = id
self.absorb = absorb
@app.path(model=Root, path='')
def get_root():
return Root()
@app.path(model=Model, path='{id}', absorb=True)
def get_model(id, absorb):
return Model(id, absorb)
@app.view(model=Model)
def default(self, request):
return "I:%s A:%s" % (self.id, self.absorb)
@app.view(model=Root)
def default_root(self, request):
return request.link(Model('foo', 'a/b'))
c = Client(app())
response = c.get('/foo/a')
assert response.body == b'I:foo A:a'
response = c.get('/foo')
assert response.body == b'I:foo A:'
response = c.get('/foo/a/b')
assert response.body == b'I:foo A:a/b'
# link to a/b absorb
response = c.get('/')
assert response.body == b'http://localhost/foo/a/b'
def test_absorb_path_explicit_subpath_ignored():
class app(morepath.App):
pass
class Root(object):
pass
class Model(object):
def __init__(self, absorb):
self.absorb = absorb
class Another(object):
pass
@app.path(model=Root, path='')
def get_root():
return Root()
@app.path(model=Model, path='foo', absorb=True)
def get_model(absorb):
return Model(absorb)
@app.path(model=Another, path='foo/another')
def get_another():
return Another()
@app.view(model=Model)
def default(self, request):
return "%s" % self.absorb
@app.view(model=Another)
def default_another(self, request):
return "Another"
@app.view(model=Root)
def default_root(self, request):
return request.link(Another())
c = Client(app())
response = c.get('/foo/a')
assert response.body == b'a'
response = c.get('/foo/another')
assert response.body == b'another'
# link to another still works XXX is this wrong?
response = c.get('/')
assert response.body == b'http://localhost/foo/another'
def test_absorb_path_root():
class app(morepath.App):
pass
class Model(object):
def __init__(self, absorb):
self.absorb = absorb
@app.path(model=Model, path='', absorb=True)
def get_model(absorb):
return Model(absorb)
@app.view(model=Model)
def default(self, request):
return "A:%s L:%s" % (self.absorb, request.link(self))
c = Client(app())
response = c.get('/a')
assert response.body == b'A:a L:http://localhost/a'
response = c.get('/')
assert response.body == b'A: L:http://localhost/'
response = c.get('/a/b')
assert response.body == b'A:a/b L:http://localhost/a/b'
def test_path_explicit_variables():
class App(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.store_id = id
@App.path(model=Model, path='models/{id}',
variables=lambda m: {'id': m.store_id})
def get_model(id):
return Model(id)
@App.view(model=Model)
def default(self, request):
return request.link(self)
c = Client(App())
response = c.get('/models/1')
assert response.body == b'http://localhost/models/1'
def test_path_explicit_variables_app_arg():
class App(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.store_id = id
def my_variables(app, m):
assert isinstance(app, App)
return {'id': m.store_id}
@App.path(model=Model, path='models/{id}', variables=my_variables)
def get_model(id):
return Model(id)
@App.view(model=Model)
def default(self, request):
return request.link(self)
c = Client(App())
response = c.get('/models/1')
assert response.body == b'http://localhost/models/1'
def test_error_when_path_variable_is_none():
class App(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.store_id = id
@App.path(model=Model, path='models/{id}',
variables=lambda m: {'id': None})
def get_model(id):
return Model(id)
@App.view(model=Model)
def default(self, request):
return request.link(self)
c = Client(App())
with pytest.raises(LinkError):
c.get('/models/1')
def test_error_when_path_variable_is_missing():
class App(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.store_id = id
@App.path(model=Model, path='models/{id}',
variables=lambda m: {})
def get_model(id):
return Model(id)
@App.view(model=Model)
def default(self, request):
return request.link(self)
c = Client(App())
with pytest.raises(KeyError):
c.get('/models/1')
def test_error_when_path_variables_isnt_dict():
class App(morepath.App):
pass
class Model(object):
def __init__(self, id):
self.store_id = id
@App.path(model=Model, path='models/{id}',
variables=lambda m: 'nondict')
def get_model(id):
return Model(id)
@App.view(model=Model)
def default(self, request):
return request.link(self)
c = Client(App())
with pytest.raises(LinkError):
c.get('/models/1')
def test_resolve_path_method_on_request_same_app():
class App(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@App.path(model=Model, path='simple')
def get_model():
return Model()
@App.view(model=Model)
def default(self, request):
return text_type(isinstance(request.resolve_path('simple'), Model))
@App.view(model=Model, name='extra')
def extra(self, request):
return text_type(request.resolve_path('nonexistent') is None)
@App.view(model=Model, name='appnone')
def appnone(self, request):
return request.resolve_path('simple', app=None)
c = Client(App())
response = c.get('/simple')
assert response.body == b'True'
response = c.get('/simple/extra')
assert response.body == b'True'
with pytest.raises(LinkError):
c.get('/simple/appnone')
def test_resolve_path_method_on_request_different_app():
class App(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@App.path(model=Model, path='simple')
def get_model():
return Model()
@App.view(model=Model)
def default(self, request):
obj = request.resolve_path('p', app=request.app.child('sub'))
return text_type(isinstance(obj, SubModel))
class Sub(morepath.App):
pass
class SubModel(object):
pass
@Sub.path(model=SubModel, path='p')
def get_sub_model():
return SubModel()
@App.mount(path='sub', app=Sub)
def mount_sub():
return Sub()
c = Client(App())
response = c.get('/simple')
assert response.body == b'True'
def test_resolve_path_with_dots_in_url():
class app(morepath.App):
pass
class Root(object):
def __init__(self, absorb):
self.absorb = absorb
@app.path(model=Root, path='root', absorb=True)
def get_root(absorb):
return Root(absorb)
@app.view(model=Root)
def default(self, request):
return "%s" % self.absorb
c = Client(app())
response = c.get('/root/x/../child')
assert response.body == b'child'
response = c.get('/root/x/%2E%2E/child')
assert response.body == b'child'
response = c.get('/root/%2E%2E/%2E%2E/root')
assert response.body == b''
response = c.get('/root/%2E%2E/%2E%2E/test', expect_errors=True)
assert response.status_code == 404
def test_quoting_link_generation():
class App(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@App.path(model=Model, path='sim?ple')
def get_model():
return Model()
@App.view(model=Model)
def default(self, request):
return "View"
@App.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(App())
response = c.get('/sim%3Fple')
assert response.body == b'View'
response = c.get('/sim%3Fple/link')
assert response.body == b'http://localhost/sim%3Fple'
def test_quoting_link_generation_umlaut():
class App(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@App.path(model=Model, path=u'simëple')
def get_model():
return Model()
@App.view(model=Model)
def default(self, request):
return "View"
@App.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(App())
response = c.get('/sim%C3%ABple')
assert response.body == b'View'
response = c.get('/sim%C3%ABple/link')
assert response.body == b'http://localhost/sim%C3%ABple'
def test_quoting_link_generation_tilde():
# tilde is an unreserved character according to
# https://www.ietf.org/rfc/rfc3986.txt but urllib.quote
# quotes it anyway. We test whether our workaround using
# the safe parameter works
class App(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@App.path(model=Model, path='sim~ple')
def get_model():
return Model()
@App.view(model=Model)
def default(self, request):
return "View"
@App.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(App())
response = c.get('/sim~ple')
assert response.body == b'View'
response = c.get('/sim~ple/link')
assert response.body == b'http://localhost/sim~ple'
def test_parameter_quoting():
class App(morepath.App):
pass
class Model(object):
def __init__(self, s):
self.s = s
@App.path(model=Model, path='')
def get_model(s):
return Model(s)
@App.view(model=Model)
def default(self, request):
return u"View: %s" % self.s
@App.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(App())
response = c.get('/?s=sim%C3%ABple')
assert response.body == u"View: simëple".encode('utf-8')
response = c.get('/link?s=sim%C3%ABple')
assert response.body == b'http://localhost/?s=sim%C3%ABple'
def test_parameter_quoting_tilde():
class App(morepath.App):
pass
class Model(object):
def __init__(self, s):
self.s = s
@App.path(model=Model, path='')
def get_model(s):
return Model(s)
@App.view(model=Model)
def default(self, request):
return u"View: %s" % self.s
@App.view(model=Model, name='link')
def link(self, request):
return request.link(self)
c = Client(App())
response = c.get('/?s=sim~ple')
assert response.body == u"View: sim~ple".encode('utf-8')
response = c.get('/link?s=sim~ple')
assert response.body == b'http://localhost/?s=sim~ple'
def test_class_link_without_variables():
class App(morepath.App):
pass
class Model(object):
pass
@App.path(model=Model, path='/foo')
def get_model():
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Model)
c = Client(App())
response = c.get('/foo')
assert response.body == b"http://localhost/foo"
def test_class_link_no_app():
class App(morepath.App):
pass
class Model(object):
pass
@App.path(model=Model, path='/foo')
def get_model():
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Model, app=None)
c = Client(App())
with pytest.raises(LinkError):
c.get('/foo')
def test_class_link_with_variables():
class App(morepath.App):
pass
class Model(object):
pass
@App.path(model=Model, path='/foo/{x}')
def get_model(x):
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Model, variables={'x': 'X'})
c = Client(App())
response = c.get('/foo/3')
assert response.body == b"http://localhost/foo/X"
def test_class_link_with_missing_variables():
class App(morepath.App):
pass
class Model(object):
pass
@App.path(model=Model, path='/foo/{x}')
def get_model(x):
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Model, variables={})
c = Client(App())
with pytest.raises(KeyError):
c.get('/foo/3')
def test_class_link_with_extra_variable():
class App(morepath.App):
pass
class Model(object):
pass
@App.path(model=Model, path='/foo/{x}')
def get_model(x):
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Model, variables={'x': 'X', 'y': 'Y'})
c = Client(App())
response = c.get('/foo/3')
assert response.body == b"http://localhost/foo/X"
def test_class_link_with_url_parameter_variable():
class App(morepath.App):
pass
class Model(object):
pass
@App.path(model=Model, path='/foo/{x}')
def get_model(x, y):
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Model, variables={'x': 'X', 'y': 'Y'})
c = Client(App())
response = c.get('/foo/3')
assert response.body == b"http://localhost/foo/X?y=Y"
def test_class_link_with_subclass():
class App(morepath.App):
pass
class Model(object):
pass
class Sub(Model):
pass
@App.path(model=Model, path='/foo/{x}')
def get_model(x):
return Model()
@App.view(model=Model)
def link(self, request):
return request.class_link(Sub, variables={'x': 'X'})
c = Client(App())
response = c.get('/foo/3')
assert response.body == b"http://localhost/foo/X"
def test_absorb_class_path():
class App(morepath.App):
pass
class Root(object):
pass
class Model(object):
def __init__(self, absorb):
self.absorb = absorb
@App.path(model=Root, path='')
def get_root():
return Root()
@App.path(model=Model, path='foo', absorb=True)
def get_model(absorb):
return Model(absorb)
@App.view(model=Model)
def default(self, request):
return "%s" % self.absorb
@App.view(model=Root)
def default_root(self, request):
return request.class_link(Model, variables={'absorb': 'a/b'})
c = Client(App())
# link to a/b absorb
response = c.get('/')
assert response.body == b'http://localhost/foo/a/b'
def test_absorb_class_path_with_variables():
class App(morepath.App):
pass
class Root(object):
pass
class Model(object):
def __init__(self, id, absorb):
self.id = id
self.absorb = absorb
@App.path(model=Root, path='')
def get_root():
return Root()
@App.path(model=Model, path='{id}', absorb=True)
def get_model(id, absorb):
return Model(id, absorb)
@App.view(model=Model)
def default(self, request):
return "I:%s A:%s" % (self.id, self.absorb)
@App.view(model=Root)
def default_root(self, request):
return request.class_link(Model,
variables=dict(id='foo', absorb='a/b'))
c = Client(App())
# link to a/b absorb
response = c.get('/')
assert response.body == b'http://localhost/foo/a/b'
def test_class_link_extra_parameters():
class App(morepath.App):
pass
class Model(object):
def __init__(self, extra_parameters):
self.extra_parameters = extra_parameters
@App.path(model=Model, path='/')
def get_model(extra_parameters):
return Model(extra_parameters)
@App.view(model=Model)
def default(self, request):
return repr(sorted(self.extra_parameters.items()))
@App.view(model=Model, name='link')
def link(self, request):
return request.class_link(
Model,
variables={'extra_parameters': {'a': 'A', 'b': 'B'}})
c = Client(App())
response = c.get('/link?a=A&b=B')
assert sorted(response.body[len('http://localhost/?'):].split(b"&")) == [
b'a=A', b'b=B']
def test_path_on_model_class():
class App(morepath.App):
pass
@App.path('/')
class Model(object):
def __init__(self):
pass
@App.path('/login')
class Login(object):
pass
@App.view(model=Model)
def model_view(self, request):
return "Model"
@App.view(model=Login)
def login_view(self, request):
return "Login"
c = Client(App())
response = c.get('/')
assert response.body == b'Model'
response = c.get('/login')
assert response.body == b'Login'
def test_path_without_model():
class App(morepath.App):
pass
@App.path('/')
def get_path():
pass
with pytest.raises(dectate.DirectiveReportError):
App.commit()
def test_two_path_on_same_model_should_conflict():
class App(morepath.App):
pass
@App.path('/login')
@App.path('/')
class Login(object):
pass
with pytest.raises(dectate.ConflictError):
App.commit()
def test_path_on_same_model_explicit_and_class_should_conflict():
class App(morepath.App):
pass
@App.path('/')
class Login(object):
pass
@App.path('/login', model=Login)
def get_path():
return Login()
with pytest.raises(dectate.ConflictError):
App.commit()
def test_nonexisting_path_too_long_unconsumed():
class App(morepath.App):
pass
class Model(object):
def __init__(self):
pass
@App.path(model=Model, path='simple')
def get_model():
return Model()
@App.view(model=Model)
def default(self, request):
return "View"
c = Client(App())
c.get('/foo/bar/baz', status=404)
| bsd-3-clause | 8,192,586,685,520,216,000 | 22.107965 | 79 | 0.575493 | false |
diofant/diofant | diofant/domains/realfield.py | 1 | 3327 | """Implementation of :class:`RealField` class."""
from __future__ import annotations
import mpmath
from ..core import Float
from ..polys.polyerrors import CoercionFailed
from .characteristiczero import CharacteristicZero
from .field import Field
from .mpelements import MPContext
from .simpledomain import SimpleDomain
class RealField(CharacteristicZero, SimpleDomain, Field):
"""Real numbers up to the given precision."""
rep = 'RR'
is_RealField = True
is_Exact = False
is_Numerical = True
_default_precision = 53
@property
def has_default_precision(self):
return self.precision == self._default_precision
@property
def precision(self):
return self._context.prec
@property
def dps(self):
return self._context.dps
@property
def tolerance(self):
return self._context.tolerance
def __new__(cls, prec=_default_precision, dps=None, tol=None):
context = MPContext(prec, dps, tol)
obj = super().__new__(cls)
try:
obj.dtype = _reals_cache[(context.prec, context.tolerance)]
except KeyError:
_reals_cache[(context.prec, context.tolerance)] = obj.dtype = context.mpf
context._parent = obj
obj._context = context
obj._hash = hash((cls.__name__, obj.dtype, context.prec, context.tolerance))
obj.zero = obj.dtype(0)
obj.one = obj.dtype(1)
return obj
def __getnewargs_ex__(self):
return (), {'prec': self.precision,
'tol': mpmath.mpf(self.tolerance._mpf_)}
def __eq__(self, other):
return (isinstance(other, RealField)
and self.precision == other.precision
and self.tolerance == other.tolerance)
def __hash__(self):
return self._hash
def to_expr(self, element):
return Float(element, self.dps)
def from_expr(self, expr):
number = expr.evalf(self.dps)
if number.is_Number:
return self.dtype(number)
else:
raise CoercionFailed(f'expected real number, got {expr}')
def _from_PythonIntegerRing(self, element, base):
return self.dtype(element)
_from_GMPYIntegerRing = _from_PythonIntegerRing
def _from_PythonRationalField(self, element, base):
return self.dtype(element.numerator) / element.denominator
_from_GMPYRationalField = _from_PythonRationalField
def _from_AlgebraicField(self, element, base):
return self.from_expr(base.to_expr(element))
def _from_RealField(self, element, base):
if self == base:
return element
else:
return self.dtype(element)
def _from_ComplexField(self, element, base):
if not element.imag:
return self.dtype(element.real)
def to_rational(self, element, limit=True):
"""Convert a real number to rational number."""
return self._context.to_rational(element, limit)
def get_exact(self):
from . import QQ
return QQ
def gcd(self, a, b):
return self.one
def almosteq(self, a, b, tolerance=None):
"""Check if ``a`` and ``b`` are almost equal."""
return self._context.almosteq(a, b, tolerance)
_reals_cache: dict[tuple, RealField] = {}
RR = RealField()
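# Usage sketch (illustrative, not part of the diofant sources): RR is the
# default 53-bit domain, e.g.
#     third = RR.one / RR.dtype(3)
#     assert RR.almosteq(third, RR.dtype('0.333333333333333'))
# while RealField(prec=100) constructs a distinct, higher-precision domain.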
| bsd-3-clause | -6,420,955,137,763,290,000 | 25.616 | 85 | 0.621882 | false |
102/rsa | cli.py | 1 | 1981 | import argparse
import rsa
"""
python3 cli.py -f key generate -l 8
python3 cli.py -f message encode -k key_public -d encoded
python3 cli.py -f encoded decode -k key_private -d decoded
"""
def generate(args):
public, private = rsa.get_key_pair(int(args.length))
with open(args.file + '_public', 'w+') as f:
f.write(str(public))
with open(args.file + '_private', 'w+') as f:
f.write(str(private))
def encode(args):
with open(args.public_key, 'r') as f:
public = rsa.PublicKey.fromstring(f.readline().replace('\n', ''))
with open(args.file, 'rb') as f:
message = bytearray(f.read())
with open(args.destination_file, 'wb') as f:
result = public.encrypt(message)
f.write(result)
def decode(args):
with open(args.private_key, 'r') as f:
private = rsa.PrivateKey.fromstring(f.readline().replace('\n', ''))
with open(args.file, 'rb') as f:
message = bytearray(f.read())
with open(args.destination_file, 'wb') as f:
result = private.decrypt(message)
f.write(result)
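# Note (inferred from generate/encode/decode above): each key file is expected
# to hold a single line that rsa.PublicKey.fromstring /
# rsa.PrivateKey.fromstring can parse back; `generate` writes both keys in
# exactly that one-line str() form.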
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', default='key')
subparsers = parser.add_subparsers()
generate_keys = subparsers.add_parser('generate')
generate_keys.add_argument('-l', '--length', required=True, type=int)
generate_keys.set_defaults(func=generate)
encode_parser = subparsers.add_parser('encode')
encode_parser.add_argument('-k', '--public-key', help='File with public key', required=True)
encode_parser.add_argument('-d', '--destination-file', help='Destination file', required=True)
encode_parser.set_defaults(func=encode)
decode_parser = subparsers.add_parser('decode')
decode_parser.add_argument('-k', '--private-key', help='File with private key', required=True)
decode_parser.add_argument('-d', '--destination-file', help='Destination file', required=True)
decode_parser.set_defaults(func=decode)
args = parser.parse_args()
args.func(args)
| unlicense | -665,462,845,845,840,400 | 32.576271 | 94 | 0.669864 | false |
tysonholub/twilio-python | twilio/rest/notify/v1/service/binding.py | 1 | 18370 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
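# Illustrative usage through the public twilio Client (commentary only, not
# part of the generated code; the service SID and device_token below are
# placeholders):
#     binding = client.notify.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
#         .bindings.create(identity='user-1', binding_type='fcm',
#                          address=device_token)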
class BindingList(ListResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, service_sid):
"""
Initialize the BindingList
:param Version version: Version that contains the resource
:param service_sid: The SID of the Service that the resource is associated with
:returns: twilio.rest.notify.v1.service.binding.BindingList
:rtype: twilio.rest.notify.v1.service.binding.BindingList
"""
super(BindingList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, }
self._uri = '/Services/{service_sid}/Bindings'.format(**self._solution)
def create(self, identity, binding_type, address, tag=values.unset,
notification_protocol_version=values.unset,
credential_sid=values.unset, endpoint=values.unset):
"""
Create a new BindingInstance
:param unicode identity: The `identity` value that identifies the new resource's User
:param BindingInstance.BindingType binding_type: The type of the Binding
:param unicode address: The channel-specific address
:param unicode tag: A tag that can be used to select the Bindings to notify
:param unicode notification_protocol_version: The protocol version to use to send the notification
:param unicode credential_sid: The SID of the Credential resource to be used to send notifications to this Binding
:param unicode endpoint: Deprecated
:returns: Newly created BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingInstance
"""
data = values.of({
'Identity': identity,
'BindingType': binding_type,
'Address': address,
'Tag': serialize.map(tag, lambda e: e),
'NotificationProtocolVersion': notification_protocol_version,
'CredentialSid': credential_sid,
'Endpoint': endpoint,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return BindingInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def stream(self, start_date=values.unset, end_date=values.unset,
identity=values.unset, tag=values.unset, limit=None, page_size=None):
"""
Streams BindingInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param date start_date: Only include usage that has occurred on or after this date
:param date end_date: Only include usage that occurred on or before this date
:param unicode identity: The `identity` value of the resources to read
:param unicode tag: Only list Bindings that have all of the specified Tags
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.notify.v1.service.binding.BindingInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
start_date=start_date,
end_date=end_date,
identity=identity,
tag=tag,
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, start_date=values.unset, end_date=values.unset,
identity=values.unset, tag=values.unset, limit=None, page_size=None):
"""
Lists BindingInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param date start_date: Only include usage that has occurred on or after this date
:param date end_date: Only include usage that occurred on or before this date
:param unicode identity: The `identity` value of the resources to read
:param unicode tag: Only list Bindings that have all of the specified Tags
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.notify.v1.service.binding.BindingInstance]
"""
return list(self.stream(
start_date=start_date,
end_date=end_date,
identity=identity,
tag=tag,
limit=limit,
page_size=page_size,
))
def page(self, start_date=values.unset, end_date=values.unset,
identity=values.unset, tag=values.unset, page_token=values.unset,
page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of BindingInstance records from the API.
Request is executed immediately
:param date start_date: Only include usage that has occurred on or after this date
:param date end_date: Only include usage that occurred on or before this date
:param unicode identity: The `identity` value of the resources to read
:param unicode tag: Only list Bindings that have all of the specified Tags
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingPage
"""
params = values.of({
'StartDate': serialize.iso8601_date(start_date),
'EndDate': serialize.iso8601_date(end_date),
'Identity': serialize.map(identity, lambda e: e),
'Tag': serialize.map(tag, lambda e: e),
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return BindingPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of BindingInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return BindingPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a BindingContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.notify.v1.service.binding.BindingContext
:rtype: twilio.rest.notify.v1.service.binding.BindingContext
"""
return BindingContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __call__(self, sid):
"""
Constructs a BindingContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.notify.v1.service.binding.BindingContext
:rtype: twilio.rest.notify.v1.service.binding.BindingContext
"""
return BindingContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Notify.V1.BindingList>'
class BindingPage(Page):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, response, solution):
"""
Initialize the BindingPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The SID of the Service that the resource is associated with
:returns: twilio.rest.notify.v1.service.binding.BindingPage
:rtype: twilio.rest.notify.v1.service.binding.BindingPage
"""
super(BindingPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of BindingInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.notify.v1.service.binding.BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingInstance
"""
return BindingInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Notify.V1.BindingPage>'
class BindingContext(InstanceContext):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, service_sid, sid):
"""
Initialize the BindingContext
:param Version version: Version that contains the resource
:param service_sid: The SID of the Service to fetch the resource from
:param sid: The unique string that identifies the resource
:returns: twilio.rest.notify.v1.service.binding.BindingContext
:rtype: twilio.rest.notify.v1.service.binding.BindingContext
"""
super(BindingContext, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'sid': sid, }
self._uri = '/Services/{service_sid}/Bindings/{sid}'.format(**self._solution)
def fetch(self):
"""
Fetch a BindingInstance
:returns: Fetched BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return BindingInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the BindingInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete('delete', self._uri)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Notify.V1.BindingContext {}>'.format(context)
class BindingInstance(InstanceResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
class BindingType(object):
APN = "apn"
GCM = "gcm"
SMS = "sms"
FCM = "fcm"
FACEBOOK_MESSENGER = "facebook-messenger"
ALEXA = "alexa"
def __init__(self, version, payload, service_sid, sid=None):
"""
Initialize the BindingInstance
:returns: twilio.rest.notify.v1.service.binding.BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingInstance
"""
super(BindingInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload.get('sid'),
'account_sid': payload.get('account_sid'),
'service_sid': payload.get('service_sid'),
'credential_sid': payload.get('credential_sid'),
'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
'notification_protocol_version': payload.get('notification_protocol_version'),
'endpoint': payload.get('endpoint'),
'identity': payload.get('identity'),
'binding_type': payload.get('binding_type'),
'address': payload.get('address'),
'tags': payload.get('tags'),
'url': payload.get('url'),
'links': payload.get('links'),
}
# Context
self._context = None
self._solution = {'service_sid': service_sid, 'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: BindingContext for this BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingContext
"""
if self._context is None:
self._context = BindingContext(
self._version,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def service_sid(self):
"""
:returns: The SID of the Service that the resource is associated with
:rtype: unicode
"""
return self._properties['service_sid']
@property
def credential_sid(self):
"""
:returns: The SID of the Credential resource to be used to send notifications to this Binding
:rtype: unicode
"""
return self._properties['credential_sid']
@property
def date_created(self):
"""
:returns: The RFC 2822 date and time in GMT when the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The RFC 2822 date and time in GMT when the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def notification_protocol_version(self):
"""
:returns: The protocol version to use to send the notification
:rtype: unicode
"""
return self._properties['notification_protocol_version']
@property
def endpoint(self):
"""
:returns: Deprecated
:rtype: unicode
"""
return self._properties['endpoint']
@property
def identity(self):
"""
:returns: The `identity` value that identifies the new resource's User
:rtype: unicode
"""
return self._properties['identity']
@property
def binding_type(self):
"""
:returns: The type of the Binding
:rtype: unicode
"""
return self._properties['binding_type']
@property
def address(self):
"""
:returns: The channel-specific address
:rtype: unicode
"""
return self._properties['address']
@property
def tags(self):
"""
:returns: The list of tags associated with this Binding
:rtype: unicode
"""
return self._properties['tags']
@property
def url(self):
"""
:returns: The absolute URL of the Binding resource
:rtype: unicode
"""
return self._properties['url']
@property
def links(self):
"""
:returns: The URLs of related resources
:rtype: unicode
"""
return self._properties['links']
def fetch(self):
"""
Fetch a BindingInstance
:returns: Fetched BindingInstance
:rtype: twilio.rest.notify.v1.service.binding.BindingInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the BindingInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Notify.V1.BindingInstance {}>'.format(context)
| mit | 4,937,572,604,946,714,000 | 34.057252 | 122 | 0.609581 | false |
rogerthat-platform/rogerthat-backend | src/rogerthat/pages/payment.py | 1 | 5119 | # -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
import base64
import json
import logging
import urllib
import webapp2
from mcfw.rpc import serialize_complex_value
from rogerthat.bizz.payment import get_payment_provider_for_user, get_api_module
from rogerthat.bizz.payment.state import finish_login_state, create_login_state, get_login_state
from rogerthat.dal.payment import get_payment_provider
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.to.payment import AppPaymentProviderTO
from rogerthat.utils.app import create_app_user_by_email
class PaymentCallbackHandler(webapp2.RequestHandler):
def get(self, provider_id, path):
params = dict(self.request.GET)
logging.debug("PaymentCallbackHandler.GET '%s', at path '%s' with params %s", provider_id, path, params)
get_api_module(provider_id).web_callback(self, path, params)
def post(self, provider_id, path):
params = dict(self.request.POST)
logging.debug("PaymentCallbackHandler.POST '%s', at path '%s' with params %s", provider_id, path, params)
get_api_module(provider_id).web_callback(self, path, params)
class PaymentLoginRedirectHandler(webapp2.RequestHandler):
def get(self, provider_id):
pp = get_payment_provider(provider_id)
if not pp:
logging.debug('PaymentLoginRedirectHandler: payment provider not found')
self.abort(400)
return
email = self.request.get("email", None)
app_id = self.request.get("app_id", None)
app_user = create_app_user_by_email(email, app_id)
state = create_login_state(app_user, provider_id)
args = {
'state': state,
'response_type': 'code',
'client_id': pp.oauth_settings.client_id,
'scope': pp.oauth_settings.scope,
'redirect_uri': pp.redirect_url(get_server_settings().baseUrl)
}
url = '%s?%s' % (pp.oauth_settings.authorize_url, urllib.urlencode(args))
logging.debug('Redirecting to %s', url)
self.redirect(url.encode('utf-8'))
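# Flow sketch (inferred from the handlers in this module): the redirect
# handler above sends the app user to the provider's authorize_url with a
# one-time `state` token; after the provider calls back, the mobile app POSTs
# that `state` to PaymentLoginAppHandler below, which exchanges the OAuth code
# for a token and finishes the login state exactly once.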
class PaymentLoginAppHandler(webapp2.RequestHandler):
def post(self):
params = dict(self.request.POST)
logging.debug("PaymentLoginAppHandler with params %s", params)
user = self.request.headers.get("X-MCTracker-User", None)
password = self.request.headers.get("X-MCTracker-Pass", None)
if not (user and password):
logging.debug("user not provided")
self.response.set_status(500)
return
if not users.set_json_rpc_user(base64.decodestring(user), base64.decodestring(password)):
logging.debug("user not set")
self.response.set_status(500)
return
app_user = users.get_current_user()
state = params["state"]
login_state = get_login_state(state)
if app_user != login_state.app_user:
self.response.set_status(500)
logging.error("%s tried to finish anothers user login %s", app_user, state)
return
token = get_api_module(login_state.provider_id).handle_code(login_state)
logging.debug('Received token: %s', token)
if not finish_login_state(state, token):
logging.debug("user already finished this login")
self.response.set_status(500)
return
args = {"result": "success",
"payment_provider": serialize_complex_value(
get_payment_provider_for_user(app_user, login_state.provider_id), AppPaymentProviderTO, False)}
r = json.dumps(args)
self.response.out.write(r)
class PaymentTransactionHandler(webapp2.RequestHandler):
def get(self, provider_id, transaction_id):
logging.debug("PaymentTransactionHandler '%s' for transaction '%s'", provider_id, transaction_id)
pp = get_payment_provider(provider_id)
if not pp:
logging.debug('PaymentTransactionHandler: payment provider not found')
self.abort(400)
return
trans_details = get_api_module(provider_id).get_public_transaction(transaction_id)
if not trans_details:
logging.debug('PaymentTransactionHandler: transaction not found')
self.abort(404)
return
logging.info('Returning result: %s', trans_details)
self.response.headers['Content-Type'] = "application/json"
self.response.out.write(json.dumps(trans_details))
| apache-2.0 | 2,898,154,106,186,170,400 | 39.307087 | 115 | 0.664388 | false |
dmisem/dsmblog | pelicanconf.py | 1 | 1851 | # -*- coding: utf-8 -*- #
from __future__ import unicode_literals
import os
import sys
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
sys.path.append(SITE_ROOT)
import local_settings as ls
AUTHOR = ls.AUTHOR
SITENAME = ls.SITENAME
SITEURL = ls.SITEURL
PATH = ls.PATH
TIMEZONE = ls.TIMEZONE
LOCALE = ls.LOCALE
DEFAULT_LANG = ls.DEFAULT_LANG
ARTICLE_URL = 'articles/{lang}/{slug}.html'
ARTICLE_SAVE_AS = ARTICLE_URL
ARTICLE_LANG_URL = ARTICLE_URL
ARTICLE_LANG_SAVE_AS = ARTICLE_URL
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
LINKS = (
('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('ReStructuredText', 'http://docutils.sourceforge.net/rst.html'),
)
# Social widget
SOCIAL = (
('linkedin', 'http://ua.linkedin.com/pub/dmitry-semenov/5/994/a6a', ''),
('github', 'https://github.com/dmisem', ''),
('bitbucket', 'https://bitbucket.org/dmisem', ''),
('e-mail', 'mailto:[email protected]', 'envelope'),
)
STATIC_PATHS = ['images', 'img']
DEFAULT_PAGINATION = 10
# Document-relative URLs (useful when developing)
RELATIVE_URLS = True
THEME = "themes/pelican-bootstrap3"
PYGMENTS_STYLE = "default"
FAVICON = 'img/favicon.ico'
SITELOGO = 'img/dsm.png'
HIDE_SITENAME = True
DISPLAY_TAGS_ON_SIDEBAR = True
DISPLAY_TAGS_INLINE = False
TAG_LEVELS_COUNT = 3 # My settings
TAGS_URL = 'tags.html'
DISPLAY_CATEGORIES_ON_SIDEBAR = False
DISPLAY_RECENT_POSTS_ON_SIDEBAR = False
# PLUGIN_PATHS = [SITE_ROOT + '/plugins']
PLUGIN_PATHS = ['plugins']
PLUGINS = ['tag_cloud']
USE_FOLDER_AS_CATEGORY = True
if __name__ == "__main__":
d = globals()
for k in dir():
print('{0} => {1}'.format(k, d[k]))
| mit | -77,352,275,435,833,500 | 23.355263 | 77 | 0.675851 | false |
vadimtk/chrome4sdp | build/android/pylib/perf/perf_control_unittest.py | 1 | 1117 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0212
import os
import sys
import unittest
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
from pylib.device import device_utils
from pylib.perf import perf_control
class TestPerfControl(unittest.TestCase):
def setUp(self):
if not os.getenv('BUILDTYPE'):
os.environ['BUILDTYPE'] = 'Debug'
devices = device_utils.DeviceUtils.HealthyDevices(blacklist=None)
self.assertGreater(len(devices), 0, 'No device attached!')
self._device = devices[0]
def testHighPerfMode(self):
perf = perf_control.PerfControl(self._device)
try:
perf.SetPerfProfilingMode()
cpu_info = perf.GetCpuInfo()
self.assertEquals(len(perf._cpu_files), len(cpu_info))
for _, online, governor in cpu_info:
self.assertTrue(online)
self.assertEquals('performance', governor)
finally:
perf.SetDefaultPerfMode()
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 6,337,714,404,400,419,000 | 29.189189 | 78 | 0.700985 | false |
tum-ens/urbs | urbs/saveload.py | 1 | 2106 | import pandas as pd
from .pyomoio import get_entity, list_entities
def create_result_cache(prob):
entity_types = ['set', 'par', 'var', 'exp']
if hasattr(prob, 'dual'):
entity_types.append('con')
entities = []
for entity_type in entity_types:
entities.extend(list_entities(prob, entity_type).index.tolist())
result_cache = {}
for entity in entities:
result_cache[entity] = get_entity(prob, entity)
return result_cache
def save(prob, filename):
"""Save urbs model input and result cache to a HDF5 store file.
Args:
- prob: a urbs model instance containing a solution
- filename: HDF5 store file to be written
Returns:
Nothing
"""
import warnings
import tables
warnings.filterwarnings('ignore',
category=pd.io.pytables.PerformanceWarning)
warnings.filterwarnings('ignore',
category=tables.NaturalNameWarning)
if not hasattr(prob, '_result'):
prob._result = create_result_cache(prob)
with pd.HDFStore(filename, mode='w') as store:
for name in prob._data.keys():
store['data/'+name] = prob._data[name]
for name in prob._result.keys():
store['result/'+name] = prob._result[name]
class ResultContainer(object):
""" Result/input data container for reporting functions. """
def __init__(self, data, result):
self._data = data
self._result = result
def load(filename):
"""Load a urbs model result container from a HDF5 store file.
Args:
filename: an existing HDF5 store file
Returns:
prob: the modified instance containing the result cache
"""
with pd.HDFStore(filename, mode='r') as store:
data_cache = {}
for group in store.get_node('data'):
data_cache[group._v_name] = store[group._v_pathname]
result_cache = {}
for group in store.get_node('result'):
result_cache[group._v_name] = store[group._v_pathname]
return ResultContainer(data_cache, result_cache)
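# A minimal round-trip sketch (file and entity names are illustrative):
#
#     save(prob, 'scenario_base.h5')          # prob: a solved urbs model
#     container = load('scenario_base.h5')
#     costs = container._result['costs']      # assuming the model defines 'costs'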
| gpl-3.0 | -8,746,768,734,882,240,000 | 28.25 | 72 | 0.616809 | false |
eli-schwartz/pacman | test/pacman/util.py | 1 | 4814 | # Copyright (c) 2006 by Aurelien Foret <[email protected]>
# Copyright (c) 2006-2018 Pacman Development Team <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import hashlib
import tap
SELFPATH = os.path.abspath(os.path.dirname(__file__))
# ALPM
PM_ROOT = "/"
PM_DBPATH = "var/lib/pacman"
PM_SYNCDBPATH = "var/lib/pacman/sync"
PM_LOCK = "var/lib/pacman/db.lck"
PM_CACHEDIR = "var/cache/pacman/pkg"
PM_EXT_PKG = ".pkg.tar.gz"
PM_HOOKDIR = "etc/pacman.d/hooks"
# Pacman
PACCONF = "etc/pacman.conf"
# Pactest
TMPDIR = "tmp"
SYNCREPO = "var/pub"
LOGFILE = "var/log/pactest.log"
verbose = 0
def vprint(msg):
if verbose:
tap.diag(msg)
#
# Methods to generate files
#
def getfileinfo(filename):
data = {
'changed': False,
'isdir': False,
'islink': False,
'link': None,
'hasperms': False,
'perms': None,
}
if filename[-1] == "*":
data["changed"] = True
filename = filename.rstrip("*")
if filename.find(" -> ") != -1:
filename, link = filename.split(" -> ")
data["islink"] = True
data["link"] = link
elif filename.find("|") != -1:
filename, perms = filename.split("|")
data["hasperms"] = True
data["perms"] = int(perms, 8)
if filename[-1] == "/":
data["isdir"] = True
data["filename"] = filename
return data
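# Examples of the pactest filename syntax handled above (illustrative inputs):
#
#     getfileinfo("etc/profile*")                  # marked as changed
#     getfileinfo("usr/lib/libfoo.so -> foo.so.1") # symlink pointing at "foo.so.1"
#     getfileinfo("usr/bin/foo|4755")              # explicit octal permissions
#     getfileinfo("var/cache/")                    # a directory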
def mkfile(base, name, data=""):
info = getfileinfo(name)
filename = info["filename"]
path = os.path.join(base, filename)
if info["isdir"]:
if not os.path.isdir(path):
os.makedirs(path, 0o755)
return path
dir_path = os.path.dirname(path)
if dir_path and not os.path.isdir(dir_path):
os.makedirs(dir_path, 0o755)
if info["islink"]:
os.symlink(info["link"], path)
else:
writedata(path, data)
if info["perms"]:
os.chmod(path, info["perms"])
return path
def writedata(filename, data):
if isinstance(data, list):
data = "\n".join(data)
fd = open(filename, "w")
if data:
fd.write(data)
if data[-1] != "\n":
fd.write("\n")
fd.close()
def mkcfgfile(filename, root, option, db):
# Options
data = ["[options]"]
for key, value in option.items():
data.extend(["%s = %s" % (key, j) for j in value])
# Repositories
# sort by repo name so tests can predict repo order, rather than be
# subjects to the whims of python dict() ordering
for key in sorted(db.keys()):
if key != "local":
value = db[key]
data.append("[%s]\n" \
"SigLevel = %s\n" \
"Server = file://%s" \
% (value.treename, value.getverify(), \
os.path.join(root, SYNCREPO, value.treename)))
for optkey, optval in value.option.items():
data.extend(["%s = %s" % (optkey, j) for j in optval])
mkfile(root, filename, "\n".join(data))
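# The generated config looks roughly like this (a sketch; the exact values
# depend on the option/db dicts passed in):
#
#     [options]
#     HoldPkg = pacman
#
#     [sync]
#     SigLevel = Never
#     Server = file:///tmp/root/var/pub/sync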
#
# MD5 helpers
#
def getmd5sum(filename):
if not os.path.isfile(filename):
return ""
fd = open(filename, "rb")
checksum = hashlib.md5()
    # hash the file in 32 KiB chunks to keep memory use bounded
    while 1:
block = fd.read(32 * 1024)
if not block:
break
checksum.update(block)
fd.close()
return checksum.hexdigest()
def mkmd5sum(data):
checksum = hashlib.md5()
checksum.update(("%s\n" % data).encode('utf8'))
return checksum.hexdigest()
#
# Miscellaneous
#
def which(filename, path=None):
if not path:
path = os.environ["PATH"].split(os.pathsep)
for p in path:
f = os.path.join(p, filename)
if os.access(f, os.F_OK):
return f
return None
def grep(filename, pattern):
pat = re.compile(pattern)
myfile = open(filename, 'r')
for line in myfile:
if pat.search(line):
myfile.close()
return True
myfile.close()
return False
def mkdir(path):
if os.path.isdir(path):
return
elif os.path.isfile(path):
raise OSError("'%s' already exists and is not a directory" % path)
os.makedirs(path, 0o755)
| gpl-2.0 | 4,919,808,286,097,878,000 | 24.743316 | 77 | 0.583922 | false |
maferelo/saleor | saleor/product/migrations/0072_auto_20180925_1048.py | 3 | 1358 | # Generated by Django 2.0.8 on 2018-09-25 15:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("product", "0071_attributechoicevalue_value")]
operations = [
migrations.RenameModel(old_name="ProductAttribute", new_name="Attribute"),
migrations.RenameModel(
old_name="AttributeChoiceValueTranslation",
new_name="AttributeValueTranslation",
),
migrations.RenameModel(
old_name="AttributeChoiceValue", new_name="AttributeValue"
),
migrations.RenameModel(
old_name="ProductAttributeTranslation", new_name="AttributeTranslation"
),
migrations.RenameField(
model_name="attributetranslation",
old_name="product_attribute",
new_name="attribute",
),
migrations.RenameField(
model_name="attributevaluetranslation",
old_name="attribute_choice_value",
new_name="attribute_value",
),
migrations.AlterUniqueTogether(
name="attributetranslation",
unique_together={("language_code", "attribute")},
),
migrations.AlterUniqueTogether(
name="attributevaluetranslation",
unique_together={("language_code", "attribute_value")},
),
]
| bsd-3-clause | -7,478,345,584,148,143,000 | 32.95 | 83 | 0.613402 | false |
stefanfoulis/django-image-filer | image_filer/migrations/0006_teaser_plugin.py | 1 | 17818 |
from south.db import db
from django.db import models
from image_filer.models import *
class Migration:
def forwards(self, orm):
# Adding model 'ImageFilerTeaser'
db.create_table('cmsplugin_imagefilerteaser', (
('cmsplugin_ptr', orm['image_filer.imagefilerteaser:cmsplugin_ptr']),
('title', orm['image_filer.imagefilerteaser:title']),
('image', orm['image_filer.imagefilerteaser:image']),
('page_link', orm['image_filer.imagefilerteaser:page_link']),
('url', orm['image_filer.imagefilerteaser:url']),
('description', orm['image_filer.imagefilerteaser:description']),
))
db.send_create_signal('image_filer', ['ImageFilerTeaser'])
def backwards(self, orm):
# Deleting model 'ImageFilerTeaser'
db.delete_table('cmsplugin_imagefilerteaser')
models = {
'auth.group': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.CMSPlugin']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.page': {
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'menu_login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'image_filer.clipboard': {
'files': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['image_filer.Image']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'clipboards'", 'to': "orm['auth.User']"})
},
'image_filer.clipboarditem': {
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['image_filer.Clipboard']"}),
'file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['image_filer.Image']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_checked': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'})
},
'image_filer.folder': {
'Meta': {'unique_together': "(('parent', 'name'),)"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['image_filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'image_filer.folderpermission': {
'can_add_children': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'can_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'can_read': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'everybody': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['image_filer.Folder']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'image_filer.image': {
'_height_field': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width_field': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'can_use_for_print': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'can_use_for_private_use': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'can_use_for_research': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'can_use_for_teaching': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'can_use_for_web': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'contact_of_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'image_files'", 'null': 'True', 'to': "orm['image_filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_images'", 'null': 'True', 'to': "orm['auth.User']"}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'usage_restriction_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'image_filer.imagefilerteaser': {
'Meta': {'db_table': "'cmsplugin_imagefilerteaser'"},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['image_filer.Image']", 'null': 'True', 'blank': 'True'}),
'page_link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'image_filer.imagepublication': {
'Meta': {'db_table': "'cmsplugin_imagepublication'"},
'alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'float': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'free_link': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['image_filer.Image']"}),
'longdesc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'page_link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'show_author': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'show_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'sites.site': {
'Meta': {'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['image_filer']
| mit | 3,079,141,465,672,521,700 | 86.343137 | 189 | 0.554383 | false |
mbakke/ganeti | test/ganeti.storage_unittest.py | 1 | 4553 | #!/usr/bin/python
#
# Copyright (C) 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for testing ganeti.storage"""
import re
import unittest
import random
from ganeti import constants
from ganeti import utils
from ganeti import compat
from ganeti import errors
from ganeti import storage
import testutils
class TestVGReduce(testutils.GanetiTestCase):
VGNAME = "xenvg"
LIST_CMD = storage.LvmVgStorage.LIST_COMMAND
VGREDUCE_CMD = storage.LvmVgStorage.VGREDUCE_COMMAND
def _runCmd(self, cmd, **kwargs):
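    # Stand-in for utils.RunCmd: pop the next (expected command, canned result)
    # pair from self.run_history and assert the issued command matches.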
if not self.run_history:
self.fail("Empty run results")
exp_cmd, result = self.run_history.pop(0)
self.assertEqual(cmd, exp_cmd)
return result
def testOldVersion(self):
lvmvg = storage.LvmVgStorage()
stdout = self._ReadTestData("vgreduce-removemissing-2.02.02.txt")
vgs_fail = self._ReadTestData("vgs-missing-pvs-2.02.02.txt")
self.run_history = [
([self.VGREDUCE_CMD, "--removemissing", self.VGNAME],
utils.RunResult(0, None, stdout, "", "", None, None)),
([self.LIST_CMD, "--noheadings", "--nosuffix", self.VGNAME],
utils.RunResult(0, None, "", "", "", None, None)),
]
lvmvg._RemoveMissing(self.VGNAME, _runcmd_fn=self._runCmd)
self.assertEqual(self.run_history, [])
for ecode, out in [(1, ""), (0, vgs_fail)]:
self.run_history = [
([self.VGREDUCE_CMD, "--removemissing", self.VGNAME],
utils.RunResult(0, None, stdout, "", "", None, None)),
([self.LIST_CMD, "--noheadings", "--nosuffix", self.VGNAME],
utils.RunResult(ecode, None, out, "", "", None, None)),
]
self.assertRaises(errors.StorageError, lvmvg._RemoveMissing, self.VGNAME,
_runcmd_fn=self._runCmd)
self.assertEqual(self.run_history, [])
def testNewVersion(self):
lvmvg = storage.LvmVgStorage()
stdout1 = self._ReadTestData("vgreduce-removemissing-2.02.66-fail.txt")
stdout2 = self._ReadTestData("vgreduce-removemissing-2.02.66-ok.txt")
vgs_fail = self._ReadTestData("vgs-missing-pvs-2.02.66.txt")
# first: require --fail, check that it's used
self.run_history = [
([self.VGREDUCE_CMD, "--removemissing", self.VGNAME],
utils.RunResult(0, None, stdout1, "", "", None, None)),
([self.VGREDUCE_CMD, "--removemissing", "--force", self.VGNAME],
utils.RunResult(0, None, stdout2, "", "", None, None)),
([self.LIST_CMD, "--noheadings", "--nosuffix", self.VGNAME],
utils.RunResult(0, None, "", "", "", None, None)),
]
lvmvg._RemoveMissing(self.VGNAME, _runcmd_fn=self._runCmd)
self.assertEqual(self.run_history, [])
# second: make sure --fail is not used if not needed
self.run_history = [
([self.VGREDUCE_CMD, "--removemissing", self.VGNAME],
utils.RunResult(0, None, stdout2, "", "", None, None)),
([self.LIST_CMD, "--noheadings", "--nosuffix", self.VGNAME],
utils.RunResult(0, None, "", "", "", None, None)),
]
lvmvg._RemoveMissing(self.VGNAME, _runcmd_fn=self._runCmd)
self.assertEqual(self.run_history, [])
# third: make sure we error out if vgs doesn't find the volume
for ecode, out in [(1, ""), (0, vgs_fail)]:
self.run_history = [
([self.VGREDUCE_CMD, "--removemissing", self.VGNAME],
utils.RunResult(0, None, stdout1, "", "", None, None)),
([self.VGREDUCE_CMD, "--removemissing", "--force", self.VGNAME],
utils.RunResult(0, None, stdout2, "", "", None, None)),
([self.LIST_CMD, "--noheadings", "--nosuffix", self.VGNAME],
utils.RunResult(ecode, None, out, "", "", None, None)),
]
self.assertRaises(errors.StorageError, lvmvg._RemoveMissing, self.VGNAME,
_runcmd_fn=self._runCmd)
self.assertEqual(self.run_history, [])
if __name__ == "__main__":
testutils.GanetiTestProgram()
| gpl-2.0 | 897,607,324,631,520,600 | 39.292035 | 79 | 0.645069 | false |
macosforge/ccs-calendarserver | calendarserver/tools/dkimtool.py | 1 | 8493 | #!/usr/bin/env python
##
# Copyright (c) 2012-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import os
import sys
from Crypto.PublicKey import RSA
from StringIO import StringIO
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.logger import LogLevel, STDLibLogObserver
from twisted.python.usage import Options
from twext.python.log import Logger
from txweb2.http_headers import Headers
from txdav.caldav.datastore.scheduling.ischedule.dkim import RSA256, DKIMRequest, \
PublicKeyLookup, DKIMVerifier, DKIMVerificationError
log = Logger()
def _doKeyGeneration(options):
key = RSA.generate(options["key-size"])
output = key.exportKey()
lineBreak = False
if options["key"]:
with open(options["key"], "w") as f:
f.write(output)
else:
print(output)
lineBreak = True
output = key.publickey().exportKey()
if options["pub-key"]:
with open(options["pub-key"], "w") as f:
f.write(output)
else:
if lineBreak:
print
print(output)
lineBreak = True
if options["txt"]:
output = "".join(output.splitlines()[1:-1])
txt = "v=DKIM1; p=%s" % (output,)
if lineBreak:
print
print(txt)
@inlineCallbacks
def _doRequest(options):
if options["verbose"]:
log.levels().setLogLevelForNamespace("txdav.caldav.datastore.scheduling.ischedule.dkim", LogLevel.debug)
# Parse the HTTP file
with open(options["request"]) as f:
request = f.read()
method, uri, headers, stream = _parseRequest(request)
# Setup signing headers
sign_headers = options["signing"]
if sign_headers is None:
sign_headers = []
for hdr in ("Host", "Content-Type", "Originator", "Recipient+"):
if headers.hasHeader(hdr.rstrip("+")):
sign_headers.append(hdr)
else:
sign_headers = sign_headers.split(":")
dkim = DKIMRequest(
method,
uri,
headers,
stream,
options["domain"],
options["selector"],
options["key"],
options["algorithm"],
sign_headers,
True,
True,
False,
int(options["expire"]),
)
if options["fake-time"]:
dkim.time = "100"
dkim.expire = "200"
dkim.message_id = "1"
yield dkim.sign()
s = StringIO()
_writeRequest(dkim, s)
print(s.getvalue())
@inlineCallbacks
def _doVerify(options):
# Parse the HTTP file
with open(os.path.expanduser(options["verify"])) as f:
verify = f.read()
_method, _uri, headers, body = _parseRequest(verify)
# Check for local public key
if options["pub-key"]:
PublicKeyLookup_File.pubkeyfile = os.path.expanduser(options["pub-key"])
lookup = (PublicKeyLookup_File,)
else:
lookup = None
dkim = DKIMVerifier(headers, body, lookup)
if options["fake-time"]:
dkim.time = 0
try:
yield dkim.verify()
except DKIMVerificationError, e:
print("Verification Failed: %s" % (e,))
else:
print("Verification Succeeded")
def _parseRequest(request):
lines = request.splitlines(True)
method, uri, _ignore_version = lines.pop(0).split()
hdrs = []
body = None
for line in lines:
if body is not None:
body.append(line)
elif line.strip() == "":
body = []
elif line[0] in (" ", "\t"):
hdrs[-1] += line
else:
hdrs.append(line)
headers = Headers()
for hdr in hdrs:
name, value = hdr.split(':', 1)
headers.addRawHeader(name, value.strip())
stream = "".join(body)
return method, uri, headers, stream
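# _parseRequest expects raw HTTP text; an illustrative input:
#
#     POST /ischedule HTTP/1.1
#     Host: example.com
#     Content-Type: text/calendar
#
#     BEGIN:VCALENDAR
#     ...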
def _writeRequest(request, f):
f.write("%s %s HTTP/1.1\r\n" % (request.method, request.uri,))
for name, valuelist in request.headers.getAllRawHeaders():
for value in valuelist:
f.write("%s: %s\r\n" % (name, value))
f.write("\r\n")
f.write(request.stream.read())
class PublicKeyLookup_File(PublicKeyLookup):
method = "*"
pubkeyfile = None
def getPublicKey(self):
"""
        Load the RSA public key from the local file instead of performing a remote lookup.
"""
with open(self.pubkeyfile) as f:
data = f.read()
return RSA.importKey(data)
def usage(e=None):
if e:
print(e)
print("")
try:
DKIMToolOptions().opt_help()
except SystemExit:
pass
if e:
sys.exit(64)
else:
sys.exit(0)
description = """Usage: dkimtool [options]
Options:
-h Print this help and exit
# Key Generation
--key-gen Generate private/public key files
--key FILE Private key file to create [stdout]
--pub-key FILE Public key file to create [stdout]
--key-size SIZE Key size [1024]
--txt Also generate the public key TXT record
--fake-time Use fake t=, x= values when signing and also
ignore expiration on verification
# Request
--request FILE An HTTP request to sign
--algorithm ALGO Signature algorithm [rsa-sha256]
--domain DOMAIN Signature domain [example.com]
--selector SELECTOR Signature selector [dkim]
--key FILE Private key to use
--signing HEADERS List of headers to sign [automatic]
--expire SECONDS When to expire signature [no expiry]
# Verify
--verify FILE An HTTP request to verify
--pkey FILE Public key to use in place of
q= lookup
Description:
This utility is for testing DKIM signed HTTP requests. Key operations are:
--key-gen: generate a private/public RSA key.
--request: sign an HTTP request.
--verify: verify a signed HTTP request.
"""
class DKIMToolOptions(Options):
"""
Command-line options for 'calendarserver_dkimtool'
"""
synopsis = description
optFlags = [
['verbose', 'v', "Verbose logging."],
['key-gen', 'g', "Generate private/public key files"],
['txt', 't', "Also generate the public key TXT record"],
['fake-time', 'f', "Fake time values for signing/verification"],
]
optParameters = [
['key', 'k', None, "Private key file to create [default: stdout]"],
['pub-key', 'p', None, 'Public key file to create [default: stdout]'],
['key-size', 'x', 1024, 'Key size'],
['request', 'r', None, 'An HTTP request to sign'],
['algorithm', 'a', RSA256, 'Signature algorithm'],
['domain', 'd', 'example.com', 'Signature domain'],
['selector', 's', 'dkim', 'Signature selector'],
['signing', 'h', None, 'List of headers to sign [automatic]'],
['expire', 'e', 3600, 'When to expire signature'],
['verify', 'w', None, 'An HTTP request to verify'],
]
def __init__(self):
super(DKIMToolOptions, self).__init__()
self.outputName = '-'
@inlineCallbacks
def _runInReactor(fn, options):
try:
yield fn(options)
except Exception, e:
print(e)
finally:
reactor.stop()
def main(argv=sys.argv, stderr=sys.stderr):
options = DKIMToolOptions()
options.parseOptions(argv[1:])
#
# Send logging output to stdout
#
observer = STDLibLogObserver()
observer.start()
if options["verbose"]:
log.levels().setLogLevelForNamespace("txdav.caldav.datastore.scheduling.ischedule.dkim", LogLevel.debug)
if options["key-gen"]:
_doKeyGeneration(options)
elif options["request"]:
reactor.callLater(0, _runInReactor, _doRequest, options)
reactor.run()
elif options["verify"]:
reactor.callLater(0, _runInReactor, _doVerify, options)
reactor.run()
else:
usage("Invalid options")
if __name__ == '__main__':
main()
| apache-2.0 | 2,544,725,631,419,095,600 | 26.134185 | 112 | 0.602849 | false |
JohnVinyard/zounds | zounds/spectral/test_weighting.py | 1 | 4859 | import unittest2
import numpy as np
from .weighting import AWeighting
from .frequencyscale import LinearScale, FrequencyBand, GeometricScale, MelScale
from .tfrepresentation import FrequencyDimension
from .frequencyadaptive import FrequencyAdaptive
from zounds.timeseries import Seconds, TimeDimension, Milliseconds, SR11025
from zounds.core import ArrayWithUnits, IdentityDimension
from .functional import fir_filter_bank
class WeightingTests(unittest2.TestCase):
def test_cannot_multiply_when_array_does_not_have_expected_dimensions(self):
td = TimeDimension(Seconds(1), Seconds(1))
tf = ArrayWithUnits(np.ones((90, 100)), [td, IdentityDimension()])
weighting = AWeighting()
self.assertRaises(ValueError, lambda: tf * weighting)
def test_can_get_weights_from_tf_representation(self):
td = TimeDimension(Seconds(1), Seconds(1))
fd = FrequencyDimension(LinearScale(FrequencyBand(20, 22050), 100))
tf = ArrayWithUnits(np.ones((90, 100)), [td, fd])
weighting = AWeighting()
weights = weighting.weights(tf)
self.assertEqual((100,), weights.shape)
def test_can_get_weights_from_scale(self):
scale = LinearScale(FrequencyBand(20, 22050), 100)
weighting = AWeighting()
weights = weighting.weights(scale)
self.assertEqual((100,), weights.shape)
def test_can_apply_a_weighting_to_time_frequency_representation(self):
td = TimeDimension(Seconds(1), Seconds(1))
fd = FrequencyDimension(LinearScale(FrequencyBand(20, 22050), 100))
tf = ArrayWithUnits(np.ones((90, 100)), [td, fd])
weighting = AWeighting()
result = tf * weighting
self.assertGreater(result[0, -1], result[0, 0])
def test_can_apply_a_weighting_to_frequency_adaptive_representation(self):
td = TimeDimension(
duration=Seconds(1),
frequency=Milliseconds(500))
scale = GeometricScale(20, 5000, 0.05, 120)
arrs = [np.ones((10, x)) for x in range(1, 121)]
fa = FrequencyAdaptive(arrs, td, scale)
weighting = AWeighting()
result = fa * weighting
self.assertGreater(
result[:, scale[-1]].sum(), result[:, scale[0]].sum())
def test_can_invert_frequency_weighting(self):
td = TimeDimension(Seconds(1), Seconds(1))
fd = FrequencyDimension(LinearScale(FrequencyBand(20, 22050), 100))
tf = ArrayWithUnits(np.random.random_sample((90, 100)), [td, fd])
weighted = tf * AWeighting()
inverted = weighted / AWeighting()
np.testing.assert_allclose(tf, inverted)
def test_can_invert_frequency_weighting_for_adaptive_representation(self):
td = TimeDimension(
duration=Seconds(1),
frequency=Milliseconds(500))
scale = GeometricScale(20, 5000, 0.05, 120)
arrs = [np.random.random_sample((10, x)) for x in range(1, 121)]
fa = FrequencyAdaptive(arrs, td, scale)
weighting = AWeighting()
result = fa * weighting
inverted = result / AWeighting()
np.testing.assert_allclose(fa, inverted)
def test_can_apply_weighting_to_explicit_frequency_dimension(self):
td = TimeDimension(
duration=Seconds(1),
frequency=Milliseconds(500))
scale = GeometricScale(20, 5000, 0.05, 120)
arrs = [np.ones((10, x)) for x in range(1, 121)]
fa = FrequencyAdaptive(arrs, td, scale)
fa2 = ArrayWithUnits(fa, fa.dimensions)
weighting = AWeighting()
result = fa2 * weighting
self.assertGreater(
result[:, scale[-1]].sum(), result[:, scale[0]].sum())
def test_can_invert_weighting_for_explicit_frequency_dimension(self):
td = TimeDimension(
duration=Seconds(1),
frequency=Milliseconds(500))
scale = GeometricScale(20, 5000, 0.05, 120)
arrs = [np.ones((10, x)) for x in range(1, 121)]
fa = FrequencyAdaptive(arrs, td, scale)
fa2 = ArrayWithUnits(fa, fa.dimensions)
weighting = AWeighting()
result = fa2 * weighting
inverted = result / AWeighting()
np.testing.assert_allclose(fa, inverted)
def test_can_apply_weighting_to_filter_bank(self):
sr = SR11025()
band = FrequencyBand(20, sr.nyquist)
scale = MelScale(band, 100)
bank = fir_filter_bank(scale, 256, sr, np.hanning(25))
weighted = bank * AWeighting()
self.assertSequenceEqual(bank.dimensions, weighted.dimensions)
def test_multiplication_by_weighting_is_commutative(self):
sr = SR11025()
band = FrequencyBand(20, sr.nyquist)
scale = MelScale(band, 100)
bank = fir_filter_bank(scale, 256, sr, np.hanning(25))
np.testing.assert_allclose(bank * AWeighting(), AWeighting() * bank)
| mit | 7,542,779,155,713,181,000 | 42.383929 | 80 | 0.64396 | false |
cfe-lab/Umberjack | test/simulations/sim_pipeline.py | 1 | 7917 | """
The full pipeline for generating simulated population reads for unit testing.
Usage: python sim_pipeline.py [config file]
"""
import subprocess
import os
import logging
import sys
import ConfigParser
import hyphy.hyphy_handler as hyphy_handler
import fasttree.fasttree_handler as fasttree_handler
import config.settings as settings
settings.setup_logging()
LOGGER = logging.getLogger(__name__)
def get_path_str(path, pardir):
"""
If absolute path, then returns the path as is.
If relative path, then returns absolute path of concatenated pardir/path
:param str path: absolute or relative file or directory path
:param str pardir: parent directory to concatenate to path if path is relative directory
:return str: absolute resolved path
"""
if not os.path.isabs(path):
return os.path.join(pardir, path)
else:
return path
SECTION = "sim"
config_file = sys.argv[1]
config = ConfigParser.RawConfigParser()
config.read(config_file)
OUTDIR = os.path.dirname(config_file) # Output directory for simulated data
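# The config file needs a [sim] section providing the keys read below;
# a partial sketch (all values are placeholders):
#
#     [sim]
#     SEED = 42
#     FILENAME_PREFIX = simpop
#     NUM_CODON_SITES = 400
#     NUM_INDIV = 100
#     INDELIBLE_BIN_DIR = /path/to/indelible
#     INDELIBLE_SCALING_RATES = 1,5,10
#     ...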
# Generate Tree
SEED = config.getint(SECTION, "SEED")
FILENAME_PREFIX = config.get(SECTION, "FILENAME_PREFIX")
NUM_CODON_SITES = config.getint(SECTION, "NUM_CODON_SITES")
NUM_INDIV = config.getint(SECTION, "NUM_INDIV")
treefile = OUTDIR + os.sep + FILENAME_PREFIX + ".nwk"
renamed_treefile = OUTDIR + os.sep + FILENAME_PREFIX + ".rename.nwk"
if os.path.exists(treefile) and os.path.getsize(treefile) and os.path.exists(renamed_treefile) and os.path.getsize(renamed_treefile):
LOGGER.warn("Not regenerating trees {} and {}".format(treefile, renamed_treefile) )
else:
asg_driver_exe = os.path.abspath(os.path.dirname(__file__) + os.sep + "asg_driver.py")
asg_driver_cmd = ["python", asg_driver_exe,
OUTDIR + os.sep + FILENAME_PREFIX,
str(NUM_INDIV),
str(SEED)]
LOGGER.debug("About to execute " + " ".join(asg_driver_cmd))
subprocess.check_call(asg_driver_cmd, env=os.environ)
LOGGER.debug("Finished execute ")
# Relabel tree nodes to more manageable names. Reformat tree so that indelible can handle it.
relabel_phylogeny_exe = os.path.abspath(os.path.dirname(__file__) + os.sep + "relabel_phylogeny.py")
relabel_phylogeny_cmd = ["python", relabel_phylogeny_exe,
treefile]
LOGGER.debug("About to execute " + " ".join(relabel_phylogeny_cmd))
subprocess.check_call(relabel_phylogeny_cmd, env=os.environ)
LOGGER.debug("Finished execute ")
# Use Indelible to create population sequences at different scaling factors (ie mutation rates)
INDELIBLE_BIN_DIR = get_path_str(config.get(SECTION, "INDELIBLE_BIN_DIR"), OUTDIR)
INDELIBLE_SCALING_RATES = config.get(SECTION, "INDELIBLE_SCALING_RATES")
batch_indelible_exe = os.path.abspath(os.path.dirname(__file__) + "/indelible/batch_indelible.py")
indelible_cmd = ["python", batch_indelible_exe,
renamed_treefile, # full filepath to tree
INDELIBLE_SCALING_RATES,
str(SEED), # random seed
str(NUM_CODON_SITES), # number of codon sites in genome
OUTDIR, # indelible output file directory
FILENAME_PREFIX, # Indelible output filename prefix
INDELIBLE_BIN_DIR] # indelible bin dir
LOGGER.debug("About to execute " + " ".join(indelible_cmd))
subprocess.check_call(indelible_cmd, env=os.environ)
LOGGER.debug("Finished execute ")
# Create sample genome by concatenating slices of indelible alignments from different mutation rates.
sample_genomes_fasta = OUTDIR + os.sep + "mixed" + os.sep + FILENAME_PREFIX + ".mixed.fasta"
sample_genomes_consensus_fasta = sample_genomes_fasta.replace(".fasta", ".consensus.fasta")
if (os.path.exists(sample_genomes_fasta) and os.path.getsize(sample_genomes_fasta) and
os.path.exists(sample_genomes_consensus_fasta) and os.path.getsize(sample_genomes_consensus_fasta)):
LOGGER.warn("Not regenerating combined sample genome fastas {} and {} ".format(sample_genomes_fasta, sample_genomes_consensus_fasta))
else:
sample_genomes_exe = os.path.abspath(os.path.dirname(__file__) + os.sep + "sample_genomes.py")
sample_genomes_cmd = ["python", sample_genomes_exe,
INDELIBLE_SCALING_RATES, # comma delimited list of mutation scaling rates
OUTDIR + os.sep + "mixed", # full filepath of directory for sample_genomes.py output
FILENAME_PREFIX + ".mixed", # prefix of sample_genomes.py population sequence output files
str(SEED), # random seed
str(NUM_CODON_SITES), # number codon sites
OUTDIR, # Indelible output directory
FILENAME_PREFIX] # INDELible output filename prefix
LOGGER.debug("About to execute " + " ".join(sample_genomes_cmd))
subprocess.check_call(sample_genomes_cmd, env=os.environ)
LOGGER.debug("Finished execute ")
# Simulate MiSeq reads from the population genomes.
ART_BIN_DIR = get_path_str(config.get(SECTION, "ART_BIN_DIR"), OUTDIR)
ART_QUAL_PROFILE_TSV1 = get_path_str(config.get(SECTION, "ART_QUAL_PROFILE_TSV1"), OUTDIR)
ART_QUAL_PROFILE_TSV2 = get_path_str(config.get(SECTION, "ART_QUAL_PROFILE_TSV2"), OUTDIR)
ART_FOLD_COVER = config.getint(SECTION, "ART_FOLD_COVER")
ART_MEAN_INSERT = config.getint(SECTION, "ART_MEAN_INSERT")
ART_STDEV_INSERT = config.getint(SECTION, "ART_STDEV_INSERT")
PICARD_BIN_DIR = get_path_str(config.get(SECTION, "PICARD_BIN_DIR"), OUTDIR)
BWA_BIN_DIR = get_path_str(config.get(SECTION, "BWA_BIN_DIR"), OUTDIR)
PROCS = config.getint(SECTION, "PROCS")
art_reads_dir = OUTDIR + os.sep + "mixed" + os.sep + "reads"
art_reads_filename_prefix = FILENAME_PREFIX + ".mixed.reads"
generate_reads_exe = os.path.abspath(os.path.dirname(__file__) + os.sep + "generate_reads.py")
generate_reads_cmd = ["python", generate_reads_exe,
ART_BIN_DIR,
ART_QUAL_PROFILE_TSV1,
ART_QUAL_PROFILE_TSV2,
sample_genomes_fasta,
sample_genomes_consensus_fasta,
art_reads_dir + os.sep + art_reads_filename_prefix, # dir and filename prefix of ART output
str(ART_FOLD_COVER),
str(ART_MEAN_INSERT),
str(ART_STDEV_INSERT),
PICARD_BIN_DIR,
BWA_BIN_DIR,
OUTDIR + os.sep + "mixed" + os.sep + "aln", # BWA output dir
str(PROCS),
str(SEED),
OUTDIR + os.sep + "mixed" + os.sep + FILENAME_PREFIX + ".mixed.rates.csv"] # Indelible mixed mutation rates csv
LOGGER.debug("About to execute " + " ".join(generate_reads_cmd))
subprocess.check_call(generate_reads_cmd, env=os.environ)
LOGGER.debug("Finished execute ")
# For the sample_genomes populations, we lose the true tree branch lengths when we concatenate multiple populations at different scalings together.
# Get FastTree to approximate tree for concatenated population sequences.
FASTTREE_EXE = get_path_str(config.get(SECTION, "FASTTREE_EXE"), OUTDIR)
sample_genomes_tree_fname = fasttree_handler.make_tree_repro(fasta_fname=sample_genomes_fasta, intree_fname=renamed_treefile,
fastree_exe=FASTTREE_EXE)
# Calculate HyPhy dN/dS for the full sample_genomes population fasta
HYPHY_EXE = get_path_str(config.get(SECTION, "HYPHY_EXE"), OUTDIR)
HYPHY_BASEPATH = get_path_str(config.get(SECTION, "HYPHY_BASEPATH"), OUTDIR)
hyphy_handler.calc_dnds(codon_fasta_filename=sample_genomes_fasta, tree_filename=sample_genomes_tree_fname,
hyphy_exe=HYPHY_EXE, hyphy_basedir=HYPHY_BASEPATH, threads=PROCS)
| bsd-2-clause | -6,855,658,814,491,119,000 | 46.981818 | 147 | 0.664898 | false |
pgoeser/gnuradio-mlse | python/testapp.py | 1 | 15170 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Paul Goeser
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
import mlse_swig as mlse
import time, random, cmath, math, operator
import signal # for correct KeyboardInterrupt redirection
import threading
import testapp_framework
from testapp_framework import *
from math import sqrt
# This file contains a small test app that can run simulations of
# the MLSE equalizer.
# You might want to grab the PYTHONPATH that run_tests uses,
# or run it via run_testapp.
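# Usage (illustrative): python testapp.py <action>, e.g.
#     python testapp.py filtercompare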
if __name__=="__main__":
import sys, threading
try:
action = sys.argv[1]
    except IndexError:
action = "default"
try:
if(action=="rewrite"):
params = {"simu_length":2000}
snrs = range(0,21,2)
dispatcher = TestDispatcher(4,params, reuse=False)
var = {}
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs))
# var = add_var(var,"noise_amplitude",[0.2]*2)
results = dispatcher.runjob(var)
# bers = extract_results(results,"ber")
bers = [ r["ber"] for r in results ]
print bers
berplot(snrs, bers, "SNR/BER", True)
if(action=="cmd"):
try:
from IPython import embed
except:
raise RuntimeError("Commandline needs ipython 0.11")
myns={}
embed(user_ns=myns)
locals().update(myns)
if(action=="debug"):
try:
from IPython import embed
except:
print "This needs ipython 0.11"
raise
params = {"simu_length":2000}
dispatcher = TestDispatcher(1,params, reuse=False)
var = [{}]
results = dispatcher.runjob(var)
results = dispatcher.runjob(var)
print results[0]["ber"]
myns={}
embed(user_ns=myns)
locals().update(myns)
if(action=="filtercompare"):
import filters
params = {"simu_length":40000, "channel_delay":8}
snrs = range(0,18+1,1)
dispatcher = TestDispatcher(4,params, reuse=False)
data = []
# correction_factor = {"laur":0.11671196828764578, "proj":0.15779756319255925, "gauss":0.13329449537101956, "dirac":1} # those numbers are the noise power that gets through the filter at 8 samples/symbol
names = {"laur":"Laurent","gauss":"Gauss","rrcos":"Wurzel-Nyquist","proj":"Projektion"}
for filt in ["laur","proj","gauss", "rrcos"]:
print "running",filt
var = {"filter_taps":eval("filters."+filt)}
# var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, math.sqrt(1/correction_factor[filt])))
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, 7.97))
results = dispatcher.runjob(var)
bers = [ r["ber"] for r in results ]
data.append({"legend":names[filt], "bers":bers, "snrs":snrs})
print bers
# print results[-1]["gr_vector_sink_x_0"][-6:]
# import matplotlib.pyplot as plt
# plt.figure(None, figsize=(6,4.2), dpi=300) # important: figure size (in inches) to make it look good
# plt.plot(map(lambda x:x.real,results[-1]["gr_vector_sink_x_0"][-6:]))
# plt.plot(map(lambda x:x.imag,results[-1]["gr_vector_sink_x_0"][-6:]))
# plt.show()
berplot2(data, "Vergleich der Empfangsfilter", True, True)
if(action=="noisesimu"):
import filters
params = {}
dispatcher = TestDispatcher(4,params, reuse=False, topblock="noise_simulation")
snrs=[0,20,100]
for filt in ["laur","proj","gauss"]:
print "running",filt
var = [{"filter_taps":eval("filters."+filt)}]
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs))
results = dispatcher.runjob(var)
data = [i["sink"] for i in results]
power= [ sum(d)/len(d) for d in data ]
print filt, ": ", power
if(action=="default"):
import filters
params = {"simu_length":4000, "channel_delay":8}
snrs = range(0,18+1,1)
dispatcher = TestDispatcher(2,params, reuse=False)
data = []
var = {"filter_taps":filters.rrcos}
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, 7.97))
results = dispatcher.runjob(var)
bers = [ r["ber"] for r in results ]
data.append({"legend":"default", "bers":bers, "snrs":snrs})
print bers
plt = berplot2(data, "BER simulation", True, True)
if(action=="chantaptest"):
import filters
params = {"simu_length":1000, "channel_delay":0}
snrs = [20]
dispatcher = TestDispatcher(4,params, reuse=False)
data = []
chantaps = ([1]+[0]*7)*6
var = {"filter_taps":filters.proj[16:48], "channel_taps":chantaps}
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, 8))
results = dispatcher.runjob(var)
bers = [ r["ber"] for r in results ]
data.append({"legend":"default", "bers":bers, "snrs":snrs})
print bers
import matplotlib.pyplot as plt
plt.figure(None, figsize=(6,4.2), dpi=200) # important: figure size (in inches) to make it look good
plt.stem(range(6),map(lambda x:x.real,results[-1]["gr_vector_sink_x_0"][-6:]),"bo-")
plt.stem(range(6),map(lambda x:x.imag,results[-1]["gr_vector_sink_x_0"][-6:]),"go-")
plt.show()
#plt = berplot2(data, "BER simulation", True, True)
if(action=="synctest"):
# synchronisation test without channel
import filters
data = []
for delay in range(4,12+1):
print "delay: %i"%delay
params = {"simu_length":10000, "channel_delay":delay}
snrs = range(0,18+1,1)
dispatcher = TestDispatcher(4,params, reuse=False)
var = {"filter_taps":filters.rrcos}
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, 8))
results = dispatcher.runjob(var)
bers = [ r["ber"] for r in results ]
data.append({"bers":bers, "snrs":snrs})
print bers
plt = berplot2(data, "Synchronisation test",)
save(action)
if(action=="synctest2"):
import filters
data = []
for delay in range(5,11+1):
print "delay: %i"%delay
params = {"simu_length":10000, "channel_delay":delay}
channel_taps = ([1]+[0]*7)*5+[1]
snrs = range(12,20+1,1)
dispatcher = TestDispatcher(2,params, reuse=False)
var = {"filter_taps":filters.rrcos[16:48], "channel_taps":channel_taps}
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, 8*6))
results = dispatcher.runjob(var)
bers = [ r["ber"] for r in results ]
data.append({"bers":bers, "snrs":snrs, "legend":delay})
print bers
plt = berplot2(data, "Synchronisation test", bermin=1e-6)
save(action)
if(action=="entzerrertest"):
import filters
data = []
channels_per_point = 500 #amount of different channels simulated for each plot-point
for chanlen in range(1,6+1):
params = {"simu_length":1000, "channel_delay":(8-chanlen)*4} # this centers the measured impulse response in the measuring window
snrs = range(0,18+1,1)
dispatcher = TestDispatcher(2,params, reuse=False)
bers = [0.]*len(snrs)
for n in range(channels_per_point):
print "still chanlen: %i, channel #%i"%(chanlen, n)
channel_taps = [0.]*(chanlen*8 - 7) # "allocate" vector
for i in range(chanlen):
#channel_taps[i*8] = cmath.rect(1.,2*math.pi*random.random()) # fill it, correctly spaced
channel_taps[i*8] = complex(random.gauss(0,1),random.gauss(0,1)) # fill it, correctly spaced
# calculate energy of the signal after channel
import numpy
#Eb = 8 * numpy.sum(numpy.absolute(numpy.convolve(filters.rrcos, channel_taps)))
f = numpy.convolve(filters.laur, channel_taps)
Eb = numpy.sum(numpy.multiply(numpy.conj(f),f)).real
# Eb = 8
                    # TODO: is this correct?
print "Eb: %s"%Eb
var = {"filter_taps":filters.rrcos[16:48], "channel_taps":channel_taps}
var = add_var(var,"noise_amplitude",snr_to_ampl(snrs, Eb)) # here we need correction for the higher signal energy due to the filter
results = dispatcher.runjob(var)
newbers = [ r["ber"] for r in results ]
bers = map(sum,zip(bers, newbers)) # accumulate bers
bers = [i/channels_per_point for i in bers]
print bers
data.append({"legend":chanlen, "bers":bers, "snrs":snrs})
# import matplotlib.pyplot as plt
# plt.figure(None, figsize=(6,4.2), dpi=200) # important: figure size (in inches) to make it look good
# plt.stem(range(6),map(lambda x:x.real,results[-1]["gr_vector_sink_x_0"][-6:]),"bo-")
# plt.stem(range(6),map(lambda x:x.imag,results[-1]["gr_vector_sink_x_0"][-6:]),"go-")
# plt.show()
# plt = berplot2(data, "Entzerrertest", True, cmdline=False)
setdata(data, u"Entzerrertest mit Kanallängen 1-6")
savedt(action)
action=action+"-plot"
if(action=="entzerrertest-plot"):
filename="entzerrertest"
load(filename)
global savedata
savedata["title"]=u"Entzerrertest mit Kanallängen 1-6"
plt = berplot2()
from ber_awgn import ber_awgn, ebno_awgn
plt.plot(ebno_awgn, ber_awgn,"k--")
plt.legend(title=u"Kanallänge") # repaints legend
plt.annotate("AWGN (theoretisch)",(7.2,1e-3), rotation=-52, ha="center", va="center")
# a = list(plt.axis())
# a[1] += 1
# plt.axis(a) # make some space for legend
saveplt(filename)
if(action=="plot"):
dispatcher = TestDispatcher(4, blocksize=1000)
channel = [0,0,0,1,0,0,0]
snrlist = range(0,21,2)
res = dispatcher.runjob(snrlist, channel, 1000)
print res
del dispatcher
import matplotlib.pyplot as plt
plt.figure(None, figsize=(6,4.2), dpi=300) # important: figure size (in inches) to make it look good
plt.semilogy()
plt.plot(snrlist, res)
plt.grid("on")
plt.axis([min(snrlist),max(snrlist),1e-5,1])
plt.title("BER/SNR ohne Mehrwegeausbreitung")
plt.xlabel("SNR / dB")
plt.ylabel("$p_{BE}$")
plt.savefig("ber-snr_nochannel.eps")
if(action=="plot_nodecorr"):
dispatcher = TestDispatcher(4, blocksize=1000)
channel = [0,0,0,1,0,0,0]
snrlist = range(0,21)
res1 = dispatcher.runjob(snrlist, channel, 2000)
dispatcher = TestDispatcher(4, blocksize=1000, no_decorr=True)
res2 = dispatcher.runjob(snrlist, channel, 2000)
print res1
print res2
del dispatcher
import matplotlib.pyplot as plt
plt.figure(None, figsize=(6,4.2), dpi=300) # important: figure size (in inches) to make it look good
plt.semilogy()
plt.plot(snrlist, res1)
plt.plot(snrlist, res2)
plt.grid("on")
plt.axis([min(snrlist),max(snrlist),1e-5,1])
plt.title("BER/SNR ohne Kanal, mit/ohne MLSE")
plt.xlabel("SNR / dB")
plt.ylabel("$p_{BE}$")
plt.savefig("ber-snr_nochannel_decorrP.eps")
elif(action=="plot2"):
numchan=30
dispatcher = TestDispatcher(4, blocksize=100)
snrlist = range(0,21)
res=[]
import matplotlib.pyplot as plt
plt.figure(None, figsize=(6,4.2), dpi=80) # important: figure size (in inches) to make it look good
plt.semilogy()
plt.grid("on")
plt.axis([min(snrlist),max(snrlist),1e-5,1])
plt.title(u'BER/SNR mit Echos zufälliger Phase')
plt.xlabel("SNR / dB")
plt.ylabel("$p_{BE}$")
plt.interactive(True)
for numpath in range(1,6+1):
chanlist = [ ([0]*((7-numpath)/2)+ [ cmath.rect(1,2*math.pi*random.random()) for i in xrange(numpath)] + [0])[:7] for i in xrange(numchan) ]
r = [0]*len(snrlist)
for channel in chanlist:
r = map(operator.add, r, dispatcher.runjob(snrlist, channel, 100))
r = map(lambda x:x/numchan, r)
res.append(r)
plt.plot(snrlist, r)
plt.grid()
plt.grid("on")
plt.axis([min(snrlist),max(snrlist),1e-5,1])
plt.draw()
print res
plt.legend((u"Kanallänge 1", u"Kanallänge 2", u"Kanallänge 3", u"Kanallänge 4",u"Kanallänge 5",u"Kanallänge 6"),loc=3)
plt.savefig("ber-snr_manual_channel.eps")
del dispatcher
elif(action=="short"):
tester = Tester(blocksize=1000)
tester.run(10,1000)
finally:
print "quitting"
# del dispatcher #destructor, if it didn't happen already
# clean up any further dispatchers
for i in locals().values():
if isinstance(i,TestDispatcher):
i.stopworkers()
| gpl-3.0 | 5,299,925,792,086,577,000 | 42.314286 | 214 | 0.53252 | false |
clawpack/clawpack-4.x | apps/tsunami/bowl-slosh/setrun.py | 1 | 8919 | """
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
import os
from pyclaw import data
#------------------------------
def setrun(claw_pkg='geoclaw'):
#------------------------------
"""
Define the parameters used for running Clawpack.
INPUT:
claw_pkg expected to be "geoclaw" for this setrun.
OUTPUT:
rundata - object of class ClawRunData
"""
assert claw_pkg.lower() == 'geoclaw', "Expected claw_pkg = 'geoclaw'"
ndim = 2
rundata = data.ClawRunData(claw_pkg, ndim)
#------------------------------------------------------------------
# GeoClaw specific parameters:
#------------------------------------------------------------------
rundata = setgeo(rundata) # Defined below
#------------------------------------------------------------------
# Standard Clawpack parameters to be written to claw.data:
# (or to amr2ez.data for AMR)
#------------------------------------------------------------------
clawdata = rundata.clawdata # initialized when rundata instantiated
# Set single grid parameters first.
# See below for AMR parameters.
# ---------------
# Spatial domain:
# ---------------
# Number of space dimensions:
clawdata.ndim = ndim
# Lower and upper edge of computational domain:
clawdata.xlower = -2.
clawdata.xupper = 2.
clawdata.ylower = -2.
clawdata.yupper = 2.
# Number of grid cells:
clawdata.mx = 41
clawdata.my = 41
# ---------------
# Size of system:
# ---------------
# Number of equations in the system:
clawdata.meqn = 3
# Number of auxiliary variables in the aux array (initialized in setaux)
clawdata.maux = 3
# Index of aux array corresponding to capacity function, if there is one:
clawdata.mcapa = 0
# -------------
# Initial time:
# -------------
clawdata.t0 = 0.0
# -------------
# Output times:
#--------------
# Specify at what times the results should be written to fort.q files.
# Note that the time integration stops after the final output time.
# The solution at initial time t0 is always written in addition.
clawdata.outstyle = 1
if clawdata.outstyle==1:
# Output nout frames at equally spaced times up to tfinal:
clawdata.nout = 16
clawdata.tfinal = 4.4857014654663745
elif clawdata.outstyle == 2:
# Specify a list of output times.
clawdata.tout = [0.5, 1.0] # used if outstyle == 2
clawdata.nout = len(clawdata.tout)
elif clawdata.outstyle == 3:
# Output every iout timesteps with a total of ntot time steps:
iout = 5
ntot = 50
clawdata.iout = [iout, ntot]
# ---------------------------------------------------
# Verbosity of messages to screen during integration:
# ---------------------------------------------------
# The current t, dt, and cfl will be printed every time step
# at AMR levels <= verbosity. Set verbosity = 0 for no printing.
# (E.g. verbosity == 2 means print only on levels 1 and 2.)
clawdata.verbosity = 3
# --------------
# Time stepping:
# --------------
# if dt_variable==1: variable time steps used based on cfl_desired,
# if dt_variable==0: fixed time steps dt = dt_initial will always be used.
clawdata.dt_variable = 1
# Initial time step for variable dt.
# If dt_variable==0 then dt=dt_initial for all steps:
clawdata.dt_initial = 0.0001
# Max time step to be allowed if variable dt used:
clawdata.dt_max = 1e+99
# Desired Courant number if variable dt used, and max to allow without
# retaking step with a smaller dt:
clawdata.cfl_desired = 0.75
clawdata.cfl_max = 1.0
# Maximum number of time steps to allow between output times:
clawdata.max_steps = 5000
# ------------------
# Method to be used:
# ------------------
# Order of accuracy: 1 => Godunov, 2 => Lax-Wendroff plus limiters
clawdata.order = 2
# Transverse order for 2d or 3d (not used in 1d):
clawdata.order_trans = 2
# Number of waves in the Riemann solution:
clawdata.mwaves = 3
# List of limiters to use for each wave family:
# Required: len(mthlim) == mwaves
clawdata.mthlim = [3,3,3]
# Source terms splitting:
# src_split == 0 => no source term (src routine never called)
# src_split == 1 => Godunov (1st order) splitting used,
# src_split == 2 => Strang (2nd order) splitting used, not recommended.
clawdata.src_split = 1
# --------------------
# Boundary conditions:
# --------------------
# Number of ghost cells (usually 2)
clawdata.mbc = 2
# Choice of BCs at xlower and xupper:
# 0 => user specified (must modify bcN.f to use this option)
# 1 => extrapolation (non-reflecting outflow)
# 2 => periodic (must specify this at both boundaries)
# 3 => solid wall for systems where q(2) is normal velocity
clawdata.mthbc_xlower = 1
clawdata.mthbc_xupper = 1
clawdata.mthbc_ylower = 1
clawdata.mthbc_yupper = 1
# ---------------
# AMR parameters:
# ---------------
# max number of refinement levels:
mxnest = 2
clawdata.mxnest = -mxnest # negative ==> anisotropic refinement in x,y,t
# List of refinement ratios at each level (length at least mxnest-1)
clawdata.inratx = [4,4]
clawdata.inraty = [4,4]
clawdata.inratt = [2,6]
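# Note: these inratt values are effectively ignored in this setup, because
# setgeo() below sets geodata.variable_dt_refinement_ratios = True.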
# Instead of setting these ratios, set:
# geodata.variable_dt_refinement_ratios = True
# in setgeo.
# to automatically choose refinement ratios in time based on estimate
# of maximum wave speed on all grids at each level.
# Specify type of each aux variable in clawdata.auxtype.
# This must be a list of length maux, each element of which is one of:
# 'center', 'capacity', 'xleft', or 'yleft' (see documentation).
clawdata.auxtype = ['center','center','yleft']
clawdata.tol = -1.0 # negative ==> don't use Richardson estimator
clawdata.tolsp = 0.5 # used in default flag2refine subroutine
# (Not used in geoclaw!)
clawdata.kcheck = 3 # how often to regrid (every kcheck steps)
clawdata.ibuff = 2 # width of buffer zone around flagged points
# More AMR parameters can be set -- see the defaults in pyclaw/data.py
return rundata
# end of function setrun
# ----------------------
#-------------------
def setgeo(rundata):
#-------------------
"""
Set GeoClaw specific runtime parameters.
For documentation see ....
"""
try:
geodata = rundata.geodata
except:
print "*** Error, this rundata has no geodata attribute"
raise AttributeError("Missing geodata attribute")
# == setgeo.data values ==
geodata.variable_dt_refinement_ratios = True
geodata.igravity = 1
geodata.gravity = 9.81
geodata.icoordsys = 1
# == settsunami.data values ==
geodata.sealevel = -10.
geodata.drytolerance = 1.e-3
geodata.wavetolerance = 1.e-2
geodata.depthdeep = 1.e2
geodata.maxleveldeep = 3
geodata.ifriction = 1
geodata.coeffmanning = 0.
geodata.frictiondepth = 1.e6
# == settopo.data values ==
geodata.topofiles = []
# for topography, append lines of the form
# [topotype, minlevel, maxlevel, t1, t2, fname]
geodata.topofiles.append([2, 1, 10, 0., 1.e10, 'bowl.topotype2'])
# == setdtopo.data values ==
geodata.dtopofiles = []
# for moving topography, append lines of the form: (<= 1 allowed for now!)
# [minlevel,maxlevel,fname]
# == setqinit.data values ==
geodata.iqinit = 0
geodata.qinitfiles = []
# for qinit perturbations, append lines of the form: (<= 1 allowed for now!)
# [minlev, maxlev, fname]
# == setregions.data values ==
geodata.regions = []
# to specify regions of refinement append lines of the form
# [minlevel,maxlevel,t1,t2,x1,x2,y1,y2]
# == setgauges.data values ==
geodata.gauges = []
# for gauges append lines of the form [gaugeno, x, y, t1, t2]
# == setfixedgrids.data values ==
geodata.fixedgrids = []
# for fixed grids append lines of the form
# [t1,t2,noutput,x1,x2,y1,y2,xpoints,ypoints,\
# ioutarrivaltimes,ioutsurfacemax]
#geodata.fixedgrids.append([1., 2., 4, 0., 100., 0., 100., 11, 11, 0, 0])
return rundata
# end of function setgeo
# ----------------------
if __name__ == '__main__':
# Set up run-time parameters and write all data files.
import sys
if len(sys.argv) == 2:
rundata = setrun(sys.argv[1])
else:
rundata = setrun()
rundata.write()
| bsd-3-clause | -2,397,011,731,965,025,000 | 26.358896 | 80 | 0.575737 | false |
jaracil/nxpy | examples/fib/fibcli.py | 1 | 1477 | # -*- coding: utf-8 -*-
##############################################################################
#
# pynexus, a Python library for easy playing with Nexus
# Copyright (C) 2016 by the pynexus team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pynexus
import sys
from urlparse import urlparse
if __name__ == '__main__':
"""
The argument is a standard connection string with the following structure:
protocol://[user:pass@]host[:port]/path
For example:
tcp://test:test@localhost:1717/test.fibonacci
"""
nexusClient = pynexus.Client(sys.argv[1])
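# Example invocation (assumes a local Nexus instance with test/test
# credentials, as in the docstring above):
#   python fibcli.py tcp://test:test@localhost:1717/test.fibonacci 10
# The trailing argument is forwarded below as the task parameter 'v'.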
method = urlparse(sys.argv[1]).path[1:]
try:
print(nexusClient.taskPush(method, {'v': sys.argv[2]}))
finally:
nexusClient.close()
| lgpl-3.0 | -2,999,314,142,047,001,600 | 35.02439 | 78 | 0.611374 | false |
NiclasEriksen/py-towerwars | src/numpy/f2py/__init__.py | 1 | 1195 | #!/usr/bin/env python2
from __future__ import division, absolute_import, print_function
__all__ = ['run_main', 'compile', 'f2py_testing']
import os
import sys
import subprocess
from . import f2py2e
from . import f2py_testing
from . import diagnose
from .info import __doc__
run_main = f2py2e.run_main
main = f2py2e.main
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
f = tempfile.NamedTemporaryFile(suffix='.f')
else:
f = open(source_fn, 'w')
try:
f.write(source)
f.flush()
args = ' -c -m %s %s %s'%(modulename, f.name, extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' % \
(sys.executable, args)
s, o = exec_command(c)
finally:
f.close()
return s
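# Minimal usage sketch for compile() (assumes a working Fortran compiler on
# PATH; the module and subroutine names are illustrative only):
#
#     from numpy import f2py
#     source = ("      subroutine addone(x)\n"
#               "      real*8 x\n"
#               "      x = x + 1d0\n"
#               "      end\n")
#     status = f2py.compile(source, modulename='demo')  # returns 0 on success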
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
| cc0-1.0 | -7,324,453,556,563,046,000 | 23.387755 | 70 | 0.600837 | false |
zsjohny/jumpserver | apps/users/views/profile/pubkey.py | 1 | 1689 | # ~*~ coding: utf-8 ~*~
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.utils.translation import ugettext as _
from django.views import View
from django.views.generic.edit import UpdateView
from common.utils import get_logger, ssh_key_gen
from common.permissions import (
PermissionsMixin, IsValidUser,
UserCanUpdateSSHKey,
)
from ... import forms
from ...models import User
__all__ = [
'UserPublicKeyUpdateView', 'UserPublicKeyGenerateView',
]
logger = get_logger(__name__)
class UserPublicKeyUpdateView(PermissionsMixin, UpdateView):
template_name = 'users/user_pubkey_update.html'
model = User
form_class = forms.UserPublicKeyForm
permission_classes = [IsValidUser, UserCanUpdateSSHKey]
success_url = reverse_lazy('users:user-profile')
def get_object(self, queryset=None):
return self.request.user
def get_context_data(self, **kwargs):
context = {
'app': _('Users'),
'action': _('Public key update'),
}
kwargs.update(context)
return super().get_context_data(**kwargs)
class UserPublicKeyGenerateView(PermissionsMixin, View):
permission_classes = [IsValidUser]
def get(self, request, *args, **kwargs):
username = request.user.username
private, public = ssh_key_gen(username=username, hostname='jumpserver')
request.user.public_key = public
request.user.save()
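# Only the public key is persisted on the user; the matching private key
# is returned exactly once below as a downloadable .pem and never stored.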
response = HttpResponse(private, content_type='text/plain')
filename = "{0}-jumpserver.pem".format(username)
response['Content-Disposition'] = 'attachment; filename={}'.format(filename)
return response
| gpl-2.0 | -3,068,625,369,736,953,300 | 30.277778 | 84 | 0.686797 | false |
coolhacks/python-hacks | examples/codebreaker/vigenereDictionaryHacker.py | 1 | 1270 | # Vigenere Cipher Dictionary Hacker
# http://inventwithpython.com/hacking (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenere(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenere(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # remove the newline at the end
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
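# Only prompt the user when at least 40% of the decrypted words look like
# English; a lower threshold would mostly surface false positives.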
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found.
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
| mit | 432,503,798,185,404,800 | 31.564103 | 80 | 0.624409 | false |
denys-duchier/kivy | kivy/uix/dropdown.py | 1 | 11698 | '''
Drop-Down List
==============
.. versionadded:: 1.4.0
A versatile drop-down list that can be used with custom widgets. It allows you
to display a list of widgets under a displayed widget. Unlike other toolkits,
the list of widgets can contain any type of widget: simple buttons,
images etc.
The positioning of the drop-down list is fully automatic: we will always try to
place the dropdown list in a way that the user can select an item in the list.
Basic example
-------------
A button with a dropdown list of 10 possible values. All the buttons within the
dropdown list will trigger the dropdown :meth:`DropDown.select` method. After
being called, the main button text will display the selection of the
dropdown. ::
from kivy.uix.dropdown import DropDown
from kivy.uix.button import Button
from kivy.base import runTouchApp
# create a dropdown with 10 buttons
dropdown = DropDown()
for index in range(10):
# when adding widgets, we need to specify the height manually (disabling
# the size_hint_y) so the dropdown can calculate the area it needs.
btn = Button(text='Value %d' % index, size_hint_y=None, height=44)
# for each button, attach a callback that will call the select() method
# on the dropdown. We'll pass the text of the button as the data of the
# selection.
btn.bind(on_release=lambda btn: dropdown.select(btn.text))
# then add the button inside the dropdown
dropdown.add_widget(btn)
# create a big main button
mainbutton = Button(text='Hello', size_hint=(None, None))
# show the dropdown menu when the main button is released
# note: all the bind() calls pass the instance of the caller (here, the
# mainbutton instance) as the first argument of the callback (here,
# dropdown.open.).
mainbutton.bind(on_release=dropdown.open)
# one last thing, listen for the selection in the dropdown list and
# assign the data to the button text.
dropdown.bind(on_select=lambda instance, x: setattr(mainbutton, 'text', x))
runTouchApp(mainbutton)
Extending dropdown in Kv
------------------------
You could create a dropdown directly from your kv::
#:kivy 1.4.0
<CustomDropDown>:
Button:
text: 'My first Item'
size_hint_y: None
height: 44
on_release: root.select('item1')
Label:
text: 'Unselectable item'
size_hint_y: None
height: 44
Button:
text: 'My second Item'
size_hint_y: None
height: 44
on_release: root.select('item2')
And then, create the associated python class and use it::
class CustomDropDown(DropDown):
pass
dropdown = CustomDropDown()
mainbutton = Button(text='Hello', size_hint=(None, None))
mainbutton.bind(on_release=dropdown.open)
dropdown.bind(on_select=lambda instance, x: setattr(mainbutton, 'text', x))
'''
__all__ = ('DropDown', )
from kivy.uix.scrollview import ScrollView
from kivy.properties import ObjectProperty, NumericProperty, BooleanProperty
from kivy.core.window import Window
from kivy.lang import Builder
_grid_kv = '''
GridLayout:
size_hint_y: None
height: self.minimum_size[1]
cols: 1
'''
class DropDownException(Exception):
'''DropDownException class.
'''
pass
class DropDown(ScrollView):
'''DropDown class. See module documentation for more information.
:Events:
`on_select`: data
Fired when a selection is done. The data of the selection is passed
in as the first argument and is what you pass in the :meth:`select`
method as the first argument.
`on_dismiss`:
.. versionadded:: 1.8.0
Fired when the DropDown is dismissed, either on selection or on
touching outside the widget.
'''
auto_width = BooleanProperty(True)
'''By default, the width of the dropdown will be the same as the width of
the attached widget. Set to False if you want to provide your own width.
'''
max_height = NumericProperty(None, allownone=True)
'''Indicate the maximum height that the dropdown can take. If None, it will
take the maximum height available until the top or bottom of the screen
is reached.
:attr:`max_height` is a :class:`~kivy.properties.NumericProperty` and
defaults to None.
'''
dismiss_on_select = BooleanProperty(True)
'''By default, the dropdown will be automatically dismissed when a
selection has been done. Set to False to prevent the dismiss.
:attr:`dismiss_on_select` is a :class:`~kivy.properties.BooleanProperty`
and defaults to True.
'''
auto_dismiss = BooleanProperty(True)
'''By default, the dropdown will be automatically dismissed when a
touch happens outside of it, this option allow to disable this
feature
:attr:`auto_dismiss` is a :class:`~kivy.properties.BooleanProperty`
and defaults to True.
.. versionadded:: 1.8.0
'''
attach_to = ObjectProperty(allownone=True)
'''(internal) Property that will be set to the widget to which the
drop down list is attached.
The :meth:`open` method will automatically set this property whilst
:meth:`dismiss` will set it back to None.
'''
container = ObjectProperty()
'''(internal) Property that will be set to the container of the dropdown
list. It is a :class:`~kivy.uix.gridlayout.GridLayout` by default.
'''
__events__ = ('on_select', 'on_dismiss')
def __init__(self, **kwargs):
self._win = None
if 'container' not in kwargs:
c = self.container = Builder.load_string(_grid_kv)
else:
c = None
kwargs.setdefault('do_scroll_x', False)
if 'size_hint' not in kwargs:
kwargs.setdefault('size_hint_x', None)
kwargs.setdefault('size_hint_y', None)
super(DropDown, self).__init__(**kwargs)
if c is not None:
super(DropDown, self).add_widget(c)
self.on_container(self, c)
Window.bind(on_key_down=self.on_key_down)
self.fast_bind('size', self._reposition)
def on_key_down(self, instance, key, scancode, codepoint, modifiers):
if key == 27 and self.get_parent_window():
self.dismiss()
return True
def on_container(self, instance, value):
if value is not None:
self.container.bind(minimum_size=self._container_minimum_size)
def open(self, widget):
'''Open the dropdown list and attach it to a specific widget.
Depending on the position of the widget within the window and
the height of the dropdown, the dropdown might be above or below
that widget.
'''
# ensure we are not already attached
if self.attach_to is not None:
self.dismiss()
# we will attach ourself to the main window, so ensure the
# widget we are looking for have a window
self._win = widget.get_parent_window()
if self._win is None:
raise DropDownException(
'Cannot open a dropdown list on a hidden widget')
self.attach_to = widget
widget.bind(pos=self._reposition, size=self._reposition)
self._reposition()
# attach ourself to the main window
self._win.add_widget(self)
def dismiss(self, *largs):
'''Remove the dropdown widget from the window and detach it from
the attached widget.
'''
if self.parent:
self.parent.remove_widget(self)
if self.attach_to:
self.attach_to.unbind(pos=self._reposition, size=self._reposition)
self.attach_to = None
self.dispatch('on_dismiss')
def on_dismiss(self):
pass
def select(self, data):
'''Call this method to trigger the `on_select` event with the `data`
selection. The `data` can be anything you want.
'''
self.dispatch('on_select', data)
if self.dismiss_on_select:
self.dismiss()
def on_select(self, data):
pass
def _container_minimum_size(self, instance, size):
if self.max_height:
self.height = min(size[1], self.max_height)
self.do_scroll_y = size[1] > self.max_height
else:
self.height = size[1]
self.do_scroll_y = True
def add_widget(self, *largs):
if self.container:
return self.container.add_widget(*largs)
return super(DropDown, self).add_widget(*largs)
def remove_widget(self, *largs):
if self.container:
return self.container.remove_widget(*largs)
return super(DropDown, self).remove_widget(*largs)
def clear_widgets(self):
if self.container:
return self.container.clear_widgets()
return super(DropDown, self).clear_widgets()
def on_touch_down(self, touch):
if super(DropDown, self).on_touch_down(touch):
return True
if self.collide_point(*touch.pos):
return True
if self.attach_to and self.attach_to.collide_point(*touch.pos):
return True
if self.auto_dismiss:
self.dismiss()
def on_touch_up(self, touch):
if super(DropDown, self).on_touch_up(touch):
return True
if 'button' in touch.profile and touch.button.startswith('scroll'):
return
if self.auto_dismiss:
self.dismiss()
def _reposition(self, *largs):
# calculate the coordinate of the attached widget in the window
# coordinate system
win = self._win
widget = self.attach_to
if not widget or not win:
return
wx, wy = widget.to_window(*widget.pos)
wright, wtop = widget.to_window(widget.right, widget.top)
# set width and x
if self.auto_width:
self.width = wright - wx
# ensure the dropdown list doesn't get out on the X axis, with a
# preference to 0 in case the list is too wide.
x = wx
if x + self.width > win.width:
x = win.width - self.width
if x < 0:
x = 0
self.x = x
# determine if we display the dropdown upper or lower to the widget
h_bottom = wy - self.height
h_top = win.height - (wtop + self.height)
if h_bottom > 0:
self.top = wy
elif h_top > 0:
self.y = wtop
else:
# none of both top/bottom have enough place to display the
# widget at the current size. Take the best side, and fit to
# it.
height = max(h_bottom, h_top)
if height == h_bottom:
self.top = wy
self.height = wy
else:
self.y = wtop
self.height = win.height - wtop
if __name__ == '__main__':
from kivy.uix.button import Button
from kivy.base import runTouchApp
def show_dropdown(button, *largs):
dp = DropDown()
dp.bind(on_select=lambda instance, x: setattr(button, 'text', x))
for i in range(10):
item = Button(text='hello %d' % i, size_hint_y=None, height=44)
item.bind(on_release=lambda btn: dp.select(btn.text))
dp.add_widget(item)
dp.open(button)
def touch_move(instance, touch):
instance.center = touch.pos
btn = Button(text='SHOW', size_hint=(None, None), pos=(300, 200))
btn.bind(on_release=show_dropdown, on_touch_move=touch_move)
runTouchApp(btn)
| mit | 86,564,783,490,734,420 | 32.232955 | 80 | 0.617712 | false |
TeensyPass/teensycli | tests/test_gpg_data.py | 1 | 1391 | """
Tests the password gpg_data logic
"""
from os.path import join
from unittest.mock import patch, MagicMock
from teensy_pass.data.gpg_data import save, load, list_db, remove
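# Note: each @patch below targets the name as it is looked up inside
# teensy_pass.data.gpg_data (not where it was originally defined), which is
# how mock.patch resolves replacements.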
@patch('teensy_pass.data.gpg_data.check_output')
@patch('teensy_pass.data.gpg_data.dumps')
@patch('builtins.open')
def test_save(open, dumps, check_output):
dumps.return_value = ''
save('foo', {'bar': 'foo'}, 'key', home='home')
check_output.assert_called_with("echo '' | gpg -ea -r key", shell=True)
open.assert_called_with(join('home', 'foo'), 'wb')
dumps.assert_called_with({'bar': 'foo'})
@patch('teensy_pass.data.gpg_data.check_output')
@patch('teensy_pass.data.gpg_data.loads')
@patch('builtins.open')
def test_load(open, loads, check_output):
loads.return_value = 'this is the end'
assert load('foo', home='home') == 'this is the end'
cmd = 'gpg --quiet --no-tty --decrypt {0}'.format(join('home', 'foo'))
check_output.assert_called_with(cmd, shell=True)
@patch('teensy_pass.data.gpg_data.walk')
def test_list_db(walk):
walk.return_value = [('foo', 'arg', ['file1', 'file2']),
('.', 'fdsf', ['file3', 'file4'])]
assert list_db(home='home') == 'foo/file1\nfoo/file2\nfile3\nfile4'
@patch('teensy_pass.data.gpg_data.os')
def test_remove(os):
remove('bar', home='foo')
os.remove.assert_called_with(join('foo', 'bar'))
| gpl-2.0 | 7,745,019,199,807,140,000 | 37.638889 | 75 | 0.640546 | false |
makeev/django-boilerplate | back/project/settings/settings.py | 1 | 5819 | import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
ROOT_DIR = os.path.dirname(BASE_DIR)
SITE_ID = 1
SECRET_KEY = '__FAKE__'
INTERNAL_IPS = [
'0.0.0.0',
'127.0.0.1',
]
ALLOWED_HOSTS = [
'0.0.0.0',
'127.0.0.1',
]
ADMINS = [
'admin@localhost',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(ROOT_DIR, 'db/db.sqlite3'),
'ATOMIC_REQUESTS': True,
'AUTOCOMMIT': True,
'CONN_MAX_AGE': 10,
'OPTIONS': {
'timeout': 10,
}
}
}
REDIS_HOST = 'localhost'
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '%s:6379' % REDIS_HOST,
'TIMEOUT': 3600 * 3,
'OPTIONS': {
'DB': 0,
'PASSWORD': '',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
},
},
'constance': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '%s:6379' % REDIS_HOST,
'TIMEOUT': 600,
},
}
INSTALLED_APPS = [
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.contenttypes',
'django.contrib.staticfiles',
'logentry_admin',
'rest_framework',
'constance',
'constance.backends.database',
'project',
'main',
'adminsortable2',
]
DEBUG = True
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.gzip.GZipMiddleware',
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
'project.middleware.force_default_lang.ForceDefaultLangMiddleware',
]
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# 'project.context_processors.each_context',
],
'loaders': [
'django.template.loaders.app_directories.Loader',
'django.template.loaders.filesystem.Loader',
]
},
},
]
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',  # looks up static files in app directories
]
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
MEDIA_ROOT = os.path.join(ROOT_DIR, 'www/media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(ROOT_DIR, 'www/static')
STATIC_URL = '/static/'
FIXTURE_DIRS = [BASE_DIR]
WSGI_APPLICATION = 'project.wsgi.application'
AUTH_PASSWORD_VALIDATORS = [
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {'min_length': 6}},
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
]
# Argon2 is the winner of the 2015 Password Hashing Competition,
# a community organized open competition to select a next generation hashing algorithm.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
]
DATABASE_ROUTERS = [
# 'project.routers.FiascoRouter',
# 'project.routers.Cbl4Router',
]
# custom user model
AUTH_USER_MODEL = 'main.User'
EMAIL_CONFIRMATION_SECRET = '__SECRET__'
EMAIL_CONFIRMATION_EXPIRE_DAYS = 3
# CSRF_USE_SESSIONS = False
# CSRF_COOKIE_NAME = 'a_csrf'
# CSRF_COOKIE_PATH = '/admin/'
# CSRF_COOKIE_HTTPONLY ???
# CSRF_COOKIE_SECURE = True
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
# 'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
# 'auth.authentication.RequestUserAuthentication',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
),
'DEFAULT_RENDERER_CLASSES': (
'main.drf.CustomJSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
'EXCEPTION_HANDLER': 'main.drf.full_details_exception_handler',
'TEST_REQUEST_DEFAULT_FORMAT': 'json',
}
APPEND_SLASH = False
TIME_ZONE = 'Europe/Moscow'
USE_I18N = False
LANGUAGES = (
('en', 'English'),
)
LANGUAGE_CODE = 'en'
LOCALE_PATHS = (os.path.join(ROOT_DIR, 'locale'),)
USE_L10N = False
USE_TZ = True
DATE_FORMAT = 'd.m.Y'
SHORT_DATE_FORMAT = 'd.m.Y'
DATETIME_FORMAT = 'd.m.Y, H:i:s'
SHORT_DATETIME_FORMAT = 'd.m.Y, H:i'
TIME_FORMAT = 'H:i:s'
SHORT_TIME_FORMAT = 'H:i'
DEFAULT_FILE_STORAGE = 'project.helpers.services.ASCIIFileSystemStorage'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DEFAULT_FROM_EMAIL = 'Default Noreply <[email protected]>'
| mit | -6,002,744,229,602,046,000 | 25.536697 | 109 | 0.646154 | false |
praw-dev/praw | praw/models/subreddits.py | 1 | 6211 | """Provide the Subreddits class."""
from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union
from warnings import warn
from ..const import API_PATH
from . import Subreddit
from .base import PRAWBase
from .listing.generator import ListingGenerator
from .util import stream_generator
if TYPE_CHECKING: # pragma: no cover
import praw
class Subreddits(PRAWBase):
"""Subreddits is a Listing class that provides various subreddit lists."""
@staticmethod
def _to_list(subreddit_list):
return ",".join([str(x) for x in subreddit_list])
def default(
self, **generator_kwargs: Union[str, int, Dict[str, str]]
) -> Iterator["praw.models.Subreddit"]:
"""Return a :class:`.ListingGenerator` for default subreddits.
Additional keyword arguments are passed in the initialization of
:class:`.ListingGenerator`.
"""
return ListingGenerator(
self._reddit, API_PATH["subreddits_default"], **generator_kwargs
)
def gold(self, **generator_kwargs) -> Iterator["praw.models.Subreddit"]:
"""Alias for :meth:`.premium` to maintain backwards compatibility."""
warn(
"`subreddits.gold` has been renamed to `subreddits.premium`.",
category=DeprecationWarning,
stacklevel=2,
)
return self.premium(**generator_kwargs)
def premium(
self, **generator_kwargs: Union[str, int, Dict[str, str]]
) -> Iterator["praw.models.Subreddit"]:
"""Return a :class:`.ListingGenerator` for premium subreddits.
Additional keyword arguments are passed in the initialization of
:class:`.ListingGenerator`.
"""
return ListingGenerator(
self._reddit, API_PATH["subreddits_gold"], **generator_kwargs
)
def new(
self, **generator_kwargs: Union[str, int, Dict[str, str]]
) -> Iterator["praw.models.Subreddit"]:
"""Return a :class:`.ListingGenerator` for new subreddits.
Additional keyword arguments are passed in the initialization of
:class:`.ListingGenerator`.
"""
return ListingGenerator(
self._reddit, API_PATH["subreddits_new"], **generator_kwargs
)
def popular(
self, **generator_kwargs: Union[str, int, Dict[str, str]]
) -> Iterator["praw.models.Subreddit"]:
"""Return a :class:`.ListingGenerator` for popular subreddits.
Additional keyword arguments are passed in the initialization of
:class:`.ListingGenerator`.
"""
return ListingGenerator(
self._reddit, API_PATH["subreddits_popular"], **generator_kwargs
)
def recommended(
self,
subreddits: List[Union[str, "praw.models.Subreddit"]],
omit_subreddits: Optional[List[Union[str, "praw.models.Subreddit"]]] = None,
) -> List["praw.models.Subreddit"]:
"""Return subreddits recommended for the given list of subreddits.
:param subreddits: A list of Subreddit instances and/or subreddit names.
:param omit_subreddits: A list of Subreddit instances and/or subreddit names to
exclude from the results (Reddit's end may not work as expected).
"""
if not isinstance(subreddits, list):
raise TypeError("subreddits must be a list")
if omit_subreddits is not None and not isinstance(omit_subreddits, list):
raise TypeError("omit_subreddits must be a list or None")
params = {"omit": self._to_list(omit_subreddits or [])}
url = API_PATH["sub_recommended"].format(subreddits=self._to_list(subreddits))
return [
Subreddit(self._reddit, sub["sr_name"])
for sub in self._reddit.get(url, params=params)
]
def search(
self, query: str, **generator_kwargs: Union[str, int, Dict[str, str]]
) -> Iterator["praw.models.Subreddit"]:
"""Return a :class:`.ListingGenerator` of subreddits matching ``query``.
Subreddits are searched by both their title and description.
:param query: The query string to filter subreddits by.
Additional keyword arguments are passed in the initialization of
:class:`.ListingGenerator`.
.. seealso::
:meth:`~.search_by_name` to search by subreddit names
"""
self._safely_add_arguments(generator_kwargs, "params", q=query)
return ListingGenerator(
self._reddit, API_PATH["subreddits_search"], **generator_kwargs
)
def search_by_name(
self, query: str, include_nsfw: bool = True, exact: bool = False
) -> List["praw.models.Subreddit"]:
"""Return list of Subreddits whose names begin with ``query``.
:param query: Search for subreddits beginning with this string.
:param include_nsfw: Include subreddits labeled NSFW (default: True).
:param exact: Return only exact matches to ``query`` (default: False).
"""
result = self._reddit.post(
API_PATH["subreddits_name_search"],
data={"include_over_18": include_nsfw, "exact": exact, "query": query},
)
return [self._reddit.subreddit(x) for x in result["names"]]
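# Usage sketch (assumes an authenticated ``praw.Reddit`` instance bound to
# ``reddit``):
#   matches = reddit.subreddits.search_by_name("redditdev", exact=True)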
def search_by_topic(self, query: str) -> List["praw.models.Subreddit"]:
"""Return list of Subreddits whose topics match ``query``.
:param query: Search for subreddits relevant to the search topic.
.. note::
As of 09/01/2020, this endpoint always returns 404.
"""
result = self._reddit.get(
API_PATH["subreddits_by_topic"], params={"query": query}
)
return [self._reddit.subreddit(x["name"]) for x in result if x.get("name")]
def stream(
self, **stream_options: Union[str, int, Dict[str, str]]
) -> Iterator["praw.models.Subreddit"]:
"""Yield new subreddits as they are created.
Subreddits are yielded oldest first. Up to 100 historical subreddits will
initially be returned.
Keyword arguments are passed to :func:`.stream_generator`.
"""
return stream_generator(self.new, **stream_options)
| bsd-2-clause | -2,262,619,277,718,906,000 | 35.321637 | 87 | 0.628884 | false |
chrys87/fenrir | src/fenrirscreenreader/commands/vmenu-profiles/KEY/nano/Help/about_nano.py | 1 | 1049 | #!/usr/bin/env python
# -*- encoding: utf-8
from fenrirscreenreader.core import debug
class command():
def __init__(self):
pass
def initialize(self, environment):
self.env = environment
self.keyMakro = [[1, 'KEY_LEFTCTRL'],
[1, 'KEY_G'],
[0.05, 'SLEEP'],
[0, 'KEY_G'],
[0, 'KEY_LEFTCTRL']]
# run() below references self.byteMakro for BYTE-type shortcuts; give it a
# safe empty default since this command only defines a key macro.
self.byteMakro = []
def shutdown(self):
pass
def getDescription(self):
return "Learn about the Nano text editor."
def run(self):
self.env['runtime']['outputManager'].presentText(
"Okay, loading the information about Nano.", interrupt=True)
if self.env['runtime']['inputManager'].getShortcutType() in ['KEY']:
self.env['runtime']['inputManager'].sendKeys(self.keyMakro)
elif self.env['runtime']['inputManager'].getShortcutType() in ['BYTE']:
self.env['runtime']['byteManager'].sendBytes(self.byteMakro)
def setCallback(self, callback):
pass
| lgpl-3.0 | -2,095,288,501,178,641,700 | 30.787879 | 79 | 0.553861 | false |
jomyhuang/sdwle | SDWLE/agents/trade/possible_play.py | 1 | 6263 | from SDWLE.agents.trade.util import Util
from functools import reduce
class PossiblePlay:
def __init__(self, cards, available_mana):
if len(cards) == 0:
raise Exception("PossiblePlay cards is empty")
self.cards = cards
self.available_mana = available_mana
def card_mana(self):
def eff_mana(card):
if card.name == "The Coin":
return -1
else:
return card.mana_cost()
return reduce(lambda s, c: s + eff_mana(c), self.cards, 0)
def sorted_mana(self):
return Util.reverse_sorted(map(lambda c: c.mana_cost(), self.cards))
def wasted(self):
return self.available_mana - self.card_mana()
def value(self):
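# Scoring heuristic: spend as much mana as possible (unspent mana is
# penalized hardest), prefer leading with the most expensive cards, hold
# the hero power back while below 6 available mana, and slightly
# discourage spending The Coin.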
res = self.card_mana()
wasted = self.wasted()
if wasted < 0:
raise Exception("Too Much Mana")
res += wasted * -100000000000
factor = 100000000
for card_mana in self.sorted_mana():
res += card_mana * factor
factor = factor / 10
if self.has_hero_power() and self.available_mana < 6:
res -= 10000000000000000
if any(map(lambda c: c.name == "The Coin", self.cards)):
res -= 100
return res
def has_hero_power(self):
for card in self.cards:
if card.name == 'Hero Power':
return True
return False
def first_card(self):
if self.has_hero_power():
for card in self.cards:
if card.name == 'Hero Power':
return card
raise Exception("bad")
else:
return self.cards[0]
def __str__(self):
names = [c.name for c in self.cards]
s = str(names)
return "{} {}".format(s, self.value())
class CoinPlays:
def coin(self):
cards = [c for c in filter(lambda c: c.name == 'The Coin', self.cards)]
return cards[0]
def raw_plays_with_coin(self):
res = []
if self.has_coin():
coinPlays = self.after_coin().raw_plays()
for play in coinPlays:
cards = [self.coin()] + play
res.append(cards)
return res
def raw_plays(self):
res = []
for play in self.raw_plays_without_coin():
res.append(play)
for play in self.raw_plays_with_coin():
res.append(play)
return res
def has_coin(self):
return any(map(lambda c: c.name == "The Coin", self.cards))
def cards_without_coin(self):
return Util.filter_out_one(self.cards, lambda c: c.name == "The Coin")
def after_coin(self):
return PossiblePlays(self.cards_without_coin(), self.mana + 1)
def without_coin(self):
return PossiblePlays(self.cards_without_coin(), self.mana)
class HeroPowerCard:
def __init__(self):
self.mana = 2
self.name = "Hero Power"
self.player = None
def can_use(self, player, game):
return True
def mana_cost(self):
return 2
class PossiblePlays(CoinPlays):
def __init__(self, cards, mana, allow_hero_power=True):
self.cards = cards
self.mana = mana
self.allow_hero_power = allow_hero_power
def possible_is_pointless_coin(self, possible):
if len(possible) != 1 or possible[0].name != "The Coin":
return False
cards_playable_after_coin = [card for card in filter(lambda c: c.mana - 1 == self.mana, self.cards)]
return len(cards_playable_after_coin) == 0
def raw_plays_without_coin(self):
res = []
def valid_card(card):
saved_mana = card.player.mana
card.player.mana = self.mana
usable = card.can_use(card.player, card.player.game)
card.player.mana = saved_mana
return usable
possible = [card for card in
filter(valid_card, self.cards)]
if self.possible_is_pointless_coin(possible):
possible = []
if self.mana >= 2 and self.allow_hero_power:
possible.append(HeroPowerCard())
if len(possible) == 0:
return [[]]
for card in possible:
rest = self.cards[:]  # copy, so removing the chosen card leaves self.cards intact
if card.name == 'Hero Power':
f_plays = PossiblePlays(rest,
self.mana - card.mana_cost(),
allow_hero_power=False).raw_plays()
else:
rest.remove(card)
f_plays = PossiblePlays(rest,
self.mana - card.mana_cost(),
allow_hero_power=self.allow_hero_power).raw_plays()
for following_play in f_plays:
combined = [card] + following_play
res.append(combined)
res = Util.uniq_by_sorted(res)
return res
def plays_inner(self):
res = [PossiblePlay(raw, self.mana) for raw in self.raw_plays() if len(raw) > 0]
res = sorted(res, key=PossiblePlay.value)
res.reverse()
return res
def plays(self):
return self.plays_inner()
def __str__(self):
res = []
for play in self.plays():
res.append(play.__str__())
return str.join("\n", res)
class PlayMixin:
def play_one_card(self, player):
if len(player.minions) == 7:
return
if player.game.game_ended:
return
allow_hero_power = (not player.hero.power.used) and player.hero.health > 2
plays = PossiblePlays(player.hand, player.mana, allow_hero_power=allow_hero_power).plays()
if len(plays) > 0:
play = plays[0]
if len(play.cards) == 0:
raise Exception("play has no cards")
card = play.first_card()
if card.name == 'Hero Power':
player.hero.power.use()
else:
self.last_card_played = card
player.game.play_card(card)
return card
def play_cards(self, player):
card = self.play_one_card(player)
if card:
self.play_cards(player)
| mit | -9,081,206,652,314,761,000 | 27.085202 | 108 | 0.531055 | false |
strummerTFIU/TFG-IsometricMaps | LAStools/ArcGIS_toolbox/scripts_production/las2demPro.py | 1 | 8098 | #
# las2demPro.py
#
# (c) 2013, martin isenburg - http://rapidlasso.com
# rapidlasso GmbH - fast tools to catch reality
#
# uses las2dem.exe to raster a folder of LiDAR files
#
# LiDAR input: LAS/LAZ/BIN/TXT/SHP/BIL/ASC/DTM
# raster output: BIL/ASC/IMG/TIF/DTM/PNG/JPG
#
# for licensing see http://lastools.org/LICENSE.txt
#
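# The ArcGIS toolbox passes its dialog fields as positional sys.argv entries
# in exactly the order consumed below (input folder and wildcards first,
# then step, kill, raster attribute, output style, lighting, ..., with the
# verbose flag as the last argument).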
import sys, os, arcgisscripting, subprocess
def check_output(command,console):
if console == True:
process = subprocess.Popen(command)
else:
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
output,error = process.communicate()
returncode = process.poll()
return returncode,output
### create the geoprocessor object
gp = arcgisscripting.create(9.3)
### report that something is happening
gp.AddMessage("Starting las2dem production ...")
### get number of arguments
argc = len(sys.argv)
### report arguments (for debug)
#gp.AddMessage("Arguments:")
#for i in range(0, argc):
# gp.AddMessage("[" + str(i) + "]" + sys.argv[i])
### get the path to LAStools
lastools_path = os.path.dirname(os.path.dirname(os.path.dirname(sys.argv[0])))
### make sure the path does not contain spaces
if lastools_path.count(" ") > 0:
gp.AddMessage("Error. Path to .\\lastools installation contains spaces.")
gp.AddMessage("This does not work: " + lastools_path)
gp.AddMessage("This would work: C:\\software\\lastools")
sys.exit(1)
### complete the path to where the LAStools executables are
lastools_path = lastools_path + "\\bin"
### check if path exists
if os.path.exists(lastools_path) == False:
gp.AddMessage("Cannot find .\\lastools\\bin at " + lastools_path)
sys.exit(1)
else:
gp.AddMessage("Found " + lastools_path + " ...")
### create the full path to the las2dem executable
las2dem_path = lastools_path+"\\las2dem.exe"
### check if executable exists
if os.path.exists(lastools_path) == False:
gp.AddMessage("Cannot find las2dem.exe at " + las2dem_path)
sys.exit(1)
else:
gp.AddMessage("Found " + las2dem_path + " ...")
### create the command string for las2dem.exe
command = ['"'+las2dem_path+'"']
### maybe use '-verbose' option
if sys.argv[argc-1] == "true":
command.append("-v")
### counting up the arguments
c = 1
### add input LiDAR
wildcards = sys.argv[c+1].split()
for wildcard in wildcards:
command.append("-i")
command.append('"' + sys.argv[c] + "\\" + wildcard + '"')
c = c + 2
### maybe use a user-defined step size
if sys.argv[c] != "1":
command.append("-step")
command.append(sys.argv[c].replace(",","."))
c = c + 1
### maybe use a user-defined kill
if sys.argv[c] != "100":
command.append("-kill")
command.append(sys.argv[c].replace(",","."))
c = c + 1
### what should we raster
if sys.argv[c] == "slope":
command.append("-slope")
elif sys.argv[c] == "intensity":
command.append("-intensity")
elif sys.argv[c] == "rgb":
command.append("-rgb")
c = c + 1
### what should we output
if sys.argv[c] == "hillshade":
command.append("-hillshade")
elif sys.argv[c] == "gray ramp":
command.append("-gray")
elif sys.argv[c] == "false colors":
command.append("-false")
### do we have special lighting for hillshade
if sys.argv[c] == "hillshade":
if (sys.argv[c+1] != "north east") or (sys.argv[c+2] != "1 pm"):
command.append("-light")
if sys.argv[c+1] == "north":
command.append("0")
command.append("1.41421")
elif sys.argv[c+1] == "south":
command.append("0")
command.append("-1.41421")
elif sys.argv[c+1] == "east":
command.append("1.41421")
command.append("0")
elif sys.argv[c+1] == "west":
command.append("-1.41421")
command.append("0")
elif sys.argv[c+1] == "north east":
command.append("1")
command.append("1")
elif sys.argv[c+1] == "south east":
command.append("1")
command.append("-1")
elif sys.argv[c+1] == "north west":
command.append("-1")
command.append("1")
else: ### if sys.argv[c+1] == "south west"
command.append("-1")
command.append("-1")
if sys.argv[c+2] == "noon":
command.append("100")
elif sys.argv[c+2] == "1 pm":
command.append("2")
elif sys.argv[c+2] == "3 pm":
command.append("1")
elif sys.argv[c+2] == "6 pm":
command.append("0.5")
else: ### if sys.argv[c+2] == "9 pm"
command.append("0.1")
### do we have a min max value for colors
if (sys.argv[c] == "gray ramp") or (sys.argv[c] == "false colors"):
if (sys.argv[c+3] != "#") and (sys.argv[c+4] != "#"):
command.append("-set_min_max")
command.append(sys.argv[c+3].replace(",","."))
command.append(sys.argv[c+4].replace(",","."))
c = c + 5
### what should we triangulate
if sys.argv[c] == "ground points only":
command.append("-keep_class")
command.append("2")
command.append("-extra_pass")
elif sys.argv[c] == "ground and keypoints":
command.append("-keep_class")
command.append("2")
command.append("8")
command.append("-extra_pass")
elif sys.argv[c] == "ground and buildings":
command.append("-keep_class")
command.append("2")
command.append("6")
command.append("-extra_pass")
elif sys.argv[c] == "ground and vegetation":
command.append("-keep_class")
command.append("2")
command.append("3")
command.append("4")
command.append("5")
command.append("-extra_pass")
elif sys.argv[c] == "ground and objects":
command.append("-keep_class")
command.append("2")
command.append("3")
command.append("4")
command.append("5")
command.append("6")
command.append("-extra_pass")
elif sys.argv[c] == "last return only":
command.append("-last_only")
command.append("-extra_pass")
elif sys.argv[c] == "first return only":
command.append("-first_only")
command.append("-extra_pass")
c = c + 1
### should we use the tile bounding box
if sys.argv[c] == "true":
command.append("-use_tile_bb")
c = c + 1
### do we have lakes
if sys.argv[c] != "#":
command.append("-lakes")
command.append('"'+sys.argv[c]+'"')
c = c + 1
### do we have creeks
if sys.argv[c] != "#":
command.append("-creeks")
command.append('"'+sys.argv[c]+'"')
c = c + 1
### maybe an output format was selected
if sys.argv[c] != "#":
command.append("-o" + sys.argv[c])
c = c + 1
### maybe an output directory was selected
if sys.argv[c] != "#":
command.append("-odir")
command.append('"'+sys.argv[c]+'"')
c = c + 1
### maybe an output appendix was selected
if sys.argv[c] != "#":
command.append("-odix")
command.append('"'+sys.argv[c]+'"')
c = c + 1
### maybe we should run on multiple cores
if sys.argv[c] != "1":
command.append("-cores")
command.append(sys.argv[c])
c = c + 1
### maybe there are additional input options
if sys.argv[c] != "#":
additional_options = sys.argv[c].split()
for option in additional_options:
command.append(option)
### report command string
gp.AddMessage("LAStools command line:")
command_length = len(command)
command_string = str(command[0])
command[0] = command[0].strip('"')
for i in range(1, command_length):
command_string = command_string + " " + str(command[i])
command[i] = command[i].strip('"')
gp.AddMessage(command_string)
### run command
returncode,output = check_output(command, False)
### report output of las2dem
gp.AddMessage(str(output))
### check return code
if returncode != 0:
gp.AddMessage("Error. las2dem failed.")
sys.exit(1)
### report happy end
gp.AddMessage("Success. las2dem done.")
| mit | -6,832,320,602,952,765,000 | 28.104089 | 130 | 0.587059 | false |
mozilla/kuma | kuma/users/adapters.py | 1 | 13369 | from allauth.account.adapter import DefaultAccountAdapter, get_adapter
from allauth.account.models import EmailAddress
from allauth.account.utils import cleanup_email_addresses
from allauth.exceptions import ImmediateHttpResponse
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from allauth.socialaccount.models import SocialLogin
from django import forms
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.db.models import Q
from django.shortcuts import redirect, render
from django.utils.cache import add_never_cache_headers
from django.utils.translation import ugettext_lazy as _
from waffle import switch_is_active
from kuma.core.urlresolvers import reverse
from .constants import USERNAME_CHARACTERS, USERNAME_REGEX
from .models import UserBan
REMOVE_BUG_URL = (
'https://bugzilla.mozilla.org/enter_bug.cgi?'
'&product=developer.mozilla.org'
'&component=User%20management'
'&short_desc=Account%20deletion%20request%20for%20[username]'
'&comment=Please%20delete%20my%20MDN%20account.%20My%20username%20is%3A'
'%0D%0A%0D%0A[username]'
'&status_whiteboard=[account-mod]'
'&defined_groups=1'
'&groups=mozilla-employee-confidential')
REMOVE_MESSAGE = _("Sorry, you must have at least one connected account so "
"you can sign in. To disconnect this account connect a "
"different one first. To delete your MDN profile please "
'<a href="%(bug_form_url)s" rel="nofollow">file a bug</a>.')
USERNAME_EMAIL = _('An email address cannot be used as a username.')
class KumaAccountAdapter(DefaultAccountAdapter):
def is_open_for_signup(self, request):
"""
We disable the signup with regular accounts as we require GitHub
(for now)
"""
return False
def clean_username(self, username):
"""
When signing up make sure the username isn't already used by
a different user, and doesn't contain invalid characters.
"""
# We have stricter username requirements than django-allauth,
# because we don't want to allow '@' in usernames. So we check
# that before calling super() to make sure we catch those
# problems and show our error messages.
if '@' in username:
raise forms.ValidationError(USERNAME_EMAIL)
if not USERNAME_REGEX.match(username):
raise forms.ValidationError(USERNAME_CHARACTERS)
username = super(KumaAccountAdapter, self).clean_username(username)
if get_user_model().objects.filter(username=username).exists():
raise forms.ValidationError(_('The username you entered '
'already exists.'))
return username
def message_templates(self, *names):
return tuple('messages/%s.txt' % name for name in names)
def add_message(self, request, level, message_template,
message_context={}, extra_tags='', *args, **kwargs):
"""
Adds an extra "account" tag to the success and error messages.
"""
# let's ignore some messages
if message_template.endswith(self.message_templates('logged_in',
'logged_out')):
return
# promote the "account_connected" message to success
if message_template.endswith(self.message_templates('account_connected')):
level = messages.SUCCESS
# when a next URL is set because of a multi step sign-in
# (e.g. sign-in with github, verified mail is found in other
# social accounts, agree to first log in with other to connect
# instead) and the next URL is not the edit profile page (which
# would indicate the start of the sign-in process from the edit
# profile page) we ignore the "account connected" message
# as it would be misleading
# Bug 1229906#c2 - need from "create new account" page
user_url = reverse('users.user_edit',
kwargs={'username': request.user.username})
next_url = request.session.get('sociallogin_next_url', None)
if next_url != user_url:
return
# and add an extra tag to the account messages
extra_tag = 'account'
if extra_tags:
extra_tags += ' '
extra_tags += extra_tag
super(KumaAccountAdapter, self).add_message(request, level,
message_template,
message_context,
extra_tags,
*args, **kwargs)
def save_user(self, request, user, form, commit=True):
super(KumaAccountAdapter, self).save_user(request, user, form,
commit=False)
is_github_url_public = form.cleaned_data.get('is_github_url_public')
user.is_github_url_public = is_github_url_public
if commit: # pragma: no cover
# commit will be True, unless extended by a derived class
user.save()
return user
class KumaSocialAccountAdapter(DefaultSocialAccountAdapter):
def is_open_for_signup(self, request, sociallogin):
"""
We specifically enable social accounts as a way to signup
because the default adapter uses the account adpater above
as the default.
"""
allowed = True
if switch_is_active('registration_disabled'):
allowed = False
# bug 1291892: Don't confuse next login with connecting accounts
if not allowed:
for key in ('socialaccount_sociallogin', 'sociallogin_provider'):
try:
del request.session[key]
except KeyError: # pragma: no cover
pass
return allowed
def validate_disconnect(self, account, accounts):
"""
Validate whether or not the socialaccount account can be
safely disconnected.
"""
if len(accounts) == 1:
raise forms.ValidationError(REMOVE_MESSAGE %
{'bug_form_url': REMOVE_BUG_URL})
def pre_social_login(self, request, sociallogin):
"""
Invoked just after a user successfully authenticates via a
social provider, but before the login is actually processed.
We use it to:
1. Check if the user is connecting accounts via the signup page.
2. Store the name of the socialaccount provider in the user's session.
TODO: When legacy Persona sessions are cleared (Nov 1 2016), this
function can be simplified.
"""
session_login_data = request.session.get('socialaccount_sociallogin', None)
request_login = sociallogin
# Is there already a sociallogin_provider in the session?
if session_login_data:
session_login = SocialLogin.deserialize(session_login_data)
# If the provider in the session is different from the provider in the
# request, the user is connecting a new provider to an existing account
if session_login.account.provider != request_login.account.provider:
# Does the request sociallogin match an existing user?
if not request_login.is_existing:
# go straight back to signup page with an error message
# BEFORE allauth over-writes the session sociallogin
level = messages.ERROR
message = "socialaccount/messages/account_not_found.txt"
get_adapter().add_message(request, level, message)
raise ImmediateHttpResponse(
redirect('socialaccount_signup')
)
# Is the user banned?
if sociallogin.is_existing:
bans = UserBan.objects.filter(user=sociallogin.user,
is_active=True)
if bans.exists():
banned_response = render(request, 'users/user_banned.html', {
'bans': bans,
'path': request.path
})
add_never_cache_headers(banned_response)
raise ImmediateHttpResponse(banned_response)
# sociallogin_provider is used in the UI to indicate what method was
# used to login to the website. The session variable
# 'socialaccount_sociallogin' has the same data, but will be dropped at
# the end of login.
request.session['sociallogin_provider'] = (sociallogin
.account.provider)
request.session.modified = True
def get_connect_redirect_url(self, request, socialaccount):
"""
Returns the default URL to redirect to after successfully
connecting a social account.
"""
assert request.user.is_authenticated
user_url = reverse('users.user_edit',
kwargs={'username': request.user.username})
return user_url
def save_user(self, request, sociallogin, form=None):
"""
Checks for an existing user (via verified email addresses within the
social login object) and, if one is found, associates the incoming
social account with that existing user instead of a new user.
It also removes the "socialaccount_sociallogin" key from the session.
If the "socialaccount_sociallogin" key remains in the session, then the
user will be unable to connect a second account unless they log out and
log in again. (TODO: Check if this part of the method is still
needed/used. I suspect not.)
"""
# We have to call get_existing_user() again. The result of the earlier
# call (within the is_auto_signup_allowed() method), can't be cached as
# an attribute on the instance because a different instance of this
# class is used when calling this method from the one used when calling
# is_auto_signup_allowed().
user = get_existing_user(sociallogin)
if user:
# We can re-use an existing user instead of creating a new one.
# Let's guarantee this user has an unusable password, just in case
# we're recovering an old user that has never had this done before.
user.set_unusable_password()
# This associates this new social account with the existing user.
sociallogin.connect(request, user)
# Since the "connect" call above does not add any email addresses
# from the social login that are missing from the user's current
# associated set, let's add them here.
add_user_email(request, user, sociallogin.email_addresses)
# Now that we've successfully associated a GitHub/Google social
# account with this existing user, let's delete all of the user's
# associated Persona social accounts (if any). Users may have
# multiple associated Persona social accounts (each identified
# by a unique email address).
user.socialaccount_set.filter(provider='persona').delete()
else:
user = super().save_user(request, sociallogin, form)
try:
del request.session['socialaccount_sociallogin']
except KeyError: # pragma: no cover
pass
return user
def is_auto_signup_allowed(self, request, sociallogin):
"""
We allow "auto-signup" (basically skipping the sign-up form page) only
if there is an existing user that we can re-use instead of creating
a new one.
"""
return bool(get_existing_user(sociallogin))
def get_existing_user(sociallogin):
"""
Attempts to find an existing user that is associated with a verified email
address that matches one of the verified email addresses within the
"sociallogin" object.
"""
emails = Q()
for email_address in sociallogin.email_addresses:
if email_address.verified:
emails |= Q(emailaddress__email=email_address.email)
if emails:
# Users can have multiple associated EmailAddress objects, so
# let's use "distinct()" to remove any duplicate users.
users = list(get_user_model().objects
.filter(emails,
emailaddress__verified=True)
.distinct())
# For now, we're only going to return a user if there's only one.
if len(users) == 1:
return users[0]
return None
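# Illustrative equivalence (hypothetical addresses): for a social login
# carrying verified addresses a@x.org and b@y.org, the lookup above matches
#   get_user_model().objects.filter(
#       Q(emailaddress__email="a@x.org") | Q(emailaddress__email="b@y.org"),
#       emailaddress__verified=True).distinct()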
def add_user_email(request, user, addresses):
"""
This is based on allauth.account.utils.setup_user_email, but targets
the addition of email-address objects to an existing user.
"""
for a in cleanup_email_addresses(request, addresses)[0]:
if not EmailAddress.objects.filter(user=user, email=a.email).exists():
a.user = user
a.save()
| mpl-2.0 | -4,814,583,395,379,713,000 | 43.862416 | 83 | 0.612686 | false |
saeki-masaki/cinder | cinder/backup/api.py | 1 | 15662 | # Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2014 TrilioData, Inc
# Copyright (c) 2015 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Handles all requests relating to the volume backups service.
"""
from eventlet import greenthread
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from cinder.backup import rpcapi as backup_rpcapi
from cinder import context
from cinder.db import base
from cinder import exception
from cinder.i18n import _, _LI, _LW
import cinder.policy
from cinder import quota
from cinder import utils
import cinder.volume
from cinder.volume import utils as volume_utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS
def check_policy(context, action):
target = {
'project_id': context.project_id,
'user_id': context.user_id,
}
_action = 'backup:%s' % action
cinder.policy.enforce(context, _action, target)
class API(base.Base):
"""API for interacting with the volume backup manager."""
def __init__(self, db_driver=None):
self.backup_rpcapi = backup_rpcapi.BackupAPI()
self.volume_api = cinder.volume.API()
super(API, self).__init__(db_driver)
def get(self, context, backup_id):
check_policy(context, 'get')
rv = self.db.backup_get(context, backup_id)
return dict(rv)
def delete(self, context, backup_id):
"""Make the RPC call to delete a volume backup."""
check_policy(context, 'delete')
backup = self.get(context, backup_id)
if backup['status'] not in ['available', 'error']:
msg = _('Backup status must be available or error')
raise exception.InvalidBackup(reason=msg)
# Don't allow backup to be deleted if there are incremental
# backups dependent on it.
deltas = self.get_all(context, {'parent_id': backup['id']})
        if deltas:
msg = _('Incremental backups exist for this backup.')
raise exception.InvalidBackup(reason=msg)
self.db.backup_update(context, backup_id, {'status': 'deleting'})
self.backup_rpcapi.delete_backup(context,
backup['host'],
backup['id'])
def get_all(self, context, search_opts=None):
if search_opts is None:
search_opts = {}
check_policy(context, 'get_all')
if context.is_admin:
backups = self.db.backup_get_all(context, filters=search_opts)
else:
backups = self.db.backup_get_all_by_project(context,
context.project_id,
filters=search_opts)
return backups
def _is_backup_service_enabled(self, volume, volume_host):
"""Check if there is a backup service available."""
topic = CONF.backup_topic
ctxt = context.get_admin_context()
services = self.db.service_get_all_by_topic(ctxt,
topic,
disabled=False)
for srv in services:
if (srv['availability_zone'] == volume['availability_zone'] and
srv['host'] == volume_host and
utils.service_is_up(srv)):
return True
return False
def _list_backup_services(self):
"""List all enabled backup services.
:returns: list -- hosts for services that are enabled for backup.
"""
topic = CONF.backup_topic
ctxt = context.get_admin_context()
services = self.db.service_get_all_by_topic(ctxt, topic)
return [srv['host'] for srv in services if not srv['disabled']]
def create(self, context, name, description, volume_id,
container, incremental=False, availability_zone=None):
"""Make the RPC call to create a volume backup."""
check_policy(context, 'create')
volume = self.volume_api.get(context, volume_id)
if volume['status'] != "available":
msg = (_('Volume to be backed up must be available '
'but the current status is "%s".')
% volume['status'])
raise exception.InvalidVolume(reason=msg)
volume_host = volume_utils.extract_host(volume['host'], 'host')
if not self._is_backup_service_enabled(volume, volume_host):
raise exception.ServiceNotFound(service_id='cinder-backup')
        # do quota reservation before setting volume status and backup status
try:
reserve_opts = {'backups': 1,
'backup_gigabytes': volume['size']}
reservations = QUOTAS.reserve(context, **reserve_opts)
except exception.OverQuota as e:
overs = e.kwargs['overs']
usages = e.kwargs['usages']
quotas = e.kwargs['quotas']
def _consumed(resource_name):
return (usages[resource_name]['reserved'] +
usages[resource_name]['in_use'])
for over in overs:
if 'gigabytes' in over:
msg = _LW("Quota exceeded for %(s_pid)s, tried to create "
"%(s_size)sG backup (%(d_consumed)dG of "
"%(d_quota)dG already consumed)")
LOG.warning(msg, {'s_pid': context.project_id,
's_size': volume['size'],
'd_consumed': _consumed(over),
'd_quota': quotas[over]})
raise exception.VolumeBackupSizeExceedsAvailableQuota(
requested=volume['size'],
consumed=_consumed('backup_gigabytes'),
quota=quotas['backup_gigabytes'])
elif 'backups' in over:
msg = _LW("Quota exceeded for %(s_pid)s, tried to create "
"backups (%(d_consumed)d backups "
"already consumed)")
LOG.warning(msg, {'s_pid': context.project_id,
'd_consumed': _consumed(over)})
raise exception.BackupLimitExceeded(
allowed=quotas[over])
# Find the latest backup of the volume and use it as the parent
# backup to do an incremental backup.
latest_backup = None
if incremental:
backups = self.db.backup_get_all_by_volume(context.elevated(),
volume_id)
if backups:
latest_backup = max(backups, key=lambda x: x['created_at'])
else:
msg = _('No backups available to do an incremental backup.')
raise exception.InvalidBackup(reason=msg)
parent_id = None
if latest_backup:
parent_id = latest_backup['id']
if latest_backup['status'] != "available":
msg = _('The parent backup must be available for '
'incremental backup.')
raise exception.InvalidBackup(reason=msg)
self.db.volume_update(context, volume_id, {'status': 'backing-up'})
options = {'user_id': context.user_id,
'project_id': context.project_id,
'display_name': name,
'display_description': description,
'volume_id': volume_id,
'status': 'creating',
'container': container,
'parent_id': parent_id,
'size': volume['size'],
'host': volume_host, }
try:
backup = self.db.backup_create(context, options)
QUOTAS.commit(context, reservations)
except Exception:
with excutils.save_and_reraise_exception():
try:
self.db.backup_destroy(context, backup['id'])
finally:
QUOTAS.rollback(context, reservations)
# TODO(DuncanT): In future, when we have a generic local attach,
# this can go via the scheduler, which enables
# better load balancing and isolation of services
self.backup_rpcapi.create_backup(context,
backup['host'],
backup['id'],
volume_id)
return backup
def restore(self, context, backup_id, volume_id=None):
"""Make the RPC call to restore a volume backup."""
check_policy(context, 'restore')
backup = self.get(context, backup_id)
if backup['status'] != 'available':
msg = _('Backup status must be available')
raise exception.InvalidBackup(reason=msg)
size = backup['size']
if size is None:
msg = _('Backup to be restored has invalid size')
raise exception.InvalidBackup(reason=msg)
# Create a volume if none specified. If a volume is specified check
# it is large enough for the backup
if volume_id is None:
name = 'restore_backup_%s' % backup_id
description = 'auto-created_from_restore_from_backup'
LOG.info(_LI("Creating volume of %(size)s GB for restore of "
"backup %(backup_id)s"),
{'size': size, 'backup_id': backup_id},
context=context)
volume = self.volume_api.create(context, size, name, description)
volume_id = volume['id']
while True:
volume = self.volume_api.get(context, volume_id)
if volume['status'] != 'creating':
break
greenthread.sleep(1)
else:
volume = self.volume_api.get(context, volume_id)
if volume['status'] != "available":
msg = _('Volume to be restored to must be available')
raise exception.InvalidVolume(reason=msg)
LOG.debug('Checking backup size %(bs)s against volume size %(vs)s',
{'bs': size, 'vs': volume['size']})
if size > volume['size']:
msg = (_('volume size %(volume_size)d is too small to restore '
'backup of size %(size)d.') %
{'volume_size': volume['size'], 'size': size})
raise exception.InvalidVolume(reason=msg)
LOG.info(_LI("Overwriting volume %(volume_id)s with restore of "
"backup %(backup_id)s"),
{'volume_id': volume_id, 'backup_id': backup_id},
context=context)
        # Setting the status here rather than at the start avoids unrolling
        # it for each error condition; the window left open is very small
self.db.backup_update(context, backup_id, {'status': 'restoring'})
self.db.volume_update(context, volume_id, {'status':
'restoring-backup'})
volume_host = volume_utils.extract_host(volume['host'], 'host')
self.backup_rpcapi.restore_backup(context,
volume_host,
backup['id'],
volume_id)
d = {'backup_id': backup_id,
'volume_id': volume_id, }
return d
def reset_status(self, context, backup_id, status):
"""Make the RPC call to reset a volume backup's status.
Call backup manager to execute backup status reset operation.
:param context: running context
:param backup_id: which backup's status to be reset
        :param status: backup's status to be reset
:raises: InvalidBackup
"""
# get backup info
backup = self.get(context, backup_id)
# send to manager to do reset operation
self.backup_rpcapi.reset_status(ctxt=context, host=backup['host'],
backup_id=backup_id, status=status)
def export_record(self, context, backup_id):
"""Make the RPC call to export a volume backup.
Call backup manager to execute backup export.
:param context: running context
:param backup_id: backup id to export
:returns: dictionary -- a description of how to import the backup
:returns: contains 'backup_url' and 'backup_service'
:raises: InvalidBackup
"""
check_policy(context, 'backup-export')
backup = self.get(context, backup_id)
if backup['status'] != 'available':
msg = (_('Backup status must be available and not %s.') %
backup['status'])
raise exception.InvalidBackup(reason=msg)
LOG.debug("Calling RPCAPI with context: "
"%(ctx)s, host: %(host)s, backup: %(id)s.",
{'ctx': context,
'host': backup['host'],
'id': backup['id']})
export_data = self.backup_rpcapi.export_record(context,
backup['host'],
backup['id'])
return export_data
def import_record(self, context, backup_service, backup_url):
"""Make the RPC call to import a volume backup.
:param context: running context
:param backup_service: backup service name
:param backup_url: backup description to be used by the backup driver
:raises: InvalidBackup
:raises: ServiceNotFound
"""
check_policy(context, 'backup-import')
        # NOTE(ronenkat): since we don't have a backup-scheduler
        # we need to find a host that supports the backup service
        # that was used to create the backup.
        # We send it to the first backup service host, and the backup manager
        # on that host will forward it to other hosts on the hosts list if it
        # cannot serve the required backup service itself.
hosts = self._list_backup_services()
if len(hosts) == 0:
raise exception.ServiceNotFound(service_id=backup_service)
options = {'user_id': context.user_id,
'project_id': context.project_id,
'volume_id': '0000-0000-0000-0000',
'status': 'creating', }
backup = self.db.backup_create(context, options)
first_host = hosts.pop()
self.backup_rpcapi.import_record(context,
first_host,
backup['id'],
backup_service,
backup_url,
hosts)
return backup
| apache-2.0 | 5,188,271,449,334,529,000 | 40.989276 | 78 | 0.541885 | false |
cilium-team/cilium | Documentation/conf.py | 1 | 7929 | # -*- coding: utf-8 -*-
#
# Cilium documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 12 18:34:43 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
# import sys
import re
import subprocess
import semver
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
html_logo = "images/logo.svg"
extensions = ['sphinx.ext.ifconfig',
'sphinx.ext.githubpages',
'sphinx.ext.extlinks',
'sphinxcontrib.openapi',
'sphinx_tabs.tabs',
'sphinxcontrib.spelling',
'versionwarning.extension']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Cilium'
copyright = u'2017-2020, Cilium Authors'
author = u'Cilium Authors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
release = open("../VERSION", "r").read().strip()
# Used by version warning
versionwarning_body_selector = "div.document"
# The version of Go used to compile Cilium
go_release = open("../GO_VERSION", "r").read().strip()
# The image tag for Cilium docker images
image_tag = 'v' + release
# Fetch the docs version from an environment variable.
# Map latest -> master.
# Map stable -> current version number.
branch = os.environ.get('READTHEDOCS_VERSION')
if not branch or branch == 'latest':
branch = 'HEAD'
archive_name = 'master'
chart_release = './cilium'
image_tag = 'latest'
elif branch == 'stable':
branch = release
archive_name = release
chart_release = 'cilium/cilium --version ' + release
tags.add('stable')
else:
archive_name = branch
chart_release = 'cilium/cilium --version ' + release
tags.add('stable')
relinfo = semver.parse_version_info(release)
current_release = '%d.%d' % (relinfo.major, relinfo.minor)
if relinfo.patch == 90:
next_release = '%d.%d' % (relinfo.major, relinfo.minor + 1)
else:
next_release = current_release
githubusercontent = 'https://raw.githubusercontent.com/cilium/cilium/'
scm_web = githubusercontent + branch
jenkins_branch = 'https://jenkins.cilium.io/view/Cilium-v' + current_release
archive_filename = archive_name + '.tar.gz'
archive_link = 'https://github.com/cilium/cilium/archive/' + archive_filename
archive_name = 'cilium-' + archive_name.strip('v')
project_link = 'https://github.com/cilium/cilium/projects?query=is:open+' + next_release
backport_format = 'https://github.com/cilium/cilium/pulls?q=is:open+is:pr+label:%s/' + current_release
# Store variables in the epilogue so they are globally available.
rst_epilog = """
.. |SCM_WEB| replace:: \{s}
.. |SCM_BRANCH| replace:: \{b}
.. |SCM_ARCHIVE_NAME| replace:: \{a}
.. |SCM_ARCHIVE_FILENAME| replace:: \{f}
.. |SCM_ARCHIVE_LINK| replace:: \{l}
.. |CURRENT_RELEASE| replace:: \{c}
.. |NEXT_RELEASE| replace:: \{n}
.. |CHART_RELEASE| replace:: \{h}
.. |GO_RELEASE| replace:: \{g}
.. |IMAGE_TAG| replace:: \{i}
""".format(s=scm_web, b=branch, a=archive_name, f=archive_filename, l=archive_link, c=current_release, n=next_release, h=chart_release, g=go_release, i=image_tag)
extlinks = {
'git-tree': (scm_web + "/%s", ''),
'jenkins-branch': (jenkins_branch + "/%s", ''),
'github-project': (project_link + '%s', ''),
'github-backport': (backport_format, ''),
}
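# With the definitions above a page can write, e.g., ``helm install
# |CHART_RELEASE|`` inside a parsed-literal block, or :git-tree:`some/path`
# to link a source file; rst_epilog and extlinks expand both per release.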
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
if os.uname()[4] == "aarch64":
html_theme = "sphinx_rtd_theme"
else:
html_theme = "sphinx_rtd_theme_cilium"
html_context = {
'release': release
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'logo_only': True
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['images', '_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Ciliumdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
'extraclassoptions': 'openany',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Cilium.tex', u'Cilium Documentation',
u'Cilium Authors', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'cilium', u'Cilium Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Cilium', u'Cilium Documentation',
author, 'Cilium', 'One line description of project.',
'Miscellaneous'),
]
http_strict_mode = False
# Try as hard as possible to find references
default_role = 'any'
def setup(app):
app.add_stylesheet('parsed-literal.css')
app.add_stylesheet('copybutton.css')
app.add_javascript('clipboardjs.min.js')
app.add_javascript("copybutton.js")
| apache-2.0 | 733,843,290,874,148,100 | 31.101215 | 162 | 0.667423 | false |
josiahcarlson/rpqueue | setup.py | 1 | 1133 | #!/usr/bin/env python
from distutils.core import setup
try:
with open('README.rst') as f:
long_description = f.read()
except IOError:
long_description = ''
setup(
name='rpqueue',
version=open('VERSION').read(),
description='Use Redis as a priority-enabled and time-based task queue.',
author='Josiah Carlson',
author_email='[email protected]',
url='https://github.com/josiahcarlson/rpqueue',
packages=['rpqueue'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='GNU LGPL v2.1',
long_description=long_description,
requires=['redis'],
)
| lgpl-2.1 | -6,295,639,616,045,155,000 | 32.323529 | 89 | 0.625772 | false |
vponomaryov/rally | tests/unit/plugins/openstack/wrappers/test_keystone.py | 1 | 8687 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import exceptions
import mock
from rally.plugins.openstack.wrappers import keystone
from tests.unit import test
class KeystoneWrapperTestBase(object):
def test_list_services(self):
service = mock.MagicMock()
service.id = "fake_id"
service.name = "Foobar"
service.extra_field = "extra_field"
self.client.services.list.return_value = [service]
result = list(self.wrapped_client.list_services())
self.assertEqual([("fake_id", "Foobar")], result)
self.assertEqual("fake_id", result[0].id)
self.assertEqual("Foobar", result[0].name)
self.assertFalse(hasattr(result[0], "extra_field"))
def test_wrap(self):
client = mock.MagicMock()
client.version = "dummy"
self.assertRaises(NotImplementedError, keystone.wrap, client)
def test_delete_service(self):
self.wrapped_client.delete_service("fake_id")
self.client.services.delete.assert_called_once_with("fake_id")
def test_list_roles(self):
role = mock.MagicMock()
role.id = "fake_id"
role.name = "Foobar"
role.extra_field = "extra_field"
self.client.roles.list.return_value = [role]
result = list(self.wrapped_client.list_roles())
self.assertEqual([("fake_id", "Foobar")], result)
self.assertEqual("fake_id", result[0].id)
self.assertEqual("Foobar", result[0].name)
self.assertFalse(hasattr(result[0], "extra_field"))
def test_delete_role(self):
self.wrapped_client.delete_role("fake_id")
self.client.roles.delete.assert_called_once_with("fake_id")
class KeystoneV2WrapperTestCase(test.TestCase, KeystoneWrapperTestBase):
def setUp(self):
super(KeystoneV2WrapperTestCase, self).setUp()
self.client = mock.MagicMock()
self.client.version = "v2.0"
self.wrapped_client = keystone.wrap(self.client)
def test_create_project(self):
self.wrapped_client.create_project("Foobar")
self.client.tenants.create.assert_called_once_with("Foobar")
def test_create_project_in_non_default_domain_fail(self):
self.assertRaises(
NotImplementedError, self.wrapped_client.create_project,
"Foobar", "non-default-domain")
def test_delete_project(self):
self.wrapped_client.delete_project("fake_id")
self.client.tenants.delete.assert_called_once_with("fake_id")
def test_list_projects(self):
tenant = mock.MagicMock()
tenant.id = "fake_id"
tenant.name = "Foobar"
tenant.extra_field = "extra_field"
self.client.tenants.list.return_value = [tenant]
result = list(self.wrapped_client.list_projects())
self.assertEqual([("fake_id", "Foobar", "default")], result)
self.assertEqual("fake_id", result[0].id)
self.assertEqual("Foobar", result[0].name)
self.assertEqual("default", result[0].domain_id)
self.assertFalse(hasattr(result[0], "extra_field"))
def test_create_user(self):
self.wrapped_client.create_user("foo", "bar", email="[email protected]",
project_id="tenant_id",
domain_name="default")
self.client.users.create.assert_called_once_with(
"foo", "bar", "[email protected]", "tenant_id")
def test_create_user_in_non_default_domain_fail(self):
self.assertRaises(
NotImplementedError, self.wrapped_client.create_user,
"foo", "bar", email="[email protected]", project_id="tenant_id",
domain_name="non-default-domain")
def test_delete_user(self):
self.wrapped_client.delete_user("fake_id")
self.client.users.delete.assert_called_once_with("fake_id")
def test_list_users(self):
user = mock.MagicMock()
user.id = "fake_id"
user.name = "foo"
user.tenantId = "tenant_id"
user.extra_field = "extra_field"
self.client.users.list.return_value = [user]
result = list(self.wrapped_client.list_users())
self.assertEqual([("fake_id", "foo", "tenant_id", "default")], result)
self.assertEqual("fake_id", result[0].id)
self.assertEqual("foo", result[0].name)
self.assertEqual("tenant_id", result[0].project_id)
self.assertEqual("default", result[0].domain_id)
self.assertFalse(hasattr(result[0], "extra_field"))
class KeystoneV3WrapperTestCase(test.TestCase, KeystoneWrapperTestBase):
def setUp(self):
super(KeystoneV3WrapperTestCase, self).setUp()
self.client = mock.MagicMock()
self.client.version = "v3"
self.wrapped_client = keystone.wrap(self.client)
self.client.domains.get.side_effect = exceptions.NotFound
self.client.domains.list.return_value = [
mock.MagicMock(id="domain_id")]
def test_create_project(self):
self.wrapped_client.create_project("Foobar", "domain")
self.client.projects.create.assert_called_once_with(
name="Foobar", domain="domain_id")
def test_create_project_with_non_existing_domain_fail(self):
self.client.domains.list.return_value = []
self.assertRaises(exceptions.NotFound,
self.wrapped_client.create_project,
"Foobar", "non-existing-domain")
def test_delete_project(self):
self.wrapped_client.delete_project("fake_id")
self.client.projects.delete.assert_called_once_with("fake_id")
def test_list_projects(self):
project = mock.MagicMock()
project.id = "fake_id"
project.name = "Foobar"
project.domain_id = "domain_id"
project.extra_field = "extra_field"
self.client.projects.list.return_value = [project]
result = list(self.wrapped_client.list_projects())
self.assertEqual([("fake_id", "Foobar", "domain_id")], result)
self.assertEqual("fake_id", result[0].id)
self.assertEqual("Foobar", result[0].name)
self.assertEqual("domain_id", result[0].domain_id)
self.assertFalse(hasattr(result[0], "extra_field"))
def test_create_user(self):
fake_role = mock.MagicMock(id="fake_role_id")
fake_role.name = "__member__"
self.client.roles.list.return_value = [fake_role]
self.client.users.create.return_value = mock.MagicMock(
id="fake_user_id")
self.wrapped_client.create_user(
"foo", "bar", email="[email protected]",
project_id="project_id", domain_name="domain")
self.client.users.create.assert_called_once_with(
name="foo", password="bar",
email="[email protected]", default_project="project_id",
domain="domain_id")
def test_create_user_with_non_existing_domain_fail(self):
self.client.domains.list.return_value = []
self.assertRaises(exceptions.NotFound,
self.wrapped_client.create_user, "foo", "bar",
email="[email protected]", project_id="project_id",
domain_name="non-existing-domain")
def test_delete_user(self):
self.wrapped_client.delete_user("fake_id")
self.client.users.delete.assert_called_once_with("fake_id")
def test_list_users(self):
user = mock.MagicMock()
user.id = "fake_id"
user.name = "foo"
user.default_project_id = "project_id"
user.domain_id = "domain_id"
user.extra_field = "extra_field"
self.client.users.list.return_value = [user]
result = list(self.wrapped_client.list_users())
self.assertEqual([("fake_id", "foo", "project_id", "domain_id")],
result)
self.assertEqual("fake_id", result[0].id)
self.assertEqual("foo", result[0].name)
self.assertEqual("project_id", result[0].project_id)
self.assertEqual("domain_id", result[0].domain_id)
self.assertFalse(hasattr(result[0], "extra_field"))
| apache-2.0 | -867,412,063,852,557,000 | 40.764423 | 78 | 0.626453 | false |
xbcsmith/frell | test/json_obj.py | 1 | 1900 | import logging
log = logging.getLogger()
logging.basicConfig(level=logging.DEBUG)
class StorageObject(object):
def __init__(self, storage):
self._dict = storage
def get(self, fieldName):
return self._dict.get(fieldName)
def set(self, fieldName, value):
return self._dict.__setitem__(fieldName, value)
class Field(object):
def __init__(self, fieldName, fieldType):
self.fieldName = fieldName
self.fieldType = fieldType
def __get__(self, instance, owner):
log.debug("Calling __get__ for %s", self.fieldName)
val = instance._dict.get(self.fieldName)
if issubclass(self.fieldType, (int, str)):
return val
if val is None:
val = instance._dict[self.fieldName] = {}
return self.fieldType(val)
def __set__(self, instance, value):
log.debug("Calling __set__ for %s", self.fieldName)
if isinstance(instance, StorageObject):
return instance.set(self.fieldName, value)
instance._dict[self.fieldName] = value
class Location(StorageObject):
city = Field('city', str)
zip = Field('zip', str)
class User(StorageObject):
username = Field('username', str)
uid = Field('uid', int)
location = Field('location', Location)
class JsonObject(StorageObject):
tag = Field('tag', str)
created_by = Field('created_by', User)
modified_by = Field('modified_by', User)
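# The demo below exercises lazy nested wrapping: 'modified_by' is absent from
# the backing dict, so Field.__get__ inserts an empty dict and wraps it in
# User, which is why the nested assignments on a fresh object work.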
j = JsonObject({'a' : 1, 'created_by' : {'username' : 'miiban', 'uid' : 500}})
print "Created by:", j.created_by
print "Modified by:", j.modified_by
print "Modified by username:", j.modified_by.username
j.modified_by.username = 'bcsmit'
j.modified_by.uid = 501
print "Modified by username:", j.modified_by.username
print "Modified by zip:", j.modified_by.location.zip
j.modified_by.location.zip = 27511
print "Modified by zip:", j.modified_by.location.zip
| apache-2.0 | 8,738,986,348,364,620,000 | 31.20339 | 78 | 0.647895 | false |
Arkapravo/morse-0.6 | src/morse/sensors/stereo_unit.py | 1 | 2329 | import logging; logger = logging.getLogger("morse." + __name__)
from morse.core.services import async_service
import morse.core.sensor
import bge
from functools import partial
class StereoUnitClass(morse.core.sensor.MorseSensorClass):
""" Base for stereo pairs
It is used to link two camera objects, and export the images
as a stereo pair.
"""
def __init__(self, obj, parent=None):
""" Constructor method.
Receives the reference to the Blender object.
The second parameter should be the name of the object's parent.
"""
logger.info('%s initialization' % obj.name)
# Call the constructor of the parent class
super(self.__class__,self).__init__(obj, parent)
self.num_cameras = 0
self.camera_list = []
# Create a list of the cameras attached to this component
for child in obj.children:
# Skip this object if it is not a component
# It is most likely just a geometric shape object
try:
child['Component_Tag']
except KeyError as detail:
continue
camera_name = child.name
# Store only the name of the camera
# All data from the camera can be accessed later
# by using bge.logic.componentDict[camera_name],
# which will return the instance of the camera object
self.camera_list.append(camera_name)
self.num_cameras += 1
logger.info("Stereo Unit has %d cameras" % self.num_cameras)
logger.info('Component initialized')
def capture_completion(self, answer):
self._expected_answer-= 1
if self._expected_answer == 0:
status, res = answer
self.completed(status, res)
def interrupt(self):
for camera in self.camera_list:
camera_instance = bge.logic.componentDict[camera]
camera_instance.interrupt()
@async_service
def capture(self, n):
self._expected_answer = self.num_cameras
for camera in self.camera_list:
camera_instance = bge.logic.componentDict[camera]
camera_instance.capture(partial(self.capture_completion), n)
def default_action(self):
""" Main function of this component. """
pass
| bsd-3-clause | -2,536,339,978,277,546,000 | 34.287879 | 72 | 0.617003 | false |
SumiTomohiko/Yog | tests/test_dict.py | 1 | 2284 | # -*- coding: utf-8 -*-
from testcase import TestCase
class TestDict(TestCase):
def test_literal0(self):
self._test("""
d = {}
puts(d.size)
""", """0
""")
def test_literal5(self):
self._test("""
d = { 42: 26 }
puts(d[42])
""", """26
""")
def test_literal10(self):
self._test("""
d = { 42: 26, }
puts(d[42])
""", """26
""")
def test_literal20(self):
self._test("""
d = { 42: 26, "foo": "bar" }
puts(d[42])
""", """26
""")
def test_literal30(self):
self._test("""
d = { 'foo: "bar" }
puts(d['foo])
""", """bar
""")
def test_dict0(self):
self._test("""
def foo(d)
puts(d[42])
end
d = Dict.new()
d[42] = 26
foo(d)
""", """26
""")
def test_dict10(self):
self._test("""
def foo(d)
puts(d[4611686018427387904])
end
d = Dict.new()
d[4611686018427387904] = 42
foo(d)
""", """42
""")
def test_dict20(self):
self._test("""
def foo(d)
puts(d["foo"])
end
d = Dict.new()
d["foo"] = 42
foo(d)
""", """42
""")
def test_KeyError0(self):
def test_stderr(stderr):
self._test_regexp(r"""Traceback \(most recent call last\):
File "[^"]+", line 3, in <package>
KeyError: .*
""", stderr)
self._test("""
d = Dict.new()
puts(d["foo"])
""", stderr=test_stderr)
def test_add0(self):
self._test("""
print(({} + {}).size)
""", "0")
def test_add10(self):
self._test("""
print(({ 'foo: 42 } + { 'bar: 26 }).size)
""", "2")
def test_add20(self):
self._test("""
print(({ 'foo: 42 } + { 'bar: 26 })['foo])
""", "42")
def test_add30(self):
self._test("""
print(({ 'foo: 42 } + { 'bar: 26 })['bar])
""", "26")
def test_each0(self):
self._test("""
d = { 'foo: 'bar }
d.each() do |key, value|
print(key.inspect())
end
""", "'foo")
def test_each10(self):
self._test("""
d = { 'foo: 'bar }
d.each() do |key, value|
print(value.inspect())
end
""", "'bar")
def test_get0(self):
self._test("""
print({ 42: 26 }.get(42))
""", "26")
def test_get10(self):
self._test("""
print({ 42: 26 }.get(\"foo\"))
""", "nil")
def test_get20(self):
self._test("""
print({ 42: 26 }.get(\"foo\", \"bar\"))
""", "bar")
# vim: tabstop=4 shiftwidth=4 expandtab softtabstop=4
| mit | -2,695,812,920,068,500,500 | 15.198582 | 70 | 0.487741 | false |
Mangara/ArboralExplorer | lib/Cmpl/cmplServer/cmplServer/CmplServerTools.py | 1 | 3351 | #***********************************************************************
# This code is part of CmplServer
#
# Copyright (C) 2013, 2014
# Mike Steglich - Technical University of Applied Sciences
# Wildau, Germany
#
# CmplServer is a project of the Technical University of
# Applied Sciences Wildau and the Institute for Operations Research
# and Business Management at the Martin Luther University
# Halle-Wittenberg.
# Please visit the project homepage <www.coliop.org>
#
# CmplServer is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# CmplServer is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#**********************************************************************
#!/usr/bin/python
from SocketServer import ThreadingMixIn
from SimpleXMLRPCServer import SimpleXMLRPCServer
from time import gmtime, strftime
from pyCmpl.CmplDefs import *
import os
import time
import string
import platform
#*************** CmplXmlRpcServer ********************************
class CmplXmlRpcServer(ThreadingMixIn, SimpleXMLRPCServer):
#*********** process_request **********
def process_request(self, request, client_address):
self.client_address = client_address
return SimpleXMLRPCServer.process_request(self, request, client_address)
#*********** end process_request ******
#*************** end CmplXmlRpcServer ****************************
#*************** CmplServerTools ***************************************
class CmplServerTools(object):
#*********** cmplLogging ***********
@staticmethod
def cmplLogging(logFile, msg , id=None , name=None ):
try:
if id==None and name==None:
logFile.write("[" + strftime("%Y-%m-%d %H:%M:%S", gmtime())+ "] - " + msg+"\n")
elif name==None:
#logFile.write( string.split(id,"-")[0] + " - [" + strftime("%Y-%m-%d %H:%M:%S", gmtime())+ "] - " + id + " <"+msg+">\n")
logFile.write( "[" + strftime("%Y-%m-%d %H:%M:%S", gmtime())+ "] - " + id + " - <"+msg+">\n")
else:
#logFile.write( string.split(id,"-")[0] + " - [" + strftime("%Y-%m-%d %H:%M:%S", gmtime())+ "] - " + id + " - " + name + " <"+msg+">\n")
logFile.write( "[" + strftime("%Y-%m-%d %H:%M:%S", gmtime())+ "] - " + id + " - " + name + " <"+msg+">\n")
logFile.flush()
except IOError, e:
pass
#raise Exception("IO error for solution or log file ")
#*********** end cmplLogging *******
#*********** readFileContent **********
@staticmethod
def readFileContent(fileName):
try:
f = open(fileName, "r")
lines = f.read()
f.close()
return lines
except IOError, e:
raise Exception("IO error for file "+fileName)
#*********** end readFileContent ******
#*************** end CmplServerTools ***********************************
| apache-2.0 | 539,508,423,461,816,500 | 31.230769 | 140 | 0.563414 | false |
Lana-B/Pheno4T | madanalysis/install/install_matplotlib.py | 1 | 5864 | ################################################################################
#
# Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks
# The MadAnalysis development team, email: <[email protected]>
#
# This file is part of MadAnalysis 5.
# Official website: <https://launchpad.net/madanalysis5>
#
# MadAnalysis 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MadAnalysis 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/>
#
################################################################################
from madanalysis.install.install_service import InstallService
from shell_command import ShellCommand
import os
import sys
import logging
class InstallMatplotlib:
def __init__(self,main):
self.main = main
self.installdir = os.path.normpath(self.main.archi_info.ma5dir+'/tools/matplotlib/')
self.toolsdir = os.path.normpath(self.main.archi_info.ma5dir+'/tools')
self.tmpdir = self.main.session_info.tmpdir
self.downloaddir = self.main.session_info.downloaddir
self.untardir = os.path.normpath(self.tmpdir + '/MA5_matplotlib/')
self.ncores = 1
self.files = {"matplotlib.tar.gz" : "http://sourceforge.net/projects/matplotlib/files/matplotlib/matplotlib-1.3.1/matplotlib-1.3.1.tar.gz"}
def Detect(self):
if not os.path.isdir(self.toolsdir):
logging.debug("The folder '"+self.toolsdir+"' is not found")
return False
if not os.path.isdir(self.installdir):
logging.debug("The folder "+self.installdir+"' is not found")
return False
return True
def Remove(self,question=True):
from madanalysis.IOinterface.folder_writer import FolderWriter
return FolderWriter.RemoveDirectory(self.installdir,question)
def CreatePackageFolder(self):
if not InstallService.create_tools_folder(self.toolsdir):
return False
if not InstallService.create_package_folder(self.toolsdir,'matplotlib'):
return False
return True
def CreateTmpFolder(self):
ok = InstallService.prepare_tmp(self.untardir, self.downloaddir)
if ok:
self.tmpdir=self.untardir
return ok
def Download(self):
# Checking connection with MA5 web site
if not InstallService.check_ma5site():
return False
# Launching wget
logname = os.path.normpath(self.installdir+'/wget.log')
if not InstallService.wget(self.files,logname,self.downloaddir):
return False
# Ok
return True
def Unpack(self):
# Logname
logname = os.path.normpath(self.installdir+'/unpack.log')
# Unpacking the tarball
ok, packagedir = InstallService.untar(logname, self.tmpdir,'matplotlib.tar.gz')
if not ok:
return False
# Ok: returning the good folder
self.tmpdir=packagedir
return True
def Build(self):
# Input
theCommands=['python','setup.py','build']
logname=os.path.normpath(self.installdir+'/compilation.log')
# Execute
logging.debug('shell command: '+' '.join(theCommands))
ok, out= ShellCommand.ExecuteWithLog(theCommands,\
logname,\
self.tmpdir,\
silent=False)
# return result
if not ok:
logging.error('impossible to build the project. For more details, see the log file:')
logging.error(logname)
return ok
def Install(self):
# Input
theCommands=['python','setup.py','install','--home='+self.installdir]
logname=os.path.normpath(self.installdir+'/compilation.log')
# Execute
logging.debug('shell command: '+' '.join(theCommands))
ok, out= ShellCommand.ExecuteWithLog(theCommands,\
logname,\
self.tmpdir,\
silent=False)
# return result
if not ok:
            logging.error('impossible to install the project. For more details, see the log file:')
logging.error(logname)
return ok
def Check(self):
        # Check that the downloaded matplotlib version is the one in use
try:
import matplotlib
if str(matplotlib.__version__) != "1.3.1":
logging.error("Not using the right version of Matplotlib.")
self.display_log()
return False
except:
logging.error("Cannot use Matplotlib. Please install it.")
self.display_log()
return False
return True
def display_log(self):
logging.error("More details can be found into the log files:")
logging.error(" - "+os.path.normpath(self.installdir+"/wget.log"))
logging.error(" - "+os.path.normpath(self.installdir+"/unpack.log"))
logging.error(" - "+os.path.normpath(self.installdir+"/configuration.log"))
logging.error(" - "+os.path.normpath(self.installdir+"/compilation.log"))
logging.error(" - "+os.path.normpath(self.installdir+"/installation.log"))
def NeedToRestart(self):
return True
| gpl-3.0 | 4,663,602,692,768,040,000 | 36.589744 | 147 | 0.597715 | false |
dropbox/emmer | emmer/response_router.py | 1 | 5866 | import re
class ResponseRouter(object):
"""Handles the passing of control from a conversation to a client app's
routes.
For read requests and write requests, ResponseRouter maintains two lists of
    rules, where each rule is a tuple of the form (filename pattern, action).
When a request comes in, the filename given is checked against the list of
filename regex patterns, and the first rule that matches invokes the
corresponding action.
    Actions are application-level functions that take the following arguments:
client_host: The ip or hostname of the client.
client_port: The port of the client
filename: The filename included in the client request.
Additionally, a write request takes an additional argument:
data: The data sent from the client in the tftp conversation.
In the case of read requests, actions should return string data that will
be served directly back to clients.
"""
def __init__(self):
self.read_rules = []
self.write_rules = []
def append_read_rule(self, filename_pattern, action):
"""Adds a rule associating a filename pattern with an action for read
requests. The action given will execute when a read request is received
but before any responses are given.
Args:
filename_pattern: A string pattern to match future read request
filenames against.
action: A function to invoke when a later read request arrives
matching the given filename_pattern.
"""
self.read_rules.append((filename_pattern, action))
def append_write_rule(self, filename_pattern, action):
"""Adds a rule associating a filename pattern with an action for write
requests. The action given will execute when a write request is
completed and all data received.
Args:
            filename_pattern: A string pattern to match future write request
                filenames against.
            action: A function to invoke when a later write request arrives
matching the given filename_pattern.
"""
self.write_rules.append((filename_pattern, action))
def initialize_read(self, filename, client_host, client_port):
"""For a read request, finds the appropriate action and invokes it.
Args:
filename: The filename included in the client's request.
client_host: The host of the client connecting.
client_port: The port of the client connecting.
Returns:
A ReadBuffer containing the file contents to return. If there is no
corresponding action, returns None.
"""
action = self.find_action(self.read_rules, filename)
if action:
return ReadBuffer(action(client_host, client_port, filename))
else:
return None
def initialize_write(self, filename, client_host, client_port):
"""For a write request, finds the appropriate action and returns it.
This is different than a read request in that the action is invoked at
the end of the file transfer.
Args:
filename: The filename included in the client's request.
client_host: The host of the client connecting.
client_port: The port of the client connecting.
Returns:
An action that is to be run at the end of a write request file
transfer. If there is no corresponding action, returns None.
"""
return self.find_action(self.write_rules, filename)
def find_action(self, rules, filename):
"""Given a list of rules and a filename to match against them, returns
an action stored in one of those rules. The action returned corresponds
to the first rule that matches the filename given.
Args:
rules: A list of tuples, where each tuple is (filename pattern,
action).
filename: A filename to match against the filename regex patterns.
Returns:
An action corresponding to the first rule that matches the filename
given. If no rules match, returns None.
"""
for (filename_pattern, action) in rules:
if re.match(filename_pattern, filename):
return action
return None
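# Illustrative usage sketch (hypothetical pattern, host, and port):
# serve_manifest stands in for an application-level action with the
# read-action signature described in the ResponseRouter docstring above.
def _example_routing():
    router = ResponseRouter()
    def serve_manifest(client_host, client_port, filename):
        # Read actions return the string data to serve back to the client.
        return "version: 1\n"
    router.append_read_rule(r"^manifest\.txt$", serve_manifest)
    # A matching read request yields a ReadBuffer over the action's result.
    buf = router.initialize_read("manifest.txt", "192.0.2.10", 3942)
    return buf.get_block(1) if buf else None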
class ReadBuffer(object):
"""A ReadBuffer is used to temporarily store read request data while the
transfer has not completely succeeded. It offers an interface for
retrieving chunks of data in 512 byte chunks based on block number.
"""
def __init__(self, data):
self.data = data
def get_block_count(self):
"""Returns the amount of blocks that this ReadBuffer can produce
This amount is also the largest value that can be passed into
get_block.
"""
return (len(self.data) / 512) + 1
def get_block(self, block_num):
"""Returns the data corresponding to the given block number
Args:
block_num: The block number of data to request. By the TFTP
protocol, blocks are consecutive 512 byte sized chunks of data with
            the exception of the final block, which may be shorter than 512 bytes.
Return:
A 512 byte or less chunk of data corresponding to the given block
number.
"""
return self.data[(block_num - 1) * 512:block_num * 512]
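# A quick sketch of the block arithmetic above: 1200 bytes yield three
# blocks -- two full 512-byte blocks plus a final 176-byte block; the short
# final block is what signals end-of-transfer in TFTP.
def _example_blocks():
    buf = ReadBuffer("x" * 1200)
    assert buf.get_block_count() == 3
    assert len(buf.get_block(1)) == 512
    assert len(buf.get_block(3)) == 176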
class WriteBuffer(object):
"""A WriteBuffer is used to temporarily store write request data while the
transfer has not completely succeeded.
Retrieve the data from the `data` property.
"""
def __init__(self):
self.data = ""
def receive_data(self, data):
"""Write some more data to the WriteBuffer """
self.data += data
| mit | -3,915,947,709,512,583,700 | 38.106667 | 79 | 0.648483 | false |
henrysher/duplicity | duplicity/backends/dpbxbackend.py | 1 | 19561 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2013 jno <[email protected]>
# Copyright 2016 Dmitry Nezhevenko <[email protected]>
#
# Version: 0.3
#
# 0. You can make me happy with https://www.dropbox.com/referrals/NTE2ODA0Mzg5
# 1. Most of the code was taken from cli_client.py. The ftpsbackend.py was used as a template
# 2. DPBX & dpbx are used because the use of the actual name is prohibited
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import StringIO
import os
import re
import sys
import time
import traceback
import urllib
from duplicity import log, globals
from duplicity import progress
from duplicity.errors import BackendException
from duplicity.globals import num_retries
from requests.exceptions import ConnectionError
import duplicity.backend
# This is the chunk size for uploads using the Dpbx chunked API v2. It doesn't
# make sense to make it much larger, since the Dpbx SDK uses a connection pool
# internally, so multiple chunks are sent over the same keep-alive socket.
# Plus, in case of network problems, we will most likely be able to retry
# only the failed chunk.
DPBX_UPLOAD_CHUNK_SIZE = 16 * 1024 * 1024
# Download internal buffer size. Files are downloaded using one request.
DPBX_DOWNLOAD_BUF_SIZE = 512 * 1024
DPBX_AUTORENAMED_FILE_RE = re.compile(r' \([0-9]+\)\.[^\.]+$')
def log_exception(e):
log.Error('Exception [%s]:' % (e,))
f = StringIO.StringIO()
traceback.print_exc(file=f)
f.seek(0)
for s in f.readlines():
log.Error('| ' + s.rstrip())
f.close()
def command(login_required=True):
"""a decorator for handling authentication and exceptions"""
def decorate(f):
def wrapper(self, *args):
try:
return f(self, *args)
except ApiError as e:
log_exception(e)
raise BackendException('dpbx api error "%s"' % (e,))
except Exception as e:
log_exception(e)
log.Error('dpbx code error "%s"' % (e,), log.ErrorCode.backend_code_error)
raise
wrapper.__doc__ = f.__doc__
return wrapper
return decorate
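# command() is applied below to the backend entry points (e.g. _put, _get,
# _list); note that the login_required flag is accepted but not currently
# acted upon by wrapper().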
class DPBXBackend(duplicity.backend.Backend):
"""Connect to remote store using Dr*pB*x service"""
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)
        # The dropbox SDK is imported lazily so that a missing package yields
        # the helpful message below; the names are declared global because
        # command(), _error_code(), login() and the transfer methods also
        # reference them outside of __init__.
        global Dropbox, AuthError, BadInputError, ApiError, \
            UploadSessionCursor, CommitInfo, WriteMode, GetMetadataError, \
            DeleteError, UploadSessionLookupError, ListFolderError, \
            DropboxOAuth2FlowNoRedirect
        try:
            from dropbox import Dropbox
            from dropbox.exceptions import AuthError, BadInputError, ApiError
            from dropbox.files import (UploadSessionCursor, CommitInfo,
                                       WriteMode, GetMetadataError,
                                       DeleteError, UploadSessionLookupError,
                                       ListFolderError)
            from dropbox.oauth import DropboxOAuth2FlowNoRedirect
except ImportError as e:
raise BackendException("""\
This backend requires the dropbox package version 6.9.0
To install use "sudo pip install dropbox==6.9.0"
Exception: %s""" % str(e))
self.api_account = None
self.api_client = None
self.auth_flow = None
self.login()
def user_authenticated(self):
try:
account = self.api_client.users_get_current_account()
log.Debug("User authenticated as ,%s" % account)
return True
except:
log.Debug('User not authenticated')
return False
def load_access_token(self):
return os.environ.get('DPBX_ACCESS_TOKEN', None)
def save_access_token(self, access_token):
raise BackendException('dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' %
access_token)
def obtain_access_token(self):
log.Info("dpbx: trying to obtain access token")
for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
if env_var not in os.environ:
raise BackendException('dpbx: %s environment variable not set' % env_var)
app_key = os.environ['DPBX_APP_KEY']
app_secret = os.environ['DPBX_APP_SECRET']
if not sys.stdout.isatty() or not sys.stdin.isatty():
log.FatalError('dpbx error: cannot interact, but need human attention',
log.ErrorCode.backend_command_error)
auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
log.Debug('dpbx,auth_flow.start()')
authorize_url = auth_flow.start()
print
print '-' * 72
print "1. Go to: " + authorize_url
print "2. Click \"Allow\" (you might have to log in first)."
print "3. Copy the authorization code."
print '-' * 72
auth_code = raw_input("Enter the authorization code here: ").strip()
try:
log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
authresult = auth_flow.finish(auth_code)
except Exception as e:
raise BackendException('dpbx: Unable to obtain access token: %s' % e)
log.Info("dpbx: Authentication successfull")
self.save_access_token(authresult.access_token)
def login(self):
if self.load_access_token() is None:
self.obtain_access_token()
self.api_client = Dropbox(self.load_access_token())
self.api_account = None
try:
log.Debug('dpbx,users_get_current_account([token])')
self.api_account = self.api_client.users_get_current_account()
log.Debug("dpbx,%s" % self.api_account)
except (BadInputError, AuthError) as e:
log.Debug('dpbx,exception: %s' % e)
log.Info("dpbx: Authentication failed. Trying to obtain new access token")
self.obtain_access_token()
# We're assuming obtain_access_token will throw exception.
# So this line should not be reached
raise BackendException("dpbx: Please update DPBX_ACCESS_TOKEN and try again")
log.Info("dpbx: Successfully authenticated as %s" %
self.api_account.name.display_name)
def _error_code(self, operation, e):
if isinstance(e, ApiError):
err = e.error
if isinstance(err, GetMetadataError) and err.is_path():
if err.get_path().is_not_found():
return log.ErrorCode.backend_not_found
elif isinstance(err, DeleteError) and err.is_path_lookup():
lookup = e.error.get_path_lookup()
if lookup.is_not_found():
return log.ErrorCode.backend_not_found
@command()
def _put(self, source_path, remote_filename):
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()
file_size = os.path.getsize(source_path.name)
progress.report_transfer(0, file_size)
if file_size < DPBX_UPLOAD_CHUNK_SIZE:
# Upload whole file at once to avoid extra server request
res_metadata = self.put_file_small(source_path, remote_path)
else:
res_metadata = self.put_file_chunked(source_path, remote_path)
# A few sanity checks
if res_metadata.path_display != remote_path:
raise BackendException('dpbx: result path mismatch: %s (expected: %s)' %
(res_metadata.path_display, remote_path))
if res_metadata.size != file_size:
raise BackendException('dpbx: result size mismatch: %s (expected: %s)' %
(res_metadata.size, file_size))
def put_file_small(self, source_path, remote_path):
if not self.user_authenticated():
self.login()
file_size = os.path.getsize(source_path.name)
f = source_path.open('rb')
try:
log.Debug('dpbx,files_upload(%s, [%d bytes])' % (remote_path, file_size))
res_metadata = self.api_client.files_upload(f.read(), remote_path,
mode=WriteMode.overwrite,
autorename=False,
client_modified=None,
mute=True)
log.Debug('dpbx,files_upload(): %s' % res_metadata)
progress.report_transfer(file_size, file_size)
return res_metadata
finally:
f.close()
def put_file_chunked(self, source_path, remote_path):
if not self.user_authenticated():
self.login()
file_size = os.path.getsize(source_path.name)
f = source_path.open('rb')
try:
buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
log.Debug('dpbx,files_upload_session_start([%d bytes]), total: %d' %
(len(buf), file_size))
upload_sid = self.api_client.files_upload_session_start(buf)
log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
upload_cursor = UploadSessionCursor(upload_sid.session_id, f.tell())
commit_info = CommitInfo(remote_path, mode=WriteMode.overwrite,
autorename=False, client_modified=None,
mute=True)
res_metadata = None
progress.report_transfer(f.tell(), file_size)
requested_offset = None
current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
retry_number = globals.num_retries
is_eof = False
            # We're doing our own error handling and retry logic because
            # we can benefit from Dpbx chunked uploads and retry only the
            # failed chunk
while not is_eof or not res_metadata:
try:
if requested_offset is not None:
upload_cursor.offset = requested_offset
if f.tell() != upload_cursor.offset:
f.seek(upload_cursor.offset)
buf = f.read(current_chunk_size)
is_eof = f.tell() >= file_size
if not is_eof and len(buf) == 0:
continue
# reset temporary status variables
requested_offset = None
current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
retry_number = globals.num_retries
if not is_eof:
assert len(buf) != 0
                        log.Debug('dpbx,files_upload_session_append([%d bytes], offset=%d)' %
                                  (len(buf), upload_cursor.offset))
self.api_client.files_upload_session_append(buf,
upload_cursor.session_id,
upload_cursor.offset)
else:
                        log.Debug('dpbx,files_upload_session_finish([%d bytes], offset=%d)' %
                                  (len(buf), upload_cursor.offset))
res_metadata = self.api_client.files_upload_session_finish(buf,
upload_cursor,
commit_info)
upload_cursor.offset = f.tell()
log.Debug('progress: %d of %d' % (upload_cursor.offset,
file_size))
progress.report_transfer(upload_cursor.offset, file_size)
except ApiError as e:
error = e.error
if isinstance(error, UploadSessionLookupError) and error.is_incorrect_offset():
# Server reports that we should send another chunk.
# Most likely this is caused by network error during
# previous upload attempt. In such case we'll get
# expected offset from server and it's enough to just
# seek() and retry again
new_offset = error.get_incorrect_offset().correct_offset
log.Debug('dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)' %
(upload_cursor.offset, new_offset))
if requested_offset is not None:
# chunk failed even after seek attempt. Something
# strange and no safe way to recover
raise BackendException("dpbx: unable to chunk upload")
else:
# will seek and retry
requested_offset = new_offset
continue
raise
except ConnectionError as e:
log.Debug('dpbx,files_upload_session_append: %s' % e)
retry_number -= 1
if not self.user_authenticated():
self.login()
if retry_number == 0:
raise
# We don't know for sure, was partial upload successful or
# not. So it's better to retry smaller amount to avoid extra
# reupload
log.Info('dpbx: sleeping a bit before chunk retry')
time.sleep(30)
current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE / 5
requested_offset = None
continue
if f.tell() != file_size:
                raise BackendException('dpbx: upload incomplete: %d bytes sent (expected: %d)' %
                                       (f.tell(), file_size))
            log.Debug('dpbx,files_upload_session_finish(): %s' % res_metadata)
progress.report_transfer(f.tell(), file_size)
return res_metadata
finally:
f.close()
@command()
def _get(self, remote_filename, local_path):
if not self.user_authenticated():
self.login()
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()
log.Debug('dpbx,files_download(%s)' % remote_path)
res_metadata, http_fd = self.api_client.files_download(remote_path)
log.Debug('dpbx,files_download(%s): %s, %s' % (remote_path, res_metadata,
http_fd))
file_size = res_metadata.size
to_fd = None
progress.report_transfer(0, file_size)
try:
to_fd = local_path.open('wb')
for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
to_fd.write(c)
progress.report_transfer(to_fd.tell(), file_size)
finally:
if to_fd:
to_fd.close()
http_fd.close()
# It's different from _query() check because we're not querying metadata
# again. Since this check is free, it's better to have it here
local_size = os.path.getsize(local_path.name)
if local_size != file_size:
raise BackendException("dpbx: wrong file size: %d (expected: %d)" %
(local_size, file_size))
local_path.setdata()
@command()
def _list(self):
# Do a long listing to avoid connection reset
if not self.user_authenticated():
self.login()
remote_dir = '/' + urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
res = []
try:
resp = self.api_client.files_list_folder(remote_dir)
log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))
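            # Listings come back in pages; follow the cursor until the
            # server reports has_more == False.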
while True:
res.extend([entry.name for entry in resp.entries])
if not resp.has_more:
break
resp = self.api_client.files_list_folder_continue(resp.cursor)
except ApiError as e:
if (isinstance(e.error, ListFolderError) and e.error.is_path() and
e.error.get_path().is_not_found()):
log.Debug('dpbx.list(%s): ignore missing folder (%s)' % (remote_dir, e))
else:
raise
# Warn users of old version dpbx about automatically renamed files
self.check_renamed_files(res)
return res
@command()
def _delete(self, filename):
if not self.user_authenticated():
self.login()
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
remote_path = '/' + os.path.join(remote_dir, filename).rstrip()
log.Debug('dpbx.files_delete(%s)' % remote_path)
self.api_client.files_delete(remote_path)
# files_permanently_delete seems to be better for backup purpose
# but it's only available for Business accounts
# self.api_client.files_permanently_delete(remote_path)
@command()
def _close(self):
"""close backend session? no! just "flush" the data"""
log.Debug('dpbx.close():')
@command()
def _query(self, filename):
if not self.user_authenticated():
self.login()
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
remote_path = '/' + os.path.join(remote_dir, filename).rstrip()
log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
info = self.api_client.files_get_metadata(remote_path)
log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
return {'size': info.size}
def check_renamed_files(self, file_list):
if not self.user_authenticated():
self.login()
bad_list = [x for x in file_list if DPBX_AUTORENAMED_FILE_RE.search(x) is not None]
if len(bad_list) == 0:
return
log.Warn('-' * 72)
log.Warn('Warning! It looks like there are automatically renamed files on backend')
log.Warn('They were probably created when using older version of duplicity.')
log.Warn('')
log.Warn('Please check your backup consistency. Most likely you will need to choose')
log.Warn('largest file from duplicity-* (number).gpg and remove brackets from its name.')
log.Warn('')
log.Warn('These files are not managed by duplicity at all and will not be')
log.Warn('removed/rotated automatically.')
log.Warn('')
log.Warn('Affected files:')
for x in bad_list:
log.Warn('\t%s' % x)
log.Warn('')
log.Warn('In any case it\'s better to create full backup.')
log.Warn('-' * 72)
duplicity.backend.register_backend("dpbx", DPBXBackend)
| gpl-2.0 | -7,412,439,218,951,572,000 | 40.442797 | 107 | 0.557998 | false |
possess1on/botTelegram-zabbix | botTelegram-zabbix.py | 1 | 19925 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#########################################################################
# BotTelegram Zabbix
# Filename: botTelegram-zabbix.py
##########################################################################
from telegram.ext import Updater, CommandHandler
import logging
import sys
import subprocess
import urllib
import requests
##########################################
# Python install module
# git clone https://github.com/possess1on/botTelegram-zabbix.git
# cd botTelegram-zabbix
# pip install python-telegram-bot
# sudo apt-get install python-pip
# pip install -r requirements.txt
# Test
# python botTelegram-zabbix.py
#
# BG
# python botTelegram-zabbix.py&
#
##########################################
##########################################
# Install python & pip
# pip install pip python-telegram-bot --upgrade
# apt-get install python-urllib3
##########################################
varZabbixmapa1 = "url"
varZabbixmapa2 = "url"
varZabbixmapa3 = "url"
varZabbixmapa4 = "url"
varZabbixmapa5 = "url"
varZabbixmapa6 = "url"
varZabbixmapa7 = "url"
varZabbixmapa8 = "url"
varZabbixmapa9 = "url"
varZabbixmapa10 = "url"
varZabbixmapa11 = "url"
varZabbixmapa12 = "url"
varZabbixmapa13 = "url"
varZabbixmapa14 = "url"
varZabbixmapa15 = "url"
users_liberados = [id]
varBotToken = 'token'
varUsername = "log"
varPassword = "pass"
varZabbixServer = "url"
varZabbixLanguage = "US"
# Enable logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO,
filename='botTelegram_zabbix.log')
logging.info('Started')
logger = logging.getLogger(__name__)
job_queue = None
# Zabbix cookie
varcookie = None
def start(bot, update):
    chat_id = update.message.chat_id
    if chat_id not in users_liberados:
        logging.info("Unauthorized user - ID {}".format(chat_id))
        return
    bot.sendMessage(update.message.chat_id, text='Welcome!')
# Every map command authenticates, downloads one Zabbix map image and sends
# it to the chat; the handlers are built from this table by
# make_map_handler() below.
MAPS = {
    'atm': ('botTelegram_mapa1.jpg', varZabbixmapa1),
    'postamat': ('botTelegram_mapa2.jpg', varZabbixmapa2),
    '140100': ('botTelegram_mapa3.jpg', varZabbixmapa3),
    '140200': ('botTelegram_mapa4.jpg', varZabbixmapa4),
    '140300': ('botTelegram_mapa5.jpg', varZabbixmapa5),
    '140400': ('botTelegram_mapa6.jpg', varZabbixmapa6),
    '140500': ('botTelegram_mapa7.jpg', varZabbixmapa7),
    '140600': ('botTelegram_mapa8.jpg', varZabbixmapa8),
    '140700': ('botTelegram_mapa9.jpg', varZabbixmapa9),
    '140800': ('botTelegram_mapa10.jpg', varZabbixmapa10),
    '140900': ('botTelegram_mapa11.jpg', varZabbixmapa11),
    '141000': ('botTelegram_mapa12.jpg', varZabbixmapa12),
    '141100': ('botTelegram_mapa13.jpg', varZabbixmapa13),
    '141200': ('botTelegram_mapa14.jpg', varZabbixmapa14),
    '140000': ('botTelegram_mapa15.jpg', varZabbixmapa15),
}
def make_map_handler(file_img, zbx_img_url):
    """Build a command handler that downloads and sends one Zabbix map."""
    def handler(bot, update):
        chat_id = update.message.chat_id
        if chat_id not in users_liberados:
            logging.info("Unauthorized user - ID {}".format(chat_id))
            return
        try:
            login()
            res = requests.get(zbx_img_url, cookies=varcookie)
            if res.status_code == 404:
                logger.warn("Got HTTP 404, check the URL: {}".format(zbx_img_url))
                return False
            with open(file_img, 'wb') as fp:
                fp.write(res.content)
            bot.sendPhoto(chat_id=chat_id, photo=open(file_img, 'rb'))
        except (IndexError, ValueError):
            return
    return handler
def help(bot, update):
    chat_id = update.message.chat_id
    if chat_id not in users_liberados:
        logging.info("Unauthorized user - ID {}".format(chat_id))
        return
    bot.sendMessage(update.message.chat_id, text="Help:\n"
                                                 "/atm - ATMs\n"
                                                 "/postamat - Postamats\n"
                                                 "/140100 - Aksu\n"
                                                 "/140200 - Aktogay\n"
                                                 "/140300 - Bayanaul\n"
                                                 "/140400 - Zhelezinka\n"
                                                 "/140500 - Irtyshsk\n"
                                                 "/140600 - Kachiry\n"
                                                 "/140700 - Lebyazhye\n"
                                                 "/140800 - Maysk\n"
                                                 "/140900 - PRUPS\n"
                                                 "/141000 - Uspenka\n"
                                                 "/141100 - Shcherbakty\n"
                                                 "/141200 - Ekibastuz\n"
                                                 "/140000 - Post offices\n")
def error(bot, update, error):
logger.warn('Update "%s" error "%s"' % (update, error))
def login():
global varcookie
requests.packages.urllib3.disable_warnings()
if varZabbixLanguage == "PT":
data_api = {"name": varUsername, "password": varPassword, "enter": "Connect-SE"}
else:
data_api = {"name": varUsername, "password": varPassword, "enter": "Sign in"}
req_cookie = requests.post(varZabbixServer + "/", data=data_api, verify=True)
varcookie = req_cookie.cookies
if len(req_cookie.history) > 1 and req_cookie.history[0].status_code == 302:
logger.warn("Проверьте адрес сервера")
if not varcookie:
logger.warn("Проверьте имя пользователя и пароль")
varcookie = None
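# login() submits the Zabbix web sign-in form and stores the session cookie
# in varcookie; the image handlers reuse that cookie for authenticated
# downloads of maps and graphs.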
def grafico(bot, update, args):
chat_id = update.message.chat_id
    if chat_id not in users_liberados:
        logging.info("Unauthorized user - ID {}".format(chat_id))
        return
try:
        if len(args) < 2:
            bot.sendMessage(chat_id, text='Usage: /grafico <item_id> <period_in_seconds>')
            return False
grafico_id = args[0]
grafico_seg = args[1]
login()
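        # Zabbix renders graphs through chart.php: itemids selects the item
        # and period is the time window in seconds.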
zbx_img_url = ("{}/chart.php?itemids={}&period={}&width=600".format(varZabbixServer, grafico_id, grafico_seg))
file_img = "botTelegram_grafico_{}.jpg".format(grafico_id)
res = requests.get(zbx_img_url, cookies=varcookie)
res_code = res.status_code
if res_code == 404:
logger.warn("Проверьте адрес Zabbix Grafico: {}".format(zbx_img_url))
return False
res_img = res.content
with open(file_img, 'wb') as fp:
fp.write(res_img)
fp.close()
bot.sendPhoto(chat_id=update.message.chat_id, photo=open(file_img, 'rb'))
except (IndexError, ValueError):
        update.message.reply_text('Check the graph item id')
return
def main():
global job_queue
updater = Updater(varBotToken)
job_queue = updater.job_queue
dp = updater.dispatcher
dp.add_handler(CommandHandler("atm", mapa1))
dp.add_handler(CommandHandler("postamat", mapa2))
dp.add_handler(CommandHandler("140100", mapa3))
dp.add_handler(CommandHandler("140200", mapa4))
dp.add_handler(CommandHandler("140300", mapa5))
dp.add_handler(CommandHandler("140400", mapa6))
dp.add_handler(CommandHandler("140500", mapa7))
dp.add_handler(CommandHandler("140600", mapa8))
dp.add_handler(CommandHandler("140700", mapa9))
dp.add_handler(CommandHandler("140800", mapa10))
dp.add_handler(CommandHandler("140900", mapa11))
dp.add_handler(CommandHandler("141000", mapa12))
dp.add_handler(CommandHandler("141100", mapa13))
dp.add_handler(CommandHandler("141200", mapa14))
dp.add_handler(CommandHandler("140000", mapa15))
dp.add_handler(CommandHandler("grafico", grafico, pass_args=True))
dp.add_handler(CommandHandler("help", help))
# log all errors
dp.add_error_handler(error)
# Start the Bot
updater.start_polling()
updater.idle()
logging.info('Finished')
logging.shutdown()
if __name__ == '__main__':
main()
| gpl-2.0 | -7,515,542,066,276,007,000 | 29.095016 | 122 | 0.571192 | false |
xow/mdk | mdk/commands/install.py | 1 | 3148 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Moodle Development Kit
Copyright (c) 2013 Frédéric Massart - FMCorz.net
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
http://github.com/FMCorz/mdk
"""
import os
import logging
from .. import db
from ..command import Command
from ..tools import mkdir
DB = db.DB
class InstallCommand(Command):
_description = 'Install a Moodle instance'
def __init__(self, *args, **kwargs):
super(InstallCommand, self).__init__(*args, **kwargs)
self._arguments = [
(
['-e', '--engine'],
{
'action': 'store',
'choices': ['mariadb', 'mysqli', 'pgsql'],
'default': self.C.get('defaultEngine'),
'help': 'database engine to use',
'metavar': 'engine'
}
),
(
['-f', '--fullname'],
{
'action': 'store',
'help': 'full name of the instance',
'metavar': 'fullname'
}
),
(
['-r', '--run'],
{
'action': 'store',
'help': 'scripts to run after installation',
'metavar': 'run',
'nargs': '*'
}
),
(
['name'],
{
'default': None,
'help': 'name of the instance',
'metavar': 'name',
'nargs': '?'
})
]
def run(self, args):
name = args.name
engine = args.engine
fullname = args.fullname
M = self.Wp.resolve(name)
if not M:
raise Exception('This is not a Moodle instance')
name = M.get('identifier')
dataDir = self.Wp.getPath(name, 'data')
if not os.path.isdir(dataDir):
mkdir(dataDir, 0777)
kwargs = {
'engine': engine,
'fullname': fullname,
'dataDir': dataDir,
'wwwroot': self.Wp.getUrl(name)
}
M.install(**kwargs)
# Running scripts
        if M.isInstalled() and isinstance(args.run, list):
for script in args.run:
logging.info('Running script \'%s\'' % (script))
try:
M.runScript(script)
except Exception as e:
logging.warning('Error while running the script: %s' % e)
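# Illustrative invocation (the ``mdk`` entry point name and the instance and
# script names are examples, not part of this module):
#     mdk install myinstance -e pgsql -r postinstall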
| gpl-3.0 | -6,347,851,598,104,812,000 | 28.12963 | 77 | 0.493961 | false |
mick-d/nipype | nipype/algorithms/modelgen.py | 5 | 37627 | # -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
The modelgen module provides classes for specifying designs for individual
subject analysis of task-based fMRI experiments. In particular it also includes
algorithms for generating regressors for sparse and sparse-clustered acquisition
experiments.
These functions include:
* SpecifyModel: allows specification of sparse and non-sparse models
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
>>> os.chdir(datadir)
"""
from __future__ import print_function, division, unicode_literals, absolute_import
from builtins import range, str, bytes, int
from copy import deepcopy
import os
from nibabel import load
import numpy as np
from scipy.special import gammaln
from ..utils import NUMPY_MMAP
from ..interfaces.base import (BaseInterface, TraitedSpec, InputMultiPath,
traits, File, Bunch, BaseInterfaceInputSpec,
isdefined)
from ..utils.filemanip import filename_to_list
from ..utils.misc import normalize_mc_params
from .. import config, logging
iflogger = logging.getLogger('interface')
def gcd(a, b):
"""Returns the greatest common divisor of two integers
uses Euclid's algorithm
>>> gcd(4, 5)
1
>>> gcd(4, 8)
4
>>> gcd(22, 55)
11
"""
while b > 0:
a, b = b, a % b
return a
def spm_hrf(RT, P=None, fMRI_T=16):
""" python implementation of spm_hrf
see spm_hrf for implementation details
% RT - scan repeat time
% p - parameters of the response function (two gamma
% functions)
% defaults (seconds)
% p(0) - delay of response (relative to onset) 6
% p(1) - delay of undershoot (relative to onset) 16
% p(2) - dispersion of response 1
% p(3) - dispersion of undershoot 1
% p(4) - ratio of response to undershoot 6
% p(5) - onset (seconds) 0
% p(6) - length of kernel (seconds) 32
%
% hrf - hemodynamic response function
% p - parameters of the response function
the following code using scipy.stats.distributions.gamma
doesn't return the same result as the spm_Gpdf function ::
hrf = gamma.pdf(u, p[0]/p[2], scale=dt/p[2]) -
gamma.pdf(u, p[1]/p[3], scale=dt/p[3])/p[4]
>>> print(spm_hrf(2))
[ 0.00000000e+00 8.65660810e-02 3.74888236e-01 3.84923382e-01
2.16117316e-01 7.68695653e-02 1.62017720e-03 -3.06078117e-02
-3.73060781e-02 -3.08373716e-02 -2.05161334e-02 -1.16441637e-02
-5.82063147e-03 -2.61854250e-03 -1.07732374e-03 -4.10443522e-04
-1.46257507e-04]
"""
p = np.array([6, 16, 1, 1, 6, 0, 32], dtype=float)
if P is not None:
p[0:len(P)] = P
_spm_Gpdf = lambda x, h, l: np.exp(h * np.log(l) + (h - 1) * np.log(x) - (l * x) - gammaln(h))
# modelled hemodynamic response function - {mixture of Gammas}
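    # hrf(t) ~ Gpdf(t; p0/p2, dt/p2) - Gpdf(t; p1/p3, dt/p3) / p4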
dt = RT / float(fMRI_T)
u = np.arange(0, int(p[6] / dt + 1)) - p[5] / dt
with np.errstate(divide='ignore'): # Known division-by-zero
hrf = _spm_Gpdf(u, p[0] / p[2], dt / p[2]) - _spm_Gpdf(u, p[1] / p[3],
dt / p[3]) / p[4]
idx = np.arange(0, int((p[6] / RT) + 1)) * fMRI_T
hrf = hrf[idx]
hrf = hrf / np.sum(hrf)
return hrf
def orth(x_in, y_in):
"""Orthogonalize y_in with respect to x_in.
>>> orth_expected = np.array([1.7142857142857144, 0.42857142857142883, \
-0.85714285714285676])
>>> err = np.abs(np.array(orth([1, 2, 3],[4, 5, 6]) - orth_expected))
>>> all(err < np.finfo(float).eps)
True
"""
x = np.array(x_in)[:, None]
y = np.array(y_in)[:, None]
y = y - np.dot(x, np.dot(np.linalg.inv(np.dot(x.T, x)), np.dot(x.T, y)))
if np.linalg.norm(y, 1) > np.exp(-32):
y = y[:, 0].tolist()
else:
y = y_in
return y
def scale_timings(timelist, input_units, output_units, time_repetition):
"""Scales timings given input and output units (scans/secs)
Parameters
----------
timelist: list of times to scale
input_units: 'secs' or 'scans'
output_units: Ibid.
time_repetition: float in seconds
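    Examples
    --------
    >>> scale_timings([0, 4, 20], 'scans', 'secs', 2.5)
    [0.0, 10.0, 50.0]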
"""
if input_units == output_units:
_scalefactor = 1.
if (input_units == 'scans') and (output_units == 'secs'):
_scalefactor = time_repetition
if (input_units == 'secs') and (output_units == 'scans'):
_scalefactor = 1. / time_repetition
timelist = [np.max([0., _scalefactor * t]) for t in timelist]
return timelist
def gen_info(run_event_files):
"""Generate subject_info structure from a list of event files
"""
info = []
for i, event_files in enumerate(run_event_files):
runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[])
for event_file in event_files:
_, name = os.path.split(event_file)
if '.run' in name:
name, _ = name.split('.run%03d' % (i + 1))
elif '.txt' in name:
name, _ = name.split('.txt')
runinfo.conditions.append(name)
event_info = np.atleast_2d(np.loadtxt(event_file))
runinfo.onsets.append(event_info[:, 0].tolist())
if event_info.shape[1] > 1:
runinfo.durations.append(event_info[:, 1].tolist())
else:
runinfo.durations.append([0])
if event_info.shape[1] > 2:
runinfo.amplitudes.append(event_info[:, 2].tolist())
else:
delattr(runinfo, 'amplitudes')
info.append(runinfo)
return info
class SpecifyModelInputSpec(BaseInterfaceInputSpec):
subject_info = InputMultiPath(Bunch, mandatory=True,
xor=['subject_info', 'event_files'],
desc='Bunch or List(Bunch) subject-specific '
'condition information. see '
':ref:`SpecifyModel` or '
'SpecifyModel.__doc__ for details')
event_files = InputMultiPath(traits.List(File(exists=True)), mandatory=True,
xor=['subject_info', 'event_files'],
desc='List of event description files 1, 2 or 3 '
'column format corresponding to onsets, '
'durations and amplitudes')
realignment_parameters = InputMultiPath(File(exists=True),
desc='Realignment parameters returned '
'by motion correction algorithm',
copyfile=False)
parameter_source = traits.Enum("SPM", "FSL", "AFNI", "FSFAST", "NIPY",
usedefault=True,
desc="Source of motion parameters")
outlier_files = InputMultiPath(File(exists=True),
desc='Files containing scan outlier indices '
'that should be tossed',
copyfile=False)
functional_runs = InputMultiPath(traits.Either(traits.List(File(exists=True)),
File(exists=True)),
mandatory=True,
desc='Data files for model. List of 4D '
'files or list of list of 3D '
'files per session',
copyfile=False)
input_units = traits.Enum('secs', 'scans', mandatory=True,
desc='Units of event onsets and durations (secs '
'or scans). Output units are always in secs')
high_pass_filter_cutoff = traits.Float(mandatory=True,
desc='High-pass filter cutoff in secs')
time_repetition = traits.Float(mandatory=True,
desc='Time between the start of one volume '
'to the start of the next image volume.')
# Not implemented yet
# polynomial_order = traits.Range(0, low=0,
# desc ='Number of polynomial functions to model high pass filter.')
class SpecifyModelOutputSpec(TraitedSpec):
session_info = traits.Any(desc='Session info for level1designs')
class SpecifyModel(BaseInterface):
"""Makes a model specification compatible with spm/fsl designers.
The subject_info field should contain paradigm information in the form of
a Bunch or a list of Bunch. The Bunch should contain the following
information::
[Mandatory]
- conditions : list of names
- onsets : lists of onsets corresponding to each condition
- durations : lists of durations corresponding to each condition. Should be
left to a single 0 if all events are being modelled as impulses.
[Optional]
- regressor_names : list of str
list of names corresponding to each column. Should be None if
automatically assigned.
- regressors : list of lists
values for each regressor - must correspond to the number of
volumes in the functional run
- amplitudes : lists of amplitudes for each event. This will be ignored by
SPM's Level1Design.
The following two (tmod, pmod) will be ignored by any Level1Design class
other than SPM:
- tmod : lists of conditions that should be temporally modulated. Should
default to None if not being used.
- pmod : list of Bunch corresponding to conditions
- name : name of parametric modulator
- param : values of the modulator
- poly : degree of modulation
Alternatively, you can provide information through event files.
The event files have to be in 1, 2 or 3 column format with the columns
corresponding to Onsets, Durations and Amplitudes and they have to have the
name event_name.runXXX... e.g.: Words.run001.txt. The event_name part will
be used to create the condition names.
Examples
--------
>>> from nipype.algorithms import modelgen
>>> from nipype.interfaces.base import Bunch
>>> s = modelgen.SpecifyModel()
>>> s.inputs.input_units = 'secs'
>>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii']
>>> s.inputs.time_repetition = 6
>>> s.inputs.high_pass_filter_cutoff = 128.
>>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]])
>>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])
>>> s.inputs.subject_info = [evs_run2, evs_run3]
Using pmod:
>>> evs_run2 = Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 50], [100, 180]], \
durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \
None])
>>> evs_run3 = Bunch(conditions=['cond1', 'cond2'], onsets=[[20, 120], [80, 160]], \
durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \
None])
>>> s.inputs.subject_info = [evs_run2, evs_run3]
"""
input_spec = SpecifyModelInputSpec
output_spec = SpecifyModelOutputSpec
def _generate_standard_design(self, infolist, functional_runs=None,
realignment_parameters=None, outliers=None):
""" Generates a standard design matrix paradigm given information about
each run
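        Returns a list with one dict per run; each dict carries the keys
        ``cond``, ``regress``, ``scans`` and, when a high-pass cutoff is
        set, ``hpf``.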
"""
sessinfo = []
output_units = 'secs'
if 'output_units' in self.inputs.traits():
output_units = self.inputs.output_units
for i, info in enumerate(infolist):
sessinfo.insert(i, dict(cond=[]))
if isdefined(self.inputs.high_pass_filter_cutoff):
sessinfo[i]['hpf'] = \
np.float(self.inputs.high_pass_filter_cutoff)
if hasattr(info, 'conditions') and info.conditions is not None:
for cid, cond in enumerate(info.conditions):
sessinfo[i]['cond'].insert(cid, dict())
sessinfo[i]['cond'][cid]['name'] = info.conditions[cid]
scaled_onset = scale_timings(info.onsets[cid],
self.inputs.input_units,
output_units,
self.inputs.time_repetition)
sessinfo[i]['cond'][cid]['onset'] = scaled_onset
scaled_duration = scale_timings(info.durations[cid],
self.inputs.input_units,
output_units,
self.inputs.time_repetition)
sessinfo[i]['cond'][cid]['duration'] = scaled_duration
if hasattr(info, 'amplitudes') and info.amplitudes:
sessinfo[i]['cond'][cid]['amplitudes'] = \
info.amplitudes[cid]
if hasattr(info, 'tmod') and info.tmod and \
len(info.tmod) > cid:
sessinfo[i]['cond'][cid]['tmod'] = info.tmod[cid]
if hasattr(info, 'pmod') and info.pmod and \
len(info.pmod) > cid:
if info.pmod[cid]:
sessinfo[i]['cond'][cid]['pmod'] = []
for j, name in enumerate(info.pmod[cid].name):
sessinfo[i]['cond'][cid]['pmod'].insert(j, {})
sessinfo[i]['cond'][cid]['pmod'][j]['name'] = \
name
sessinfo[i]['cond'][cid]['pmod'][j]['poly'] = \
info.pmod[cid].poly[j]
sessinfo[i]['cond'][cid]['pmod'][j]['param'] = \
info.pmod[cid].param[j]
sessinfo[i]['regress'] = []
if hasattr(info, 'regressors') and info.regressors is not None:
for j, r in enumerate(info.regressors):
sessinfo[i]['regress'].insert(j, dict(name='', val=[]))
if hasattr(info, 'regressor_names') and \
info.regressor_names is not None:
sessinfo[i]['regress'][j]['name'] = \
info.regressor_names[j]
else:
sessinfo[i]['regress'][j]['name'] = 'UR%d' % (j + 1)
sessinfo[i]['regress'][j]['val'] = info.regressors[j]
sessinfo[i]['scans'] = functional_runs[i]
if realignment_parameters is not None:
for i, rp in enumerate(realignment_parameters):
mc = realignment_parameters[i]
for col in range(mc.shape[1]):
colidx = len(sessinfo[i]['regress'])
sessinfo[i]['regress'].insert(colidx, dict(name='', val=[]))
sessinfo[i]['regress'][colidx]['name'] = 'Realign%d' % (col + 1)
sessinfo[i]['regress'][colidx]['val'] = mc[:, col].tolist()
if outliers is not None:
for i, out in enumerate(outliers):
numscans = 0
for f in filename_to_list(sessinfo[i]['scans']):
shape = load(f, mmap=NUMPY_MMAP).shape
if len(shape) == 3 or shape[3] == 1:
iflogger.warning(('You are using 3D instead of 4D '
'files. Are you sure this was '
'intended?'))
numscans += 1
else:
numscans += shape[3]
for j, scanno in enumerate(out):
colidx = len(sessinfo[i]['regress'])
sessinfo[i]['regress'].insert(colidx, dict(name='', val=[]))
sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d' % (j + 1)
sessinfo[i]['regress'][colidx]['val'] = \
np.zeros((1, numscans))[0].tolist()
sessinfo[i]['regress'][colidx]['val'][int(scanno)] = 1
return sessinfo
def _generate_design(self, infolist=None):
"""Generate design specification for a typical fmri paradigm
"""
realignment_parameters = []
if isdefined(self.inputs.realignment_parameters):
for parfile in self.inputs.realignment_parameters:
realignment_parameters.append(
np.apply_along_axis(func1d=normalize_mc_params,
axis=1, arr=np.loadtxt(parfile),
source=self.inputs.parameter_source))
outliers = []
if isdefined(self.inputs.outlier_files):
for filename in self.inputs.outlier_files:
try:
outindices = np.loadtxt(filename, dtype=int)
except IOError:
outliers.append([])
else:
if outindices.size == 1:
outliers.append([outindices.tolist()])
else:
outliers.append(outindices.tolist())
if infolist is None:
if isdefined(self.inputs.subject_info):
infolist = self.inputs.subject_info
else:
infolist = gen_info(self.inputs.event_files)
self._sessinfo = self._generate_standard_design(infolist,
functional_runs=self.inputs.functional_runs,
realignment_parameters=realignment_parameters,
outliers=outliers)
def _run_interface(self, runtime):
"""
"""
self._sessioninfo = None
self._generate_design()
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
if not hasattr(self, '_sessinfo'):
self._generate_design()
outputs['session_info'] = self._sessinfo
return outputs
class SpecifySPMModelInputSpec(SpecifyModelInputSpec):
concatenate_runs = traits.Bool(False, usedefault=True,
desc='Concatenate all runs to look like a '
'single session.')
output_units = traits.Enum('secs', 'scans', usedefault=True,
desc='Units of design event onsets and durations '
'(secs or scans)')
class SpecifySPMModel(SpecifyModel):
"""Adds SPM specific options to SpecifyModel
adds:
- concatenate_runs
- output_units
Examples
--------
>>> from nipype.algorithms import modelgen
>>> from nipype.interfaces.base import Bunch
>>> s = modelgen.SpecifySPMModel()
>>> s.inputs.input_units = 'secs'
>>> s.inputs.output_units = 'scans'
>>> s.inputs.high_pass_filter_cutoff = 128.
>>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii']
>>> s.inputs.time_repetition = 6
>>> s.inputs.concatenate_runs = True
>>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]])
>>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])
>>> s.inputs.subject_info = [evs_run2, evs_run3]
"""
input_spec = SpecifySPMModelInputSpec
def _concatenate_info(self, infolist):
nscans = []
for i, f in enumerate(self.inputs.functional_runs):
if isinstance(f, list):
numscans = len(f)
elif isinstance(f, (str, bytes)):
img = load(f, mmap=NUMPY_MMAP)
numscans = img.shape[3]
else:
raise Exception('Functional input not specified correctly')
nscans.insert(i, numscans)
# now combine all fields into 1
# names, onsets, durations, amplitudes, pmod, tmod, regressor_names,
# regressors
infoout = infolist[0]
for j, val in enumerate(infolist[0].durations):
if len(infolist[0].onsets[j]) > 1 and len(val) == 1:
infoout.durations[j] = (infolist[0].durations[j] *
len(infolist[0].onsets[j]))
for i, info in enumerate(infolist[1:]):
# info.[conditions, tmod] remain the same
if info.onsets:
for j, val in enumerate(info.onsets):
if self.inputs.input_units == 'secs':
onsets = np.array(info.onsets[j]) +\
self.inputs.time_repetition * \
sum(nscans[0:(i + 1)])
infoout.onsets[j].extend(onsets.tolist())
else:
onsets = np.array(info.onsets[j]) + \
sum(nscans[0:(i + 1)])
infoout.onsets[j].extend(onsets.tolist())
for j, val in enumerate(info.durations):
if len(info.onsets[j]) > 1 and len(val) == 1:
infoout.durations[j].extend(info.durations[j] *
len(info.onsets[j]))
elif len(info.onsets[j]) == len(val):
infoout.durations[j].extend(info.durations[j])
else:
                    raise ValueError('Mismatch in number of onsets and '
                                     'durations for run {0}, condition '
                                     '{1}'.format(i + 2, j + 1))
if hasattr(info, 'amplitudes') and info.amplitudes:
for j, val in enumerate(info.amplitudes):
infoout.amplitudes[j].extend(info.amplitudes[j])
if hasattr(info, 'pmod') and info.pmod:
for j, val in enumerate(info.pmod):
if val:
for key, data in enumerate(val.param):
infoout.pmod[j].param[key].extend(data)
if hasattr(info, 'regressors') and info.regressors:
# assumes same ordering of regressors across different
# runs and the same names for the regressors
for j, v in enumerate(info.regressors):
infoout.regressors[j].extend(info.regressors[j])
# insert session regressors
if not hasattr(infoout, 'regressors') or not infoout.regressors:
infoout.regressors = []
onelist = np.zeros((1, sum(nscans)))
onelist[0, sum(nscans[0:i]):sum(nscans[0:(i + 1)])] = 1
infoout.regressors.insert(len(infoout.regressors),
onelist.tolist()[0])
return [infoout], nscans
def _generate_design(self, infolist=None):
if not isdefined(self.inputs.concatenate_runs) or \
not self.inputs.concatenate_runs:
super(SpecifySPMModel, self)._generate_design(infolist=infolist)
return
if isdefined(self.inputs.subject_info):
infolist = self.inputs.subject_info
else:
infolist = gen_info(self.inputs.event_files)
concatlist, nscans = self._concatenate_info(infolist)
functional_runs = [filename_to_list(self.inputs.functional_runs)]
realignment_parameters = []
if isdefined(self.inputs.realignment_parameters):
realignment_parameters = []
for parfile in self.inputs.realignment_parameters:
mc = np.apply_along_axis(func1d=normalize_mc_params,
axis=1, arr=np.loadtxt(parfile),
source=self.inputs.parameter_source)
if not realignment_parameters:
realignment_parameters.insert(0, mc)
else:
realignment_parameters[0] = \
np.concatenate((realignment_parameters[0], mc))
outliers = []
if isdefined(self.inputs.outlier_files):
outliers = [[]]
for i, filename in enumerate(self.inputs.outlier_files):
try:
out = np.loadtxt(filename)
except IOError:
iflogger.warn('Error reading outliers file %s', filename)
out = np.array([])
if out.size > 0:
iflogger.debug('fname=%s, out=%s, nscans=%d',
filename, out, sum(nscans[0:i]))
sumscans = out.astype(int) + sum(nscans[0:i])
                    if out.size == 1:
                        outliers[0] += [np.array(sumscans, dtype=int).tolist()]
                    else:
                        outliers[0] += np.array(sumscans, dtype=int).tolist()
self._sessinfo = self._generate_standard_design(concatlist,
functional_runs=functional_runs,
realignment_parameters=realignment_parameters,
outliers=outliers)
class SpecifySparseModelInputSpec(SpecifyModelInputSpec):
time_acquisition = traits.Float(0, mandatory=True,
desc='Time in seconds to acquire a single '
'image volume')
volumes_in_cluster = traits.Range(1, usedefault=True,
desc='Number of scan volumes in a cluster')
model_hrf = traits.Bool(desc='Model sparse events with hrf')
stimuli_as_impulses = traits.Bool(True,
desc='Treat each stimulus to be impulse-like',
usedefault=True)
use_temporal_deriv = traits.Bool(requires=['model_hrf'],
desc='Create a temporal derivative in '
'addition to regular regressor')
scale_regressors = traits.Bool(True, desc='Scale regressors by the peak',
usedefault=True)
scan_onset = traits.Float(0.0,
desc='Start of scanning relative to onset of run in secs',
usedefault=True)
save_plot = traits.Bool(desc=('Save plot of sparse design calculation '
'(requires matplotlib)'))
class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec):
sparse_png_file = File(desc='PNG file showing sparse design')
sparse_svg_file = File(desc='SVG file showing sparse design')
class SpecifySparseModel(SpecifyModel):
""" Specify a sparse model that is compatible with spm/fsl designers
References
----------
.. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of
sparse-sampling fMRI experiments. Front. Neurosci. 7:55
http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract
Examples
--------
>>> from nipype.algorithms import modelgen
>>> from nipype.interfaces.base import Bunch
>>> s = modelgen.SpecifySparseModel()
>>> s.inputs.input_units = 'secs'
>>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii']
>>> s.inputs.time_repetition = 6
>>> s.inputs.time_acquisition = 2
>>> s.inputs.high_pass_filter_cutoff = 128.
>>> s.inputs.model_hrf = True
>>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], \
durations=[[1]])
>>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \
durations=[[1]])
>>> s.inputs.subject_info = [evs_run2, evs_run3]
"""
input_spec = SpecifySparseModelInputSpec
output_spec = SpecifySparseModelOutputSpec
def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
"""Generates a regressor for a sparse/clustered-sparse acquisition
"""
bplot = False
if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
bplot = True
import matplotlib
matplotlib.use(config.get('execution', 'matplotlib_backend'))
import matplotlib.pyplot as plt
TR = np.round(self.inputs.time_repetition * 1000) # in ms
if self.inputs.time_acquisition:
TA = np.round(self.inputs.time_acquisition * 1000) # in ms
else:
TA = TR # in ms
nvol = self.inputs.volumes_in_cluster
SCANONSET = np.round(self.inputs.scan_onset * 1000)
total_time = TR * (nscans - nvol) / nvol + TA * nvol + SCANONSET
SILENCE = TR - TA * nvol
dt = TA / 10.0
durations = np.round(np.array(i_durations) * 1000)
if len(durations) == 1:
durations = durations * np.ones((len(i_onsets)))
onsets = np.round(np.array(i_onsets) * 1000)
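        # Choose dt as a common divisor of TA, TR and the silent gap so the
        # acquisition windows fall exactly on the sampling grid.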
dttemp = gcd(TA, gcd(SILENCE, TR))
if dt < dttemp:
if dttemp % dt != 0:
dt = float(gcd(dttemp, dt))
if dt < 1:
raise Exception('Time multiple less than 1 ms')
iflogger.info('Setting dt = %d ms\n' % dt)
npts = int(np.ceil(total_time / dt))
times = np.arange(0, total_time, dt) * 1e-3
timeline = np.zeros((npts))
timeline2 = np.zeros((npts))
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
hrf = spm_hrf(dt * 1e-3)
reg_scale = 1.0
if self.inputs.scale_regressors:
boxcar = np.zeros(int(50.0 * 1e3 / dt))
if self.inputs.stimuli_as_impulses:
boxcar[int(1.0 * 1e3 / dt)] = 1.0
reg_scale = float(TA / dt)
else:
boxcar[int(1.0 * 1e3 / dt):int(2.0 * 1e3 / dt)] = 1.0
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
response = np.convolve(boxcar, hrf)
reg_scale = 1.0 / response.max()
iflogger.info('response sum: %.4f max: %.4f' % (response.sum(),
response.max()))
iflogger.info('reg_scale: %.4f' % reg_scale)
for i, t in enumerate(onsets):
idx = int(np.round(t / dt))
if i_amplitudes:
if len(i_amplitudes) > 1:
timeline2[idx] = i_amplitudes[i]
else:
timeline2[idx] = i_amplitudes[0]
else:
timeline2[idx] = 1
if bplot:
plt.subplot(4, 1, 1)
plt.plot(times, timeline2)
if not self.inputs.stimuli_as_impulses:
if durations[i] == 0:
durations[i] = TA * nvol
stimdur = np.ones((int(durations[i] / dt)))
timeline2 = np.convolve(timeline2, stimdur)[0:len(timeline2)]
timeline += timeline2
timeline2[:] = 0
if bplot:
plt.subplot(4, 1, 2)
plt.plot(times, timeline)
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
timeline = np.convolve(timeline, hrf)[0:len(timeline)]
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
# create temporal deriv
timederiv = np.concatenate(([0], np.diff(timeline)))
if bplot:
plt.subplot(4, 1, 3)
plt.plot(times, timeline)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
plt.plot(times, timederiv)
# sample timeline
timeline2 = np.zeros((npts))
reg = []
regderiv = []
        for i, trial in enumerate(np.arange(nscans) // nvol):
scanstart = int((SCANONSET + trial * TR + (i % nvol) * TA) / dt)
scanidx = scanstart + np.arange(int(TA / dt))
timeline2[scanidx] = np.max(timeline)
reg.insert(i, np.mean(timeline[scanidx]) * reg_scale)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
regderiv.insert(i, np.mean(timederiv[scanidx]) * reg_scale)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
            iflogger.info('orthogonalizing derivative w.r.t. main regressor')
regderiv = orth(reg, regderiv)
if bplot:
plt.subplot(4, 1, 3)
plt.plot(times, timeline2)
plt.subplot(4, 1, 4)
plt.bar(np.arange(len(reg)), reg, width=0.5)
plt.savefig('sparse.png')
plt.savefig('sparse.svg')
if regderiv:
return [reg, regderiv]
else:
return reg
def _cond_to_regress(self, info, nscans):
"""Converts condition information to full regressors
"""
reg = []
regnames = []
for i, cond in enumerate(info.conditions):
if hasattr(info, 'amplitudes') and info.amplitudes:
amplitudes = info.amplitudes[i]
else:
amplitudes = None
regnames.insert(len(regnames), cond)
scaled_onsets = scale_timings(info.onsets[i],
self.inputs.input_units,
'secs',
self.inputs.time_repetition)
scaled_durations = scale_timings(info.durations[i],
self.inputs.input_units,
'secs',
self.inputs.time_repetition)
regressor = self._gen_regress(scaled_onsets,
scaled_durations,
amplitudes,
nscans)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
reg.insert(len(reg), regressor[0])
regnames.insert(len(regnames), cond + '_D')
reg.insert(len(reg), regressor[1])
else:
reg.insert(len(reg), regressor)
# need to deal with temporal and parametric modulators
# for sparse-clustered acquisitions enter T1-effect regressors
nvol = self.inputs.volumes_in_cluster
if nvol > 1:
for i in range(nvol - 1):
                treg = np.zeros((nscans // nvol, nvol))
treg[:, i] = 1
reg.insert(len(reg), treg.ravel().tolist())
regnames.insert(len(regnames), 'T1effect_%d' % i)
return reg, regnames
def _generate_clustered_design(self, infolist):
"""Generates condition information for sparse-clustered
designs.
"""
infoout = deepcopy(infolist)
for i, info in enumerate(infolist):
infoout[i].conditions = None
infoout[i].onsets = None
infoout[i].durations = None
if info.conditions:
img = load(self.inputs.functional_runs[i], mmap=NUMPY_MMAP)
nscans = img.shape[3]
reg, regnames = self._cond_to_regress(info, nscans)
if hasattr(infoout[i], 'regressors') and infoout[i].regressors:
if not infoout[i].regressor_names:
infoout[i].regressor_names = \
['R%d' % j for j in range(len(infoout[i].regressors))]
else:
infoout[i].regressors = []
infoout[i].regressor_names = []
for j, r in enumerate(reg):
regidx = len(infoout[i].regressors)
infoout[i].regressor_names.insert(regidx, regnames[j])
infoout[i].regressors.insert(regidx, r)
return infoout
def _generate_design(self, infolist=None):
if isdefined(self.inputs.subject_info):
infolist = self.inputs.subject_info
else:
infolist = gen_info(self.inputs.event_files)
sparselist = self._generate_clustered_design(infolist)
super(SpecifySparseModel, self)._generate_design(infolist=sparselist)
def _list_outputs(self):
outputs = self._outputs().get()
if not hasattr(self, '_sessinfo'):
self._generate_design()
outputs['session_info'] = self._sessinfo
if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
outputs['sparse_png_file'] = os.path.join(os.getcwd(), 'sparse.png')
outputs['sparse_svg_file'] = os.path.join(os.getcwd(), 'sparse.svg')
return outputs
| bsd-3-clause | -2,022,518,823,186,561,500 | 42.349078 | 102 | 0.52372 | false |
lovelysystems/lovely.pyrest | lovely/pyrest/rest.py | 1 | 7881 | import inspect
import logging
import venusian
from pyramid.config import predicates
log = logging.getLogger(__name__)
SERVICES = {}
def get_services():
return SERVICES.values()
class ViewMapper(object):
""" Mapper to pass request specific data to view method
"""
ROOT_ARRAY_KW_NAME = "items"
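    # A JSON array request body is passed to the view method as a single
    # keyword argument with this name (see json_body below).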
def __init__(self, **kw):
self.attr = kw.get('attr')
def __call__(self, view):
def wrapper(context, request):
def json_body(request):
""" Failsave function to access request.json_body
"""
try:
body = request.json_body
if isinstance(body, list):
return {self.ROOT_ARRAY_KW_NAME: body}
return body
except:
return {}
def mapply(func, request):
""" This function passes request.matchdict, request.params and
request.json_body as kwargs to the given function
"""
kw = {}
# add kwargs from matchdict
kw.update(request.matchdict)
# add kwargs from request params
kw.update(dict(request.params.items()))
# add kwargs from request body
kw.update(json_body(request))
return func(**kw)
inst = view(request)
meth = getattr(inst, self.attr)
return mapply(meth, request)
return wrapper
class BaseRouteNotFoundException(Exception):
""" A exception to indicate that the required base route was not found
Possible reasons for this exception:
- the route has not been defined
- the config has not been commited before calling config.scan()
"""
class RestService(object):
""" Decorator for REST API classes
@RestService('users')
class UserService(object):
def __init__(self, request):
self.request = request
        @rpcmethod_route(route_suffix='/{id}', request_method='PUT')
def edit(self, id, data):
# code goes here
def includeme(config):
config.add_route('users', '/users', static=True)
"""
venusian = venusian
def reverse_engineer_route(self, route):
kw = {}
if route.factory:
kw['factory'] = route.factory
if route.pregenerator:
kw['pregenerator'] = route.pregenerator
def xhr(p):
kw['xhr'] = p.val
def path_info(p):
kw['path_info'] = p.val.pattern
def request_param(p):
kw['request_param'] = p.val[0]
def header(p):
kw['header'] = p.text().split(" ", 1)[-1]
def accept(p):
kw['accept'] = p.val
def custom_predicates(p):
            if 'custom_predicates' not in kw:
kw['custom_predicates'] = []
kw['custom_predicates'].append(p.func)
def request_method(p):
kw['request_method'] = p.val[0]
predicate_map = {predicates.XHRPredicate: xhr,
predicates.PathInfoPredicate: path_info,
predicates.RequestParamPredicate: request_param,
predicates.HeaderPredicate: header,
predicates.AcceptPredicate: accept,
predicates.CustomPredicate: custom_predicates,
predicates.RequestMethodPredicate: request_method,
}
for p in route.predicates:
predicate_map[p.__class__](p)
return kw
def __init__(self, baseRouteName, **view_kwargs):
self.baseRouteName = baseRouteName
self.serviceName = None
self.view_kwargs = view_kwargs
# All methods of the services get registered here for sphinx autodoc
self.methods = []
def __call__(self, wrapped):
def callback(context, name, service):
config = context.config.with_package(info.module)
# load the base route to get it's resolved pattern
mapper = config.get_routes_mapper()
baseRoute = mapper.get_route(self.baseRouteName)
if baseRoute is None:
raise BaseRouteNotFoundException
# get default route arguments
route_defaults = self.reverse_engineer_route(baseRoute)
# get all rpcmethod decorated members
def isRESTMethod(obj):
return (inspect.ismethod(obj)
and (hasattr(obj, '__rpc_method_route__') or
hasattr(obj, '__rpc_method_view__'))
)
methods = inspect.getmembers(service, isRESTMethod)
# register the service
self.serviceName = '@'.join((self.baseRouteName, baseRoute.path))
SERVICES[self.serviceName] = self
self.description = service.__doc__
# if the module is used multiple times for documentation generation
            # the service gets registered several times, so reset methods here.
self.methods = []
# loop through all decorated methods and add a route and a view
# for it
for (methodName, method) in methods:
route_kw = {}
route_kw.update(route_defaults)
if hasattr(method, '__rpc_method_route__'):
route_kw.update(method.__rpc_method_route__)
# allow http method GET by default
if 'request_method' not in route_kw:
route_kw['request_method'] = 'GET'
view_kw = {}
view_kw.update(self.view_kwargs)
if hasattr(method, '__rpc_method_view__'):
view_kw.update(method.__rpc_method_view__)
route_name = ('.'.join((self.baseRouteName, methodName))
+ '@'
+ baseRoute.path
)
pattern = baseRoute.pattern + route_kw.pop('route_suffix', '')
# Register method
validator = None
if method.im_func.__name__ == 'validation_wrapper':
# index 2 of func_closure is the schema param of the
# validate method in the tuple, not accessible via keyword
validator = method.im_func.func_closure[2].cell_contents
self.methods.append(
(pattern, route_kw, view_kw, method, validator))
config.add_route(route_name, pattern, **route_kw)
config.add_view(view=service,
route_name=route_name,
attr=methodName,
mapper=ViewMapper,
renderer='json',
**view_kw)
log.debug('Adding REST method %s %s (%s)',
route_kw['request_method'], pattern, route_name)
info = self.venusian.attach(wrapped, callback, category='restservice',
depth=1)
return wrapped
def rpcmethod_route(context_factory=None, **kwargs):
""" Decorator to mark methods of classes decorated with `RestService`
as member of the REST Service
"""
def wrapper(f):
f.context_factory = context_factory
f.__rpc_method_route__ = kwargs
return f
return wrapper
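# Illustrative use on a method of a @RestService class (mirrors the
# RestService docstring above):
#     @rpcmethod_route(route_suffix='/{id}', request_method='PUT')
#     def edit(self, id, data):
#         ...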
def rpcmethod_view(context_factory=None, **kwargs):
""" Decorator to mark methods of classes decorated with `RestService`
as member of the REST Service
"""
def wrapper(f):
f.context_factory = context_factory
f.__rpc_method_view__ = kwargs
return f
return wrapper
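# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): how the pieces above are
# typically wired together. It assumes the decorator class defined above is
# exported as ``RestService`` (the name its docstrings use) and that Pyramid's
# Configurator is available; the route name, pattern, and service class are
# hypothetical.
#
# from pyramid.config import Configurator
#
# config = Configurator()
# config.add_route('api.user', '/api/users/{user_id}')
#
# @RestService('api.user')
# class UserService(object):
#
#     @rpcmethod_route()  # request_method defaults to 'GET'
#     def get(self, user_id):
#         return {'id': user_id}
#
#     @rpcmethod_route(request_method='POST', route_suffix='/rename')
#     @rpcmethod_view(permission='edit')
#     def rename(self, user_id):
#         return {'id': user_id, 'renamed': True}
#
# config.scan()  # runs the venusian callback, registering routes and views
# ---------------------------------------------------------------------------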
| apache-2.0 | 8,094,006,089,928,319,000 | 34.340807 | 79 | 0.531024 | false |
gatoravi/svviz | src/svviz/kde.py | 1 | 3322 | #-------------------------------------------------------------------------------
#
# Define classes for (uni/multi)-variate kernel density estimation.
#
# Currently, only Gaussian kernels are implemented.
#
# Written by: Robert Kern
#
# Date: 2004-08-09
#
# Modified: 2005-02-10 by Robert Kern.
# Contributed to Scipy
# 2005-10-07 by Robert Kern.
# Some fixes to match the new scipy_core
#
# Copyright 2004-2005 by Enthought, Inc.
#
#-------------------------------------------------------------------------------
from __future__ import division
from numpy import atleast_2d, reshape, zeros, newaxis, dot, exp, pi, sqrt, power, sum, linalg
import numpy as np
class gaussian_kde(object):
def __init__(self, dataset):
self.dataset = atleast_2d(dataset)
if not self.dataset.size > 1:
raise ValueError("`dataset` input should have multiple elements.")
self.d, self.n = self.dataset.shape
self._compute_covariance()
def evaluate(self, points):
points = atleast_2d(points)
d, m = points.shape
if d != self.d:
if d == 1 and m == self.d:
# points was passed in as a row vector
points = reshape(points, (self.d, 1))
m = 1
else:
msg = "points have dimension %s, dataset has dimension %s" % (d,
self.d)
raise ValueError(msg)
result = zeros((m,), dtype=float)  # builtin float; np.float is only a deprecated alias
if m >= self.n:
# there are more points than data, so loop over data
for i in range(self.n):
diff = self.dataset[:, i, newaxis] - points
tdiff = dot(self.inv_cov, diff)
energy = sum(diff*tdiff,axis=0) / 2.0
result = result + exp(-energy)
else:
# loop over points
for i in range(m):
diff = self.dataset - points[:, i, newaxis]
tdiff = dot(self.inv_cov, diff)
energy = sum(diff * tdiff, axis=0) / 2.0
result[i] = sum(exp(-energy), axis=0)
result = result / self._norm_factor
return result
__call__ = evaluate
def scotts_factor(self):
return power(self.n, -1./(self.d+4))
def _compute_covariance(self):
self.factor = self.scotts_factor()
# Cache covariance and inverse covariance of the data
if not hasattr(self, '_data_inv_cov'):
self._data_covariance = atleast_2d(np.cov(self.dataset, rowvar=1,
bias=False))
self._data_inv_cov = linalg.inv(self._data_covariance)
self.covariance = self._data_covariance * self.factor**2
self.inv_cov = self._data_inv_cov / self.factor**2
self._norm_factor = sqrt(linalg.det(2*pi*self.covariance)) * self.n
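# Minimal usage sketch for gaussian_kde (a sketch added for illustration, not
# part of the original svviz module). Only numpy is needed; plotting is
# deliberately omitted.
def _kde_usage_sketch():
    # bimodal 1-D sample, mirroring the __main__ demo below
    samples = np.concatenate([np.random.normal(size=100),
                              np.random.normal(loc=5, size=100)])
    kde = gaussian_kde(samples)          # bandwidth from Scott's factor
    grid = np.linspace(-4, 9, 50)
    density = kde(grid)                  # identical to kde.evaluate(grid)
    assert (density >= 0).all()          # a density is non-negative
    return grid, density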
if __name__ == '__main__':
from biorpy import r
from scipy import stats
values = np.concatenate([np.random.normal(size=20), np.random.normal(loc=6, size=30)])
kde = stats.gaussian_kde(values)
x = np.linspace(-5,10, 50)
y = kde(x)
print y
r.plot(x, y, type="l", col="red")
kde2 = gaussian_kde(values)
y2 = kde2(x)
r.lines(x, y2, col="blue", lty=2)
raw_input("")
| mit | -4,350,426,905,358,045,000 | 29.477064 | 93 | 0.524985 | false |
wxgeo/geophar | wxgeometrie/sympy/crypto/tests/test_crypto.py | 5 | 11371 | from sympy.core import symbols
from sympy.core.compatibility import range
from sympy.crypto.crypto import (cycle_list,
encipher_shift, encipher_affine, encipher_substitution,
check_and_join, encipher_vigenere, decipher_vigenere,
encipher_hill, decipher_hill, encipher_bifid5, encipher_bifid6,
bifid5_square, bifid6_square, bifid5, bifid6, bifid10,
decipher_bifid5, decipher_bifid6, encipher_kid_rsa,
decipher_kid_rsa, kid_rsa_private_key, kid_rsa_public_key,
decipher_rsa, rsa_private_key, rsa_public_key, encipher_rsa,
lfsr_connection_polynomial, lfsr_autocorrelation, lfsr_sequence,
encode_morse, decode_morse, elgamal_private_key, elgamal_public_key,
encipher_elgamal, decipher_elgamal, dh_private_key, dh_public_key,
dh_shared_key, decipher_shift, decipher_affine, encipher_bifid,
decipher_bifid, bifid_square, padded_key, uniq, decipher_gm,
encipher_gm, gm_public_key, gm_private_key)
from sympy.matrices import Matrix
from sympy.ntheory import isprime, is_primitive_root
from sympy.polys.domains import FF
from sympy.utilities.pytest import raises, slow
from random import randrange
def test_cycle_list():
assert cycle_list(3, 4) == [3, 0, 1, 2]
assert cycle_list(-1, 4) == [3, 0, 1, 2]
assert cycle_list(1, 4) == [1, 2, 3, 0]
def test_encipher_shift():
assert encipher_shift("ABC", 0) == "ABC"
assert encipher_shift("ABC", 1) == "BCD"
assert encipher_shift("ABC", -1) == "ZAB"
assert decipher_shift("ZAB", -1) == "ABC"
def test_encipher_affine():
assert encipher_affine("ABC", (1, 0)) == "ABC"
assert encipher_affine("ABC", (1, 1)) == "BCD"
assert encipher_affine("ABC", (-1, 0)) == "AZY"
assert encipher_affine("ABC", (-1, 1), symbols="ABCD") == "BAD"
assert encipher_affine("123", (-1, 1), symbols="1234") == "214"
assert encipher_affine("ABC", (3, 16)) == "QTW"
assert decipher_affine("QTW", (3, 16)) == "ABC"
def test_encipher_substitution():
assert encipher_substitution("ABC", "BAC", "ABC") == "BAC"
assert encipher_substitution("123", "1243", "1234") == "124"
def test_check_and_join():
assert check_and_join("abc") == "abc"
assert check_and_join(uniq("aaabc")) == "abc"
assert check_and_join("ab c".split()) == "abc"
assert check_and_join("abc", "a", filter=True) == "a"
raises(ValueError, lambda: check_and_join('ab', 'a'))
def test_encipher_vigenere():
assert encipher_vigenere("ABC", "ABC") == "ACE"
assert encipher_vigenere("ABC", "ABC", symbols="ABCD") == "ACA"
assert encipher_vigenere("ABC", "AB", symbols="ABCD") == "ACC"
assert encipher_vigenere("AB", "ABC", symbols="ABCD") == "AC"
assert encipher_vigenere("A", "ABC", symbols="ABCD") == "A"
def test_decipher_vigenere():
assert decipher_vigenere("ABC", "ABC") == "AAA"
assert decipher_vigenere("ABC", "ABC", symbols="ABCD") == "AAA"
assert decipher_vigenere("ABC", "AB", symbols="ABCD") == "AAC"
assert decipher_vigenere("AB", "ABC", symbols="ABCD") == "AA"
assert decipher_vigenere("A", "ABC", symbols="ABCD") == "A"
def test_encipher_hill():
A = Matrix(2, 2, [1, 2, 3, 5])
assert encipher_hill("ABCD", A) == "CFIV"
A = Matrix(2, 2, [1, 0, 0, 1])
assert encipher_hill("ABCD", A) == "ABCD"
assert encipher_hill("ABCD", A, symbols="ABCD") == "ABCD"
A = Matrix(2, 2, [1, 2, 3, 5])
assert encipher_hill("ABCD", A, symbols="ABCD") == "CBAB"
assert encipher_hill("AB", A, symbols="ABCD") == "CB"
# message length, n, does not need to be a multiple of k;
# it is padded
assert encipher_hill("ABA", A) == "CFGC"
assert encipher_hill("ABA", A, pad="Z") == "CFYV"
def test_decipher_hill():
A = Matrix(2, 2, [1, 2, 3, 5])
assert decipher_hill("CFIV", A) == "ABCD"
A = Matrix(2, 2, [1, 0, 0, 1])
assert decipher_hill("ABCD", A) == "ABCD"
assert decipher_hill("ABCD", A, symbols="ABCD") == "ABCD"
A = Matrix(2, 2, [1, 2, 3, 5])
assert decipher_hill("CBAB", A, symbols="ABCD") == "ABCD"
assert decipher_hill("CB", A, symbols="ABCD") == "AB"
# n does not need to be a multiple of k
assert decipher_hill("CFA", A) == "ABAA"
def test_encipher_bifid5():
assert encipher_bifid5("AB", "AB") == "AB"
assert encipher_bifid5("AB", "CD") == "CO"
assert encipher_bifid5("ab", "c") == "CH"
assert encipher_bifid5("a bc", "b") == "BAC"
def test_bifid5_square():
A = bifid5
f = lambda i, j: symbols(A[5*i + j])
M = Matrix(5, 5, f)
assert bifid5_square("") == M
def test_decipher_bifid5():
assert decipher_bifid5("AB", "AB") == "AB"
assert decipher_bifid5("CO", "CD") == "AB"
assert decipher_bifid5("ch", "c") == "AB"
assert decipher_bifid5("b ac", "b") == "ABC"
def test_encipher_bifid6():
assert encipher_bifid6("AB", "AB") == "AB"
assert encipher_bifid6("AB", "CD") == "CP"
assert encipher_bifid6("ab", "c") == "CI"
assert encipher_bifid6("a bc", "b") == "BAC"
def test_decipher_bifid6():
assert decipher_bifid6("AB", "AB") == "AB"
assert decipher_bifid6("CP", "CD") == "AB"
assert decipher_bifid6("ci", "c") == "AB"
assert decipher_bifid6("b ac", "b") == "ABC"
def test_bifid6_square():
A = bifid6
f = lambda i, j: symbols(A[6*i + j])
M = Matrix(6, 6, f)
assert bifid6_square("") == M
def test_rsa_public_key():
assert rsa_public_key(2, 2, 1) == (4, 1)
assert rsa_public_key(2, 3, 1) == (6, 1)
assert rsa_public_key(5, 3, 3) == (15, 3)
assert rsa_public_key(8, 8, 8) is False
def test_rsa_private_key():
assert rsa_private_key(2, 2, 1) == (4, 1)
assert rsa_private_key(2, 3, 1) == (6, 1)
assert rsa_private_key(5, 3, 3) == (15, 3)
assert rsa_private_key(23,29,5) == (667,493)
assert rsa_private_key(8, 8, 8) is False
def test_encipher_rsa():
puk = rsa_public_key(2, 2, 1)
assert encipher_rsa(2, puk) == 2
puk = rsa_public_key(2, 3, 1)
assert encipher_rsa(2, puk) == 2
puk = rsa_public_key(5, 3, 3)
assert encipher_rsa(2, puk) == 8
def test_decipher_rsa():
prk = rsa_private_key(2, 2, 1)
assert decipher_rsa(2, prk) == 2
prk = rsa_private_key(2, 3, 1)
assert decipher_rsa(2, prk) == 2
prk = rsa_private_key(5, 3, 3)
assert decipher_rsa(8, prk) == 2
def test_kid_rsa_public_key():
assert kid_rsa_public_key(1, 2, 1, 1) == (5, 2)
assert kid_rsa_public_key(1, 2, 2, 1) == (8, 3)
assert kid_rsa_public_key(1, 2, 1, 2) == (7, 2)
def test_kid_rsa_private_key():
assert kid_rsa_private_key(1, 2, 1, 1) == (5, 3)
assert kid_rsa_private_key(1, 2, 2, 1) == (8, 3)
assert kid_rsa_private_key(1, 2, 1, 2) == (7, 4)
def test_encipher_kid_rsa():
assert encipher_kid_rsa(1, (5, 2)) == 2
assert encipher_kid_rsa(1, (8, 3)) == 3
assert encipher_kid_rsa(1, (7, 2)) == 2
def test_decipher_kid_rsa():
assert decipher_kid_rsa(2, (5, 3)) == 1
assert decipher_kid_rsa(3, (8, 3)) == 1
assert decipher_kid_rsa(2, (7, 4)) == 1
def test_encode_morse():
assert encode_morse('ABC') == '.-|-...|-.-.'
assert encode_morse('SMS ') == '...|--|...||'
assert encode_morse('SMS\n') == '...|--|...||'
assert encode_morse('') == ''
assert encode_morse(' ') == '||'
assert encode_morse(' ', sep='`') == '``'
assert encode_morse(' ', sep='``') == '````'
assert encode_morse('!@#$%^&*()_+') == '-.-.--|.--.-.|...-..-|-.--.|-.--.-|..--.-|.-.-.'
def test_decode_morse():
assert decode_morse('-.-|.|-.--') == 'KEY'
assert decode_morse('.-.|..-|-.||') == 'RUN'
raises(KeyError, lambda: decode_morse('.....----'))
def test_lfsr_sequence():
raises(TypeError, lambda: lfsr_sequence(1, [1], 1))
raises(TypeError, lambda: lfsr_sequence([1], 1, 1))
F = FF(2)
assert lfsr_sequence([F(1)], [F(1)], 2) == [F(1), F(1)]
assert lfsr_sequence([F(0)], [F(1)], 2) == [F(1), F(0)]
F = FF(3)
assert lfsr_sequence([F(1)], [F(1)], 2) == [F(1), F(1)]
assert lfsr_sequence([F(0)], [F(2)], 2) == [F(2), F(0)]
assert lfsr_sequence([F(1)], [F(2)], 2) == [F(2), F(2)]
def test_lfsr_autocorrelation():
raises(TypeError, lambda: lfsr_autocorrelation(1, 2, 3))
F = FF(2)
s = lfsr_sequence([F(1), F(0)], [F(0), F(1)], 5)
assert lfsr_autocorrelation(s, 2, 0) == 1
assert lfsr_autocorrelation(s, 2, 1) == -1
def test_lfsr_connection_polynomial():
F = FF(2)
x = symbols("x")
s = lfsr_sequence([F(1), F(0)], [F(0), F(1)], 5)
assert lfsr_connection_polynomial(s) == x**2 + 1
s = lfsr_sequence([F(1), F(1)], [F(0), F(1)], 5)
assert lfsr_connection_polynomial(s) == x**2 + x + 1
def test_elgamal_private_key():
a, b, _ = elgamal_private_key(digit=100)
assert isprime(a)
assert is_primitive_root(b, a)
assert len(bin(a)) >= 102
def test_elgamal():
dk = elgamal_private_key(5)
ek = elgamal_public_key(dk)
P = ek[0]
assert P - 1 == decipher_elgamal(encipher_elgamal(P - 1, ek), dk)
raises(ValueError, lambda: encipher_elgamal(P, dk))
raises(ValueError, lambda: encipher_elgamal(-1, dk))
def test_dh_private_key():
p, g, _ = dh_private_key(digit = 100)
assert isprime(p)
assert is_primitive_root(g, p)
assert len(bin(p)) >= 102
def test_dh_public_key():
p1, g1, a = dh_private_key(digit = 100)
p2, g2, ga = dh_public_key((p1, g1, a))
assert p1 == p2
assert g1 == g2
assert ga == pow(g1, a, p1)
def test_dh_shared_key():
prk = dh_private_key(digit = 100)
p, _, ga = dh_public_key(prk)
b = randrange(2, p)
sk = dh_shared_key((p, _, ga), b)
assert sk == pow(ga, b, p)
raises(ValueError, lambda: dh_shared_key((1031, 14, 565), 2000))
def test_padded_key():
assert padded_key('b', 'ab') == 'ba'
raises(ValueError, lambda: padded_key('ab', 'ace'))
raises(ValueError, lambda: padded_key('ab', 'abba'))
def test_bifid():
raises(ValueError, lambda: encipher_bifid('abc', 'b', 'abcde'))
assert encipher_bifid('abc', 'b', 'abcd') == 'bdb'
raises(ValueError, lambda: decipher_bifid('bdb', 'b', 'abcde'))
assert encipher_bifid('bdb', 'b', 'abcd') == 'abc'
raises(ValueError, lambda: bifid_square('abcde'))
assert bifid5_square("B") == \
bifid5_square('BACDEFGHIKLMNOPQRSTUVWXYZ')
assert bifid6_square('B0') == \
bifid6_square('B0ACDEFGHIJKLMNOPQRSTUVWXYZ123456789')
def test_encipher_decipher_gm():
ps = [131, 137, 139, 149, 151, 157, 163, 167,
173, 179, 181, 191, 193, 197, 199]
qs = [89, 97, 101, 103, 107, 109, 113, 127,
131, 137, 139, 149, 151, 157, 47]
messages = [
0, 32855, 34303, 14805, 1280, 75859, 38368,
724, 60356, 51675, 76697, 61854, 18661,
]
for p, q in zip(ps, qs):
pri = gm_private_key(p, q)
for msg in messages:
pub = gm_public_key(p, q)
enc = encipher_gm(msg, pub)
dec = decipher_gm(enc, pri)
assert dec == msg
def test_gm_private_key():
raises(ValueError, lambda: gm_public_key(13, 15))
raises(ValueError, lambda: gm_public_key(0, 0))
raises(ValueError, lambda: gm_public_key(0, 5))
    assert gm_public_key(17, 19)[1] == 17 * 19  # the public modulus N is p*q
def test_gm_public_key():
assert 323 == gm_public_key(17, 19)[1]
assert 15 == gm_public_key(3, 5)[1]
raises(ValueError, lambda: gm_public_key(15, 19))
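def _rsa_round_trip_sketch():
    # A small worked example (a sketch, not one of the sympy tests above),
    # using the same toy parameters as test_encipher_rsa/test_decipher_rsa:
    # p=5, q=3, e=3.
    puk = rsa_public_key(5, 3, 3)     # (15, 3)
    prk = rsa_private_key(5, 3, 3)    # (15, 3)
    cipher = encipher_rsa(2, puk)     # pow(2, 3, 15) == 8
    assert decipher_rsa(cipher, prk) == 2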
| gpl-2.0 | -7,391,570,844,300,586,000 | 32.74184 | 92 | 0.58403 | false |
genixpro/formencode | setup.py | 1 | 1696 | """FormEncode validates and converts nested structures.
It allows for a declarative form of defining the validation,
and decoupled processes for filling and generating forms.
The official repo is at GitHub: https://github.com/formencode/formencode
"""
import sys
from setuptools import setup, find_packages
version = '1.3.0'
if not '2.6' <= sys.version < '3.0' and not '3.2' <= sys.version:
raise ImportError('Python version not supported')
tests_require = ['nose', 'pycountry',
'dnspython' if sys.version < '3.0' else 'dnspython3']
doctests = ['docs/htmlfill.txt', 'docs/Validator.txt',
'formencode/tests/non_empty.txt']
setup(name='FormEncode',
version=version,
# requires_python='>=2.6,!=3.0,!=3.1', # PEP345
description="HTML form validation, generation, and conversion package",
long_description=__doc__,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Ian Bicking',
author_email='[email protected]',
url='http://formencode.org',
license='PSF',
zip_safe=False,
packages=find_packages(),
include_package_data=True,
package_data={'formencode': ['../docs/*.txt']},
test_suite='formencode.tests',
tests_require=tests_require,
extras_require={'testing': tests_require},
use_2to3=True,
convert_2to3_doctests=doctests,
)
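# A minimal example of the validation this package provides (a sketch based on
# FormEncode's documented Schema/validators API; it is not used by setup.py).
def _formencode_usage_sketch():
    import formencode
    from formencode import validators

    class Registration(formencode.Schema):
        name = validators.String(not_empty=True)
        age = validators.Int(min=0, max=150)

    # converts and validates: returns {'name': 'Ada', 'age': 36}
    return Registration().to_python({'name': 'Ada', 'age': '36'})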
| mit | 4,995,181,731,740,607,000 | 32.92 | 77 | 0.635613 | false |
kevinlee12/oppia | core/controllers/suggestion_test.py | 1 | 56805 | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for suggestion controllers."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from constants import constants
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import question_domain
from core.domain import question_services
from core.domain import rights_manager
from core.domain import skill_services
from core.domain import state_domain
from core.domain import story_domain
from core.domain import story_services
from core.domain import suggestion_services
from core.domain import topic_domain
from core.domain import topic_services
from core.domain import user_services
from core.platform import models
from core.tests import test_utils
import feconf
(suggestion_models, feedback_models) = models.Registry.import_models([
models.NAMES.suggestion, models.NAMES.feedback])
class SuggestionUnitTests(test_utils.GenericTestBase):
EXP_ID = 'exp1'
TRANSLATION_LANGUAGE_CODE = 'en'
AUTHOR_EMAIL = '[email protected]'
AUTHOR_EMAIL_2 = '[email protected]'
REVIEWER_EMAIL = '[email protected]'
TRANSLATOR_EMAIL = '[email protected]'
NORMAL_USER_EMAIL = '[email protected]'
def setUp(self):
super(SuggestionUnitTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.AUTHOR_EMAIL_2, 'author2')
self.signup(self.NORMAL_USER_EMAIL, 'normalUser')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.signup(self.TRANSLATOR_EMAIL, 'translator')
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.author_id_2 = self.get_user_id_from_email(self.AUTHOR_EMAIL_2)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.translator_id = self.get_user_id_from_email(self.TRANSLATOR_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
self.editor = user_services.UserActionsInfo(self.editor_id)
# Login and create exploration and suggestions.
self.login(self.EDITOR_EMAIL)
exploration = (
self.save_new_linear_exp_with_state_names_and_interactions(
self.EXP_ID, self.editor_id, ['State 1', 'State 2', 'State 3'],
['TextInput'], category='Algebra'))
self.old_content = state_domain.SubtitledHtml(
'content', '<p>old content html</p>').to_dict()
exploration.states['State 1'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exploration.states['State 2'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exploration.states['State 3'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exp_services._save_exploration(self.editor_id, exploration, '', []) # pylint: disable=protected-access
rights_manager.publish_exploration(self.editor, self.EXP_ID)
rights_manager.assign_role_for_exploration(
self.editor, self.EXP_ID, self.owner_id,
rights_manager.ROLE_EDITOR)
self.new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
self.resubmit_change_content = state_domain.SubtitledHtml(
'content', '<p>resubmit change content html</p>').to_dict()
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models
.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 1',
}, csrf_token=csrf_token)
self.logout()
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models
.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 2',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 2',
}, csrf_token=csrf_token)
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models
.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 3',
}, csrf_token=csrf_token)
self.logout()
self.login(self.TRANSLATOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': suggestion_models.TARGET_TYPE_EXPLORATION,
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'State 3',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>old content html</p>',
'translation_html': '<p>In Hindi</p>'
},
'description': 'change to state 3',
}, csrf_token=csrf_token)
self.logout()
def test_create_suggestion(self):
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models
.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'new_value': self.new_content
},
'description': 'change again to state 3',
}, csrf_token=csrf_token)
suggestions = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions']
self.assertEqual(len(suggestions), 3)
self.logout()
def test_create_suggestion_invalid_target_version_input(self):
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models
.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': 'invalid target version',
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'new_value': self.new_content
},
'description': 'change again to state 3',
}, csrf_token=csrf_token, expected_status_int=400)
suggestions = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions']
self.assertEqual(
response['error'],
'Expected target_version_at_submission to be an int, received <type'
' \'unicode\'>')
self.assertEqual(len(suggestions), 2)
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_suggestion_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
# Invalid format of suggestion id.
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'], 'invalid_suggestion_id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'Invalid format for suggestion_id. It must contain 3 parts '
'separated by \'.\'')
csrf_token = self.get_new_csrf_token()
# Suggestion does not exist.
self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
'exploration.target_id.id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token,
expected_status_int=404)
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_target_type(self):
self.login(self.EDITOR_EMAIL)
question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id']
}
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.editor_id)
suggestion_services.create_suggestion(
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION,
suggestion_models.TARGET_TYPE_TOPIC, exp_id, 1,
self.author_id, {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': question_dict,
'skill_id': None,
'skill_difficulty': 0.3
}, None)
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.author_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, exp_id,
suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'This handler allows actions only on suggestions to explorations.')
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_target_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
self.save_new_default_exploration('exp_id', self.editor_id)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, 'exp_id',
suggestion_to_accept['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'The exploration id provided does not match the exploration id '
'present as part of the suggestion_id')
self.logout()
    def test_owner_of_exploration_cannot_respond_to_own_suggestion(self):
self.login(self.EDITOR_EMAIL)
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.editor_id)
new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
change_cmd = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': new_content
}
suggestion_services.create_suggestion(
suggestion_models.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
suggestion_models.TARGET_TYPE_EXPLORATION, exp_id, 1,
self.editor_id, change_cmd, 'sample description')
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.editor_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
exp_id, suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=401)
self.assertEqual(
response['error'], 'You cannot accept/reject your own suggestion.')
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_action(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']),
{'action': 'invalid_action'}, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid action.')
self.logout()
def test_reject_suggestion_to_exploration(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_reject = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_reject['target_id'],
suggestion_to_reject['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_REJECTED)
self.logout()
def test_accept_suggestion(self):
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
# Test editor can accept successfully.
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.assertEqual(
exploration.states[suggestion_to_accept[
'change']['state_name']].content.html,
suggestion_to_accept['change']['new_value']['html'])
self.logout()
# Testing that a user without permissions cannot accept.
self.login(self.NORMAL_USER_EMAIL)
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=401)
self.logout()
# Testing that author cannot accept own suggestion.
self.login(self.AUTHOR_EMAIL_2)
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=401)
# Testing users with scores above threshold can accept.
self.login(self.AUTHOR_EMAIL)
suggestion_services.increment_score_for_user(
self.author_id, 'content.Algebra', 15)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
# Testing admins can accept suggestions.
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][1]
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][1]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_suggestion_list_handler_with_invalid_query_field(self):
response = self.get_json(
'%s?invalid_query_field=value' % (
feconf.SUGGESTION_LIST_URL_PREFIX), expected_status_int=400)
self.assertEqual(
response['error'],
'Not allowed to query on field invalid_query_field')
def test_suggestion_list_handler(self):
suggestions = self.get_json(
'%s?author_id=%s&target_type=%s&target_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX, self.author_id_2,
suggestion_models.TARGET_TYPE_EXPLORATION, self.EXP_ID)
)['suggestions']
self.assertEqual(len(suggestions), 2)
def test_cannot_resubmit_suggestion_with_invalid_suggestion_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/resubmit/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, 'invalid_suggestion_id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'No suggestion found with given suggestion id')
def test_resubmit_rejected_suggestion(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion = suggestion_services.query_suggestions(
[('author_id', self.author_id), ('target_id', self.EXP_ID)])[0]
suggestion_services.reject_suggestion(
suggestion, self.reviewer_id, 'reject message')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/resubmit/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, suggestion.suggestion_id), {
'summary_message': 'summary message',
'action': u'resubmit',
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': self.resubmit_change_content,
'old_value': self.old_content
}
}, csrf_token=csrf_token)
suggestion = suggestion_services.query_suggestions(
[('author_id', self.author_id), ('target_id', self.EXP_ID)])[0]
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(
suggestion.change.new_value['html'],
self.resubmit_change_content['html'])
self.assertEqual(
suggestion.change.cmd, exp_domain.CMD_EDIT_STATE_PROPERTY)
self.assertEqual(
suggestion.change.property_name, exp_domain.STATE_PROPERTY_CONTENT)
self.assertEqual(
suggestion.change.state_name, 'State 1')
self.logout()
def test_translation_accept_suggestion_by_reviewer(self):
# Test reviewer can accept successfully.
self.login(self.REVIEWER_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
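def _example_suggestion_payload(version):
    """A sketch (not an Oppia API definition) of the payload shape shared by
    the suggestion POSTs in the tests above; the ids and HTML are the sample
    values used in this file.
    """
    return {
        'suggestion_type': (
            suggestion_models.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
        'target_type': suggestion_models.TARGET_TYPE_EXPLORATION,
        'target_id': 'exp1',
        'target_version_at_submission': version,
        'change': {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': 'State 1',
            'old_value': {'content_id': 'content',
                          'html': '<p>old content html</p>'},
            'new_value': {'content_id': 'content',
                          'html': '<p>new content html</p>'},
        },
        'description': 'change to state 1',
    }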
class QuestionSuggestionTests(test_utils.GenericTestBase):
AUTHOR_EMAIL = '[email protected]'
AUTHOR_EMAIL_2 = '[email protected]'
# Needs to be 12 characters long.
SKILL_ID = 'skill1234567'
SKILL_DESCRIPTION = 'skill to link question to'
def setUp(self):
super(QuestionSuggestionTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.save_new_skill(
self.SKILL_ID, self.admin_id, description=self.SKILL_DESCRIPTION)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID]
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': suggestion_models.TARGET_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_query_question_suggestions(self):
suggestions = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions']
self.assertEqual(len(suggestions), 1)
suggestion = suggestions[0]
self.assertEqual(
suggestion['suggestion_type'],
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION)
self.assertEqual(suggestion['target_id'], self.SKILL_ID)
self.assertEqual(
suggestion['target_type'], suggestion_models.TARGET_TYPE_SKILL)
self.assertEqual(
suggestion['change']['cmd'],
question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION)
def test_accept_question_suggestion(self):
suggestion_to_accept = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions'][0]
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'This looks good!',
'skill_id': self.SKILL_ID
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
(
questions, merged_question_skill_links, _) = (
question_services.get_displayable_question_skill_link_details(
1, [self.SKILL_ID], ''))
self.assertEqual(len(questions), 1)
self.assertEqual(
merged_question_skill_links[0].skill_descriptions,
[self.SKILL_DESCRIPTION])
self.assertEqual(
merged_question_skill_links[0].skill_difficulties, [0.3])
self.assertEqual(
questions[0].question_content,
self.question_dict['question_state_data']['content']['html']
)
thread_messages = feedback_services.get_messages(
suggestion_to_accept['suggestion_id'])
last_message = thread_messages[len(thread_messages) - 1]
self.assertEqual(last_message.text, 'This looks good!')
class SkillSuggestionTests(test_utils.GenericTestBase):
AUTHOR_EMAIL = '[email protected]'
REVIEWER_EMAIL = '[email protected]'
def setUp(self):
super(SkillSuggestionTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
user_services.allow_user_to_review_question(self.reviewer_id)
self.skill_id = skill_services.get_new_skill_id()
self.save_new_skill(
self.skill_id, self.admin_id, description='Description')
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.skill_id]
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': suggestion_models.TARGET_TYPE_SKILL,
'target_id': self.skill_id,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.skill_id,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_access_suggestion_to_skill_handler(self):
self.login(self.ADMIN_EMAIL)
thread_id = feedback_services.create_thread(
suggestion_models.TARGET_TYPE_QUESTION, self.skill_id,
self.author_id, 'description', '', has_suggestion=True)
csrf_token = self.get_new_csrf_token()
self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, self.skill_id,
thread_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.logout()
def test_suggestion_to_skill_handler_with_invalid_target_type(self):
self.login(self.ADMIN_EMAIL)
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.admin_id)
new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
change_cmd = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': new_content
}
suggestion_services.create_suggestion(
suggestion_models.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
suggestion_models.TARGET_TYPE_EXPLORATION, exp_id, 1,
self.author_id, change_cmd, 'sample description')
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.author_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
self.skill_id, suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'This handler allows actions only on suggestions to skills.')
self.logout()
def test_suggestion_to_skill_handler_with_invalid_target_id(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
'skill_id', suggestion_to_accept['suggestion_id']),
{
'action': u'reject',
'review_message': u'Rejected!'
},
csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'The skill id provided does not match the skill id '
'present as part of the suggestion_id')
self.logout()
def test_suggestion_to_skill_handler_with_invalid_action(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']),
{'action': 'invalid_action'}, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid action.')
self.logout()
def test_reject_suggestion_to_skill(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_reject = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_reject['target_id'],
suggestion_to_reject['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_REJECTED)
self.logout()
def test_accept_suggestion_to_skill(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted!',
'skill_id': self.skill_id
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_reviewer_accept_suggestion_to_skill(self):
self.login(self.REVIEWER_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted!',
'skill_id': self.skill_id
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_ACCEPTED)
self.logout()
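def _example_review_action_payloads():
    """A sketch of the two PUT bodies exercised against
    SUGGESTION_ACTION_URL_PREFIX in the tests above; 'skill_id' is only sent
    when accepting a question suggestion to a skill (the id shown is the
    sample value used in this file).
    """
    accept = {
        'action': 'accept',
        'commit_message': 'commit message',
        'review_message': 'Accepted!',
        'skill_id': 'skill1234567',
    }
    reject = {
        'action': 'reject',
        'review_message': 'Rejected!',
    }
    return accept, reject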
class UserSubmittedSuggestionsHandlerTest(test_utils.GenericTestBase):
"""Unit test for the UserSubmittedSuggestionsHandler."""
AUTHOR_EMAIL = '[email protected]'
def setUp(self):
super(UserSubmittedSuggestionsHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.EXP_ID = 'exp1'
# Needs to be 12 characters long.
self.SKILL_ID = 'skill1234567'
self.SKILL_DESCRIPTION = 'skill to link question to'
exploration = self.save_new_valid_exploration(
self.EXP_ID, self.owner_id, title='Exploration title',
category='Algebra', end_state_name='End State')
self.publish_exploration(self.owner_id, self.EXP_ID)
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description')
topic_services.save_new_topic(self.owner_id, topic)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID, 'story-a')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': self.EXP_ID
})], 'Changes.')
self.save_new_skill(
self.SKILL_ID, self.owner_id, description=self.SKILL_DESCRIPTION)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.editor_id
self.set_admins([self.ADMIN_USERNAME])
self.editor = user_services.UserActionsInfo(self.editor_id)
# Login and update the exploration, then create suggestions.
self.login(self.EDITOR_EMAIL)
exp_services.update_exploration(
self.owner_id, self.EXP_ID, [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>new content html</p>'
}
})], 'Add content')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': (suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': self.EXP_ID,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>new content html</p>',
'translation_html': '<p>new content html in Hindi</p>'
},
'description': 'Adds translation',
}, csrf_token=csrf_token)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID]
}
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': suggestion_models.TARGET_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': None,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_exploration_handler_returns_data(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getsubmittedsuggestions/topic/translate_content')
self.assertEqual(response, {})
def test_skill_handler_returns_data(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getsubmittedsuggestions/topic/add_question')
self.assertEqual(response, {})
def test_handler_with_invalid_suggestion_type_raise_error(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getsubmittedsuggestions/exploration/invalid_suggestion_type',
expected_status_int=400)
def test_handler_with_invalid_target_type_raise_error(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getsubmittedsuggestions/invalid_target_type'
'/translate_content', expected_status_int=400)
class ReviewableSuggestionsHandlerTest(test_utils.GenericTestBase):
"""Unit test for the ReviewableSuggestionsHandler."""
def setUp(self):
super(ReviewableSuggestionsHandlerTest, self).setUp()
self.AUTHOR_EMAIL = '[email protected]'
self.REVIEWER_EMAIL = '[email protected]'
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.EXP_ID = 'exp1'
# Needs to be 12 characters long.
self.SKILL_ID = 'skill1234567'
self.SKILL_DESCRIPTION = 'skill to link question to'
exploration = self.save_new_valid_exploration(
self.EXP_ID, self.owner_id, title='Exploration title',
category='Algebra', end_state_name='End State')
self.publish_exploration(self.owner_id, self.EXP_ID)
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description')
topic_services.save_new_topic(self.owner_id, topic)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID, 'story-b')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': self.EXP_ID
})], 'Changes.')
self.save_new_skill(
self.SKILL_ID, self.owner_id, description=self.SKILL_DESCRIPTION)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.editor = user_services.UserActionsInfo(self.editor_id)
user_services.allow_user_to_review_question(self.reviewer_id)
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
# Login and update the exploration, then create suggestions.
self.login(self.EDITOR_EMAIL)
exp_services.update_exploration(
self.owner_id, self.EXP_ID, [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>new content html</p>'
}
})], 'Add content')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': (suggestion_models.TARGET_TYPE_EXPLORATION),
'target_id': self.EXP_ID,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>new content html</p>',
'translation_html': '<p>new content html in Hindi</p>'
},
'description': 'Adds translation',
}, csrf_token=csrf_token)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID]
}
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
suggestion_models.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': suggestion_models.TARGET_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': None,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_exploration_handler_returns_data(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getreviewablesuggestions/topic/translate_content')
self.assertEqual(response, {})
def test_skill_handler_returns_data(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getreviewablesuggestions/topic/add_question')
self.assertEqual(response, {})
def test_handler_with_invalid_suggestion_type_raise_error(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getreviewablesuggestions/exploration/invalid_suggestion_type',
expected_status_int=404)
def test_handler_with_invalid_target_type_raise_error(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getreviewablesuggestions/invalid_target_type'
'/translate_content', expected_status_int=400)
| apache-2.0 | -9,033,852,202,733,182,000 | 39.430605 | 111 | 0.565056 | false |
grupoirona/django-date-validators | test_project/test_project/settings.py | 1 | 2095 | import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = '6+dqad9^b51rix$3hc#rdn9@%6uhat+@$9udx^yh=j-1+8+2n*'
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_date_validators'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'test_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'test_project.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
| isc | 8,153,460,069,941,095,000 | 23.360465 | 91 | 0.64821 | false |
abhaystoic/barati | barati/vendors/views_cluster/list_product.py | 1 | 1399 | from django.shortcuts import render
from django.template import RequestContext
from django.shortcuts import render, render_to_response
from django.views.generic import View
from django.http import HttpResponse
from customers.models import Users as users
from customers.models import Vendors as vendors
from customers.models import Orders as orders
from customers.models import Address, Venue_Types, Card_Types, Beautician_Types
import sys, json
class List_Product(View):
try:
template_name = 'vendors/list_product.html'
def get(self, request):
context_dict = {}
orders_list = []
user = users.objects.get(username=request.user.username)
#Allow only admin and vendors to see the vendor pages otherwise redirect to the customer index page
if user.role == 'customer':
self.template_name = 'customers/index.html'
venue_subtypes = Venue_Types.objects.all()
card_subtypes = Card_Types.objects.all()
beautician_subtypes = Beautician_Types.objects.all()
context_dict.update({
'venue_subtypes' : venue_subtypes,
'card_subtypes' : card_subtypes,
'beautician_subtypes' : beautician_subtypes
})
return render(request, self.template_name, context_dict)
except Exception as e:
print e
print sys.exc_traceback.tb_lineno
| apache-2.0 | 9,219,910,691,958,777,000 | 38.971429 | 108 | 0.68549 | false |
ikargis/horizon_fod | openstack_dashboard/dashboards/project/images_and_snapshots/images/forms.py | 1 | 10129 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing images.
"""
from django.conf import settings # noqa
from django.forms import ValidationError # noqa
from django.forms.widgets import HiddenInput # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
IMAGE_BACKEND_SETTINGS = getattr(settings, 'OPENSTACK_IMAGE_BACKEND', {})
IMAGE_FORMAT_CHOICES = IMAGE_BACKEND_SETTINGS.get('image_formats', [])
class CreateImageForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Name"), required=True)
description = forms.CharField(widget=forms.widgets.Textarea(),
label=_("Description"),
required=False)
source_type = forms.ChoiceField(
label=_('Image Source'),
choices=[('url', _('Image Location')),
('file', _('Image File'))],
widget=forms.Select(attrs={
'class': 'switchable',
'data-slug': 'source'}))
copy_from = forms.CharField(max_length="255",
label=_("Image Location"),
help_text=_("An external (HTTP) URL to load "
"the image from."),
widget=forms.TextInput(attrs={
'class': 'switched',
'data-switch-on': 'source',
'data-source-url': _('Image Location')}),
required=False)
image_file = forms.FileField(label=_("Image File"),
help_text=_("A local image to upload."),
widget=forms.FileInput(attrs={
'class': 'switched',
'data-switch-on': 'source',
'data-source-file': _('Image File')}),
required=False)
disk_format = forms.ChoiceField(label=_('Format'),
required=True,
choices=[],
widget=forms.Select(attrs={'class':
'switchable'}))
architecture = forms.CharField(max_length="255", label=_("Architecture"),
required=False)
minimum_disk = forms.IntegerField(label=_("Minimum Disk (GB)"),
help_text=_('The minimum disk size'
' required to boot the'
' image. If unspecified, this'
' value defaults to 0'
' (no minimum).'),
required=False)
minimum_ram = forms.IntegerField(label=_("Minimum Ram (MB)"),
help_text=_('The minimum memory size'
' required to boot the'
' image. If unspecified, this'
' value defaults to 0 (no'
' minimum).'),
required=False)
is_public = forms.BooleanField(label=_("Public"), required=False)
protected = forms.BooleanField(label=_("Protected"), required=False)
def __init__(self, *args, **kwargs):
super(CreateImageForm, self).__init__(*args, **kwargs)
if not settings.HORIZON_IMAGES_ALLOW_UPLOAD:
self._hide_file_source_type()
self.fields['disk_format'].choices = IMAGE_FORMAT_CHOICES
def _hide_file_source_type(self):
self.fields['image_file'].widget = HiddenInput()
source_type = self.fields['source_type']
source_type.choices = [choice for choice in source_type.choices
if choice[0] != 'file']
if len(source_type.choices) == 1:
source_type.widget = HiddenInput()
def clean(self):
data = super(CreateImageForm, self).clean()
# The image_file key can be missing based on particular upload
# conditions. Code defensively for it here...
image_file = data.get('image_file', None)
if not data['copy_from'] and not image_file:
raise ValidationError(
_("A image or external image location must be specified."))
elif data['copy_from'] and image_file:
raise ValidationError(
_("Can not specify both image and external image location."))
else:
return data
def handle(self, request, data):
# Glance does not really do anything with container_format at the
        # moment. It requires it to be set to the same value as disk_format
        # for the three Amazon image types; otherwise it just treats the
        # image as 'bare.' As such, we set container_format here ourselves
        # instead of bothering the user for information we can already
        # determine.
if data['disk_format'] in ('ami', 'aki', 'ari',):
container_format = data['disk_format']
else:
container_format = 'bare'
meta = {'is_public': data['is_public'],
'protected': data['protected'],
'disk_format': data['disk_format'],
'container_format': container_format,
'min_disk': (data['minimum_disk'] or 0),
'min_ram': (data['minimum_ram'] or 0),
'name': data['name'],
'properties': {}}
if data['description']:
meta['properties']['description'] = data['description']
if data['architecture']:
meta['properties']['architecture'] = data['architecture']
if (settings.HORIZON_IMAGES_ALLOW_UPLOAD and
data.get('image_file', None)):
meta['data'] = self.files['image_file']
else:
meta['copy_from'] = data['copy_from']
try:
image = api.glance.image_create(request, **meta)
messages.success(request,
_('Your image %s has been queued for creation.') %
data['name'])
return image
except Exception:
exceptions.handle(request, _('Unable to create new image.'))
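# Hedged illustration (not part of the form API): for a public qcow2 image
# fetched from a URL, the meta dict assembled in handle() above would look
# roughly like this (field values are made up):
#
#   {'is_public': True, 'protected': False, 'disk_format': 'qcow2',
#    'container_format': 'bare', 'min_disk': 0, 'min_ram': 0,
#    'name': 'my-image', 'properties': {},
#    'copy_from': 'http://example.com/image.qcow2'}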
class UpdateImageForm(forms.SelfHandlingForm):
image_id = forms.CharField(widget=forms.HiddenInput())
name = forms.CharField(max_length="255", label=_("Name"))
description = forms.CharField(widget=forms.widgets.Textarea(),
label=_("Description"),
required=False)
kernel = forms.CharField(max_length="36", label=_("Kernel ID"),
required=False,
widget=forms.TextInput(
attrs={'readonly': 'readonly'}
))
ramdisk = forms.CharField(max_length="36", label=_("Ramdisk ID"),
required=False,
widget=forms.TextInput(
attrs={'readonly': 'readonly'}
))
architecture = forms.CharField(label=_("Architecture"), required=False,
widget=forms.TextInput(
attrs={'readonly': 'readonly'}
))
disk_format = forms.CharField(label=_("Format"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}
))
public = forms.BooleanField(label=_("Public"), required=False)
protected = forms.BooleanField(label=_("Protected"), required=False)
def handle(self, request, data):
image_id = data['image_id']
error_updating = _('Unable to update image "%s".')
if data['disk_format'] in ['aki', 'ari', 'ami']:
container_format = data['disk_format']
else:
container_format = 'bare'
meta = {'is_public': data['public'],
'protected': data['protected'],
'disk_format': data['disk_format'],
'container_format': container_format,
'name': data['name'],
'properties': {'description': data['description']}}
if data['kernel']:
meta['properties']['kernel_id'] = data['kernel']
if data['ramdisk']:
meta['properties']['ramdisk_id'] = data['ramdisk']
if data['architecture']:
meta['properties']['architecture'] = data['architecture']
# Ensure we do not delete properties that have already been
# set on an image.
meta['purge_props'] = False
try:
image = api.glance.image_update(request, image_id, **meta)
messages.success(request, _('Image was successfully updated.'))
return image
except Exception:
exceptions.handle(request, error_updating % image_id)
| apache-2.0 | 7,224,893,702,934,788,000 | 44.832579 | 78 | 0.51782 | false |
eljrax/autoscale_setup | load_balancing/add_self_to_lb.py | 1 | 5035 | #!/usr/bin/env python
###################################################################################
# #
# This script should be executed as the last thing that happens during #
# the configuration phase of a server. It will perform the health check #
# defined in the load balancer(s) configured below, and add itself as a    #
# node if successful. #
# For example: if the load balancer has a HTTP health check expecting a #
# 200 response from a request to /, it will make this call and verify the #
# status code before adding itself as an ENABLED/ONLINE node. #
# #
# Please modify the variables in the CONFIGURATION section below before executing #
# Author: Erik Ljungstrom #
# License: Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0 #
###################################################################################
from __future__ import print_function
import os
import pyrax
import netifaces as ni
import urllib2
import socket
import re
import random
from time import sleep
####################### CONFIGURATION #######################
# Network interface to grab the IP address from. This is the IP address
# that will be used in the health check and ultimately added to the
# load balancer. (REQUIRED)
# e.g. iface = "eth1"
iface = ""
# LOAD BALANCER(S) (REQUIRED)
#
# e.g.
# Single Load Balancer
# lbs = [1234]
# Multiple Load balancers
# lbs = [1234, 5678]
lbs = []
# Path to file containing credentials (REQUIRED)
# e.g. credentials = '/opt/autoscale/.cloud_credentials'
# File format:
#
# [rackspace_cloud]
# username =
# api_key =
#
credentials = ''
# Name to send as Host: header with health check request (optional)
host_header = None
# Protocol to utilise in url check (override LB health check) (optional)
protocol = None
######################################################################
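# Hedged example of a filled-in configuration (all values illustrative):
# iface = "eth1"
# lbs = [123456]
# credentials = '/opt/autoscale/.cloud_credentials'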
def get_addr(iface):
ni.ifaddresses(iface)
ip = ni.ifaddresses(iface)[2][0]['addr']
return ip
def health_check(health_check, port=80):
addr = get_addr(iface)
    if 'type' not in health_check:
print ("No health check present on load balancer")
return
if health_check.get('type') == 'CONNECT':
check_port(addr, port, health_check.get('timeout'))
elif health_check.get('type') in ['HTTP', 'HTTPS']:
check_url(health_check, addr)
else:
raise Exception("Unsupported health check, please implement your own")
def check_port(addr, port, timeout):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(timeout)
result = sock.connect_ex((addr, port))
if result != 0:
raise Exception("Error connecting to port %s: error: %s" % (port, result))
return result
def check_url(health_check, addr):
global host_header
expected_resp = re.compile(health_check.get('bodyRegex', '.*'))
expected_code = re.compile(health_check.get('statusRegex', '.*'))
proto = protocol if protocol else health_check.get('type').lower()
url = ("%s://%s/%s" % (proto, addr, health_check.get('path', '/')))
if not host_header:
host_header = addr
headers = { 'Host': host_header }
req = urllib2.Request(url, headers=headers)
response = urllib2.urlopen(req)
contents_result = expected_resp.search(response.read())
status_result = expected_code.match(str(response.getcode()))
if not contents_result or not status_result:
raise Exception("check_url(): Response content does not match expected result")
return True
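# Hedged example call for check_url(); the monitor dict and address are
# illustrative. For an HTTP monitor with statusRegex '^200$' and no
# bodyRegex, the check passes iff the node answers with a 200:
#
# check_url({'type': 'HTTP', 'path': '/', 'statusRegex': '^200$'}, '10.0.0.5')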
def main():
pyrax.set_setting("identity_type", "rackspace")
pyrax.set_credential_file(credentials)
clb = pyrax.cloud_loadbalancers
my_ip = get_addr(iface)
for lb_id in lbs:
retry = 5
lb=clb.get(lb_id)
try:
health_check(lb.get_health_monitor(), lb.port)
except Exception as e:
print("Health check for LB %s failed with error: %s Not adding..." % (lb_id, str(e)))
continue
while retry > 0:
try:
pyrax.utils.wait_until(lb, "status", "ACTIVE", interval=1, attempts=30, verbose=False)
node = clb.Node(address = my_ip, port = lb.port, condition = "ENABLED")
res = lb.add_nodes([node])
print ("Node added to LB %s" % lb_id)
break
except pyrax.exceptions.ClientException as e:
if "PENDING" in e.message:
print ("Race condition hit, another server is adding itself. Retrying...")
sleep(random.random())
if "Duplicate nodes" in e.message:
print ("Node %s:%s already in LB %s.." % (my_ip, lb.port, lb_id))
break
else:
print ("Exception: %s" % e.message)
break
retry -= 1
if __name__ == "__main__":
main()
| apache-2.0 | -7,555,560,228,802,524,000 | 32.125 | 102 | 0.57994 | false |
abztrakt/uw-skilltree | setup.py | 1 | 1266 | import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-skilltreeapp',
version='0.1',
packages=['skilltreeapp'],
include_package_data=True,
install_requires = [
'setuptools',
'Django',
'django-compressor',
'django-mobility',
'django-templatetag-handlebars',
],
license='Apache License, Version 2.0', # example license
description='A Django app to ...',
long_description=README,
url='http://www.example.com/',
author='Your Name',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| apache-2.0 | -503,293,881,132,608,450 | 31.461538 | 79 | 0.612164 | false |
coin-or/GiMPy | src/gimpy/graph.py | 1 | 139676 | '''
A Graph class implementation. The aim for this implementation is
1. To reflect implementation methods in literature as much as possible
2. To have something close to a "classic" object-oriented design
(compared to previous versions)
This implementation can be considered as a compromise between a graph
class designed for visualization and an efficient graph data structure.
One deviation from standard Graph implementations is to keep in-neighbors in
another adjacency list. We do this for efficiency when traversing residual
graphs.
We have a class for Graph and a class for Node. Edges are not represented as
objects. They are kept in a dictionary which also keeps their attributes.
Graph display related methods are inspired by Pydot. They are rewritten
with GIMPy's needs in mind. We also borrow two methods from Pydot, see
global_constants.py for details.
Default graph type is an undirected graph.
No custom exception is raised when the user tries to get in_neighbors of an
undirected graph; the user should be aware of this. Python will raise an
AttributeError since the user is trying to read an attribute that does not
exist.
Methods that implement algorithms have a display argument in their API. If this
argument is not specified, the global display setting will be used for
displaying the algorithm the method implements. You can use the display
argument to get a visualization of an algorithm without changing the global
display behavior of your Graph/Tree object.
Method documentation strings are organized as follows.
API: method_name(arguments)
Description: Description of the method.
Input: Arguments and their explanation.
Pre: Necessary class attributes that should exist, methods to be called
before this method.
Post: Class attributes changed within the method.
Return: Return value of the method.
TODO(aykut):
-> svg display mode
-> label_strong_components() API change. Check backward compatibility.
-> dfs should use search()?
-> display mode svg is not supported.
future:
-> The solution we find is not strongly feasible. Fix this.
'''
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
from past.utils import old_div
from builtins import object
from .global_constants import *
try:
from src.blimpy import Stack, Queue, PriorityQueue
except ImportError:
from coinor.blimpy import Stack, Queue, PriorityQueue
import subprocess # for call()
import io # for StringIO()
import copy # for deepcopy()
import sys # for exit()
import random # for seed, random, randint
import tempfile # for mkstemp()
import os # for close()
import operator # for itemgetter()
try:
import pygtk
import gtk
import xdot
except ImportError:
XDOT_INSTALLED = False
else:
XDOT_INSTALLED = True
try:
import dot2tex # for dot2tex method
except ImportError:
DOT2TEX_INSTALLED = False
else:
DOT2TEX_INSTALLED = True
try:
from PIL import Image as PIL_Image
except ImportError:
PIL_INSTALLED = False
else:
PIL_INSTALLED = True
try:
import matplotlib
except ImportError:
MATPLOTLIB_INSTALLED = False
else:
MATPLOTLIB_INSTALLED = True
# matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
plt.rcParams['figure.dpi'] = 300
def handle_close(evt):
print('Figure closed. Exiting!')
exit()
class Node(object):
'''
Node class. A node object keeps node attributes. Has a method to write
node in Dot language grammer.
'''
def __init__(self, name, **attr):
'''
API: __init__(self, name, **attrs)
Description:
Node class constructor. Sets name and attributes using arguments.
Input:
name: Name of node.
**attrs: Node attributes.
Post:
Sets self.name and self.attr.
'''
self.name = name
self.attr = copy.deepcopy(DEFAULT_NODE_ATTRIBUTES)
for a in attr:
self.attr[a] = attr[a]
def get_attr(self, attr):
'''
API: get_attr(self, attr)
Description:
Returns node attribute attr.
Input:
attr: Node attribute to get.
Return:
            Returns node attribute attr if it exists, None otherwise.
'''
if attr in self.attr:
return self.attr[attr]
else:
return None
def set_attr(self, attr, value):
'''
API: set_attr(self, attr, value)
Description:
Sets node attribute attr to value.
Input:
attr: Node attribute to set.
value: New value of the attribute.
Post:
Updates self.attr[attr].
'''
self.attr[attr] = value
def to_string(self):
'''
API: to_string(self)
Description:
Returns string representation of node in dot language.
Return:
String representation of node.
'''
node = list()
node.append(quote_if_necessary(str(self.name)))
node.append(' [')
flag = False
for a in self.attr:
flag = True
node.append(a)
node.append('=')
node.append(quote_if_necessary(str(self.attr[a])))
node.append(', ')
if flag is True:
node = node[:-1]
node.append(']')
return ''.join(node)
def __repr__(self):
'''
API: __repr__(self)
Description:
Returns string representation of node in dot language.
Return:
String representation of node.
'''
return self.to_string()
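# Hedged usage sketch for Node (illustrative values): a Node simply wraps a
# name and an attribute dictionary.
#
#   n = Node('a', color='red')
#   n.set_attr('label', 'start')
#   n.get_attr('color')      # 'red'
#   n.to_string()            # 'a [color=red, label=start, ...]' (plus defaults)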
class Graph(object):
'''
Graph class, implemented using adjacency list. See GIMPy README for more
information.
'''
def __init__(self, **attr):
'''
API: __init__(self, **attrs)
Description:
Graph class constructor. Sets attributes using argument.
Input:
**attrs: Graph attributes.
Post:
Sets following attributes using **attrs; self.attr,
self.graph_type. Creates following initial attributes;
self.neighbors, self.in_neighbors, self.nodes, self.out_neighbors,
self.cluster
'''
# graph attributes
self.attr = copy.deepcopy(DEFAULT_GRAPH_ATTRIBUTES)
# set attributes using constructor
for a in attr:
self.attr[a] = attr[a]
# set name
if 'name' in self.attr:
self.name = self.attr['name']
else:
self.name = 'G'
# edge attributes
self.edge_attr = dict()
# we treat type attribute and keep it in a separate class attribute
if 'type' in self.attr:
self.graph_type = self.attr['type']
else:
self.graph_type = UNDIRECTED_GRAPH
# adjacency list of nodes, it is a dictionary of lists
self.neighbors = {}
# if the graph is undirected we do not need in_neighbor
if self.graph_type is DIRECTED_GRAPH:
self.in_neighbors = {}
self.nodes = {}
self.edge_connect_symbol = EDGE_CONNECT_SYMBOL[self.graph_type]
self.out_neighbors = self.neighbors
if 'display' not in self.attr:
self.attr['display']='off'
if 'layout' not in self.attr:
self.attr['layout'] = 'fdp'
self.attr['cluster_count'] = 0
self.cluster = {}
def __repr__(self):
'''
API: __repr__(self)
Description:
Returns string representation of the graph.
Return:
String representation of the graph.
'''
data = str()
for n in self.nodes:
data += str(n)
data += ' -> '
data += self.neighbors[n].__repr__()
data += '\n'
data = data[:-1]
return data
def __contains__(self, item):
'''
API: __contains__(self, item)
Description:
Return true if item is in graph. item can be a node name or a tuple
that represents an edge.
Return:
True if item is in graph.
'''
if isinstance(item, tuple):
name1 = item[0]
name2 = item[1]
if self.graph_type is DIRECTED_GRAPH:
return (name1, name2) in self.edge_attr
else:
return ((name1, name2) in self.edge_attr or
(name2, name1) in self.edge_attr)
else:
return item in self.nodes
def add_node(self, name, **attr):
'''
API: add_node(self, name, **attr)
Description:
Adds node to the graph.
Pre:
Graph should not contain a node with this name. We do not allow
multiple nodes with the same name.
Input:
name: Name of the node.
attr: Node attributes.
Post:
self.neighbors, self.nodes and self.in_neighbors are updated.
Return:
Node (a Node class instance) added to the graph.
'''
if name in self.neighbors:
raise MultipleNodeException
self.neighbors[name] = list()
if self.graph_type is DIRECTED_GRAPH:
self.in_neighbors[name] = list()
self.nodes[name] = Node(name, **attr)
return self.nodes[name]
def del_node(self, name):
'''
API: del_node(self, name)
Description:
Removes node from Graph.
Input:
name: Name of the node.
Pre:
Graph should contain a node with this name.
Post:
self.neighbors, self.nodes and self.in_neighbors are updated.
'''
if name not in self.neighbors:
raise Exception('Node %s does not exist!' %str(name))
for n in self.neighbors[name]:
del self.edge_attr[(name, n)]
if self.graph_type == UNDIRECTED_GRAPH:
self.neighbors[n].remove(name)
else:
self.in_neighbors[n].remove(name)
if self.graph_type is DIRECTED_GRAPH:
for n in self.in_neighbors[name]:
del self.edge_attr[(n, name)]
self.neighbors[n].remove(name)
del self.neighbors[name]
del self.in_neighbors[name]
del self.nodes[name]
def add_edge(self, name1, name2, **attr):
'''
API: add_edge(self, name1, name2, **attr)
Description:
Adds edge to the graph. Sets edge attributes using attr argument.
Input:
name1: Name of the source node (if directed).
name2: Name of the sink node (if directed).
attr: Edge attributes.
Pre:
Graph should not already contain this edge. We do not allow
multiple edges with same source and sink nodes.
Post:
self.edge_attr is updated.
self.neighbors, self.nodes and self.in_neighbors are updated if
graph was missing at least one of the nodes.
'''
if (name1, name2) in self.edge_attr:
raise MultipleEdgeException
if self.graph_type is UNDIRECTED_GRAPH and (name2,name1) in self.edge_attr:
raise MultipleEdgeException
self.edge_attr[(name1,name2)] = copy.deepcopy(DEFAULT_EDGE_ATTRIBUTES)
for a in attr:
self.edge_attr[(name1,name2)][a] = attr[a]
if name1 not in self.nodes:
self.add_node(name1)
if name2 not in self.nodes:
self.add_node(name2)
self.neighbors[name1].append(name2)
if self.graph_type is UNDIRECTED_GRAPH:
self.neighbors[name2].append(name1)
else:
self.in_neighbors[name2].append(name1)
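    # A hedged usage sketch (illustrative, not part of the class API): node
    # names and the 'cost' value below are made up for the example.
    #
    #   g = Graph(type=DIRECTED_GRAPH, display='off')
    #   g.add_node('a', label='source')
    #   g.add_edge('a', 'b', cost=3)   # 'b' is created implicitly
    #   assert ('a', 'b') in g and 'b' in g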
def del_edge(self, e):
'''
API: del_edge(self, e)
Description:
Removes edge from graph.
Input:
e: Tuple that represents edge, in (source,sink) form.
Pre:
Graph should contain this edge.
Post:
self.edge_attr, self.neighbors and self.in_neighbors are updated.
'''
if self.graph_type is DIRECTED_GRAPH:
try:
del self.edge_attr[e]
except KeyError:
                raise Exception('Edge %s does not exist!' %str(e))
self.neighbors[e[0]].remove(e[1])
self.in_neighbors[e[1]].remove(e[0])
else:
try:
del self.edge_attr[e]
except KeyError:
try:
del self.edge_attr[(e[1],e[0])]
except KeyError:
                    raise Exception('Edge %s does not exist!' %str(e))
self.neighbors[e[0]].remove(e[1])
self.neighbors[e[1]].remove(e[0])
def get_node(self, name):
'''
API: get_node(self, name)
Description:
Returns node object with the provided name.
Input:
name: Name of the node.
Return:
Returns node object if node exists, returns None otherwise.
'''
if name in self.nodes:
return self.nodes[name]
else:
return None
def get_edge_cost(self, edge):
'''
API: get_edge_cost(self, edge)
Description:
Returns cost attr of edge, required for minimum_spanning_tree_kruskal().
Input:
edge: Tuple that represents edge, in (source,sink) form.
Return:
Returns cost attribute value of the edge.
'''
return self.get_edge_attr(edge[0], edge[1], 'cost')
def check_edge(self, name1, name2):
'''
API: check_edge(self, name1, name2)
Description:
Return True if edge exists, False otherwise.
Input:
name1: name of the source node.
name2: name of the sink node.
Return:
Returns True if edge exists, False otherwise.
'''
if self.graph_type is DIRECTED_GRAPH:
return (name1, name2) in self.edge_attr
else:
return ((name1, name2) in self.edge_attr or
(name2, name1) in self.edge_attr)
def get_node_list(self):
'''
API: get_node_list(self)
Description:
Returns node list.
Return:
List of nodes.
'''
return list(self.neighbors.keys())
def get_edge_list(self):
'''
API: get_edge_list(self)
Description:
Returns edge list.
Return:
List of edges, edges are tuples and in (source,sink) format.
'''
return list(self.edge_attr.keys())
def get_node_num(self):
'''
API: get_node_num(self)
Description:
Returns number of nodes.
Return:
Number of nodes.
'''
return len(self.neighbors)
def get_edge_num(self):
'''
API: get_edge_num(self)
Description:
Returns number of edges.
Return:
Number of edges.
'''
return len(self.edge_attr)
def get_node_attr(self, name, attr):
'''
API: get_node_attr(self, name, attr)
Description:
Returns attribute attr of given node.
Input:
name: Name of node.
attr: Attribute of node.
Pre:
Graph should have this node.
Return:
Value of node attribute attr.
'''
return self.get_node(name).get_attr(attr)
def get_edge_attr(self, n, m, attr):
'''
API: get_edge_attr(self, n, m, attr)
Description:
Returns attribute attr of edge (n,m).
Input:
n: Source node name.
m: Sink node name.
attr: Attribute of edge.
Pre:
Graph should have this edge.
Return:
Value of edge attribute attr.
'''
if self.graph_type is DIRECTED_GRAPH:
return self.edge_attr[(n,m)][attr]
else:
try:
return self.edge_attr[(n,m)][attr]
except KeyError:
return self.edge_attr[(m,n)][attr]
def set_node_attr(self, name, attr, value):
'''
API: set_node_attr(self, name, attr)
Description:
Sets attr attribute of node named name to value.
Input:
name: Name of node.
attr: Attribute of node to set.
Pre:
Graph should have this node.
Post:
Node attribute will be updated.
'''
self.get_node(name).set_attr(attr, value)
def set_edge_attr(self, n, m, attr, value):
'''
API: set_edge_attr(self, n, m, attr, value)
Description:
Sets attr attribute of edge (n,m) to value.
Input:
n: Source node name.
m: Sink node name.
attr: Attribute of edge to set.
value: New value of attribute.
Pre:
Graph should have this edge.
Post:
Edge attribute will be updated.
'''
if self.graph_type is DIRECTED_GRAPH:
self.edge_attr[(n,m)][attr] = value
else:
try:
self.edge_attr[(n,m)][attr] = value
except KeyError:
self.edge_attr[(m,n)][attr] = value
def get_neighbors(self, name):
'''
API: get_neighbors(self, name)
Description:
Returns list of neighbors of given node.
Input:
name: Node name.
Pre:
Graph should have this node.
Return:
List of neighbor node names.
'''
return self.neighbors[name]
def get_in_neighbors(self, name):
'''
API: get_in_neighbors(self, name)
Description:
Returns list of in neighbors of given node.
Input:
name: Node name.
Pre:
Graph should have this node.
Return:
List of in-neighbor node names.
'''
return self.in_neighbors[name]
def get_out_neighbors(self, name):
'''
API: get_out_neighbors(self, name)
Description:
Returns list of out-neighbors of given node.
Input:
name: Node name.
Pre:
Graph should have this node.
Return:
List of out-neighbor node names.
'''
return self.neighbors[name]
def edge_to_string(self, e):
'''
API: edge_to_string(self, e)
Description:
Return string that represents edge e in dot language.
Input:
e: Edge tuple in (source,sink) format.
Pre:
Graph should have this edge.
Return:
String that represents given edge.
'''
edge = list()
edge.append(quote_if_necessary(str(e[0])))
edge.append(self.edge_connect_symbol)
edge.append(quote_if_necessary(str(e[1])))
# return if there is nothing in self.edge_attr[e]
if len(self.edge_attr[e]) == 0:
return ''.join(edge)
edge.append(' [')
for a in self.edge_attr[e]:
edge.append(a)
edge.append('=')
edge.append(quote_if_necessary(str(self.edge_attr[e][a])))
edge.append(', ')
edge = edge[:-1]
edge.append(']')
return ''.join(edge)
def to_string(self):
'''
API: to_string(self)
Description:
This method is based on pydot Graph class with the same name.
Returns a string representation of the graph in dot language.
It will return the graph and all its subelements in string form.
Return:
String that represents graph in dot language.
'''
graph = list()
processed_edges = {}
graph.append('%s %s {\n' %(self.graph_type, self.name))
for a in self.attr:
if a not in GRAPH_ATTRIBUTES:
continue
val = self.attr[a]
if val is not None:
graph.append( '%s=%s' % (a, quote_if_necessary(val)) )
else:
graph.append(a)
graph.append( ';\n' )
# clusters
for c in self.cluster:
graph.append('subgraph cluster_%s {\n' %c)
for a in self.cluster[c]['attrs']:
if a=='label':
graph.append(a+'='+quote_if_necessary(self.cluster[c]['attrs'][a])+';\n')
continue
graph.append(a+'='+self.cluster[c]['attrs'][a]+';\n')
if len(self.cluster[c]['node_attrs'])!=0:
graph.append('node [')
for a in self.cluster[c]['node_attrs']:
graph.append(a+'='+self.cluster[c]['node_attrs'][a])
graph.append(',')
if len(self.cluster[c]['node_attrs'])!=0:
graph.pop()
graph.append('];\n')
# process cluster nodes
for n in self.cluster[c]['node_list']:
data = self.get_node(n).to_string()
graph.append(data + ';\n')
# process cluster edges
for n in self.cluster[c]['node_list']:
for m in self.cluster[c]['node_list']:
if self.check_edge(n,m):
data = self.edge_to_string((n,m))
graph.append(data + ';\n')
processed_edges[(n,m)]=None
graph.append('}\n')
# process remaining (non-cluster) nodes
for n in self.neighbors:
for c in self.cluster:
if n in self.cluster[c]['node_list']:
break
else:
data = self.get_node(n).to_string()
graph.append(data + ';\n')
# process edges
for e in self.edge_attr:
if e in processed_edges:
continue
data = self.edge_to_string(e)
graph.append(data + ';\n')
graph.append( '}\n' )
return ''.join(graph)
def label_components(self, display = None):
'''
API: label_components(self, display=None)
Description:
This method labels the nodes of an undirected graph with component
numbers so that each node has the same label as all nodes in the
            same component. It will display the algorithm if the display
            argument is provided.
Input:
display: display method.
Pre:
self.graph_type should be UNDIRECTED_GRAPH.
Post:
Nodes will have 'component' attribute that will have component
number as value.
'''
if self.graph_type == DIRECTED_GRAPH:
raise Exception("label_components only works for ",
"undirected graphs")
self.num_components = 0
for n in self.get_node_list():
self.get_node(n).set_attr('component', None)
for n in self.neighbors:
self.get_node(n).set_attr('label', '-')
for n in self.get_node_list():
if self.get_node(n).get_attr('component') == None:
self.search(n, display=display,
component=self.num_components, algo='DFS')
self.num_components += 1
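    # Hedged sketch: after label_components() each node's 'component'
    # attribute holds its component index and self.num_components gives the
    # count. The graph below (illustrative) has two components.
    #
    #   g = Graph(type=UNDIRECTED_GRAPH, display='off')
    #   g.add_edge('a', 'b')
    #   g.add_node('c')              # isolated node
    #   g.label_components()
    #   assert g.num_components == 2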
def tarjan(self):
'''
API: tarjan(self)
Description:
            Implements Tarjan's algorithm for determining the strongly
            connected components of a directed graph.
Pre:
self.graph_type should be DIRECTED_GRAPH.
Post:
Nodes will have 'component' attribute that will have component
number as value. Changes 'index' attribute of nodes.
'''
index = 0
component = 0
q = []
for n in self.get_node_list():
if self.get_node_attr(n, 'index') is None:
index, component = self.strong_connect(q, n, index, component)
def strong_connect(self, q, node, index, component):
'''
API: strong_connect (self, q, node, index, component)
Description:
Used by tarjan method. This method should not be called directly by
user.
Input:
q: Node list.
node: Node that is being connected to nodes in q.
index: Index used by tarjan method.
component: Current component number.
Pre:
Should be called by tarjan and itself (recursive) only.
Post:
Nodes will have 'component' attribute that will have component
number as value. Changes 'index' attribute of nodes.
Return:
Returns new index and component numbers.
'''
self.set_node_attr(node, 'index', index)
self.set_node_attr(node, 'lowlink', index)
index += 1
q.append(node)
for m in self.get_neighbors(node):
if self.get_node_attr(m, 'index') is None:
index, component = self.strong_connect(q, m, index, component)
self.set_node_attr(node, 'lowlink',
min([self.get_node_attr(node, 'lowlink'),
self.get_node_attr(m, 'lowlink')]))
elif m in q:
self.set_node_attr(node, 'lowlink',
min([self.get_node_attr(node, 'lowlink'),
self.get_node_attr(m, 'index')]))
if self.get_node_attr(node, 'lowlink') == self.get_node_attr(node, 'index'):
m = q.pop()
self.set_node_attr(m, 'component', component)
while (node!=m):
m = q.pop()
self.set_node_attr(m, 'component', component)
component += 1
self.num_components = component
return (index, component)
def label_strong_component(self):
'''
API: label_strong_component(self)
Description:
This method labels the nodes of a directed graph with component
numbers so that each node has the same label as all nodes in the
same component.
Pre:
self.graph_type should be DIRECTED_GRAPH.
Post:
Nodes will have 'component' attribute that will have component
number as value. Changes 'index' attribute of nodes.
'''
self.num_components = 0
self.tarjan()
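    # Hedged sketch: the cycle a->b->a forms one strongly connected
    # component and 'c' is its own component (names are illustrative).
    #
    #   g = Graph(type=DIRECTED_GRAPH, display='off')
    #   g.add_edge('a', 'b')
    #   g.add_edge('b', 'a')
    #   g.add_edge('b', 'c')
    #   g.label_strong_component()
    #   assert g.num_components == 2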
def dfs(self, root, disc_count = 0, finish_count = 1, component = None,
transpose = False, display = None, pred = None):
'''
API: dfs(self, root, disc_count = 0, finish_count = 1, component=None,
transpose=False)
Description:
Make a depth-first search starting from node with name root.
Input:
root: Starting node name.
disc_count: Discovery time.
finish_count: Finishing time.
component: component number.
transpose: Goes in the reverse direction along edges if transpose
is True.
Post:
Nodes will have 'component' attribute that will have component
            number as value. Updates 'disc_time' and 'finish_time' attributes
            of nodes, which represent discovery time and finishing time.
Return:
Returns a tuple that has discovery time and finish time of the
last node in the following form (disc_time,finish_time).
'''
if pred == None:
pred = {}
if display == None:
display = self.attr['display']
else:
self.set_display_mode(display)
neighbors = self.neighbors
if self.graph_type == DIRECTED_GRAPH and transpose:
neighbors = self.in_neighbors
self.get_node(root).set_attr('component', component)
disc_count += 1
self.get_node(root).set_attr('disc_time', disc_count)
self.get_node(root).set_attr('label', str(disc_count)+',-')
self.get_node(root).set_attr('color', 'blue')
if root in pred:
self.set_edge_attr(pred[root], root, 'color', 'green')
self.display()
if transpose:
fTime = []
for n in neighbors[root]:
fTime.append((n,self.get_node(n).get_attr('finish_time')))
neighbor_list = sorted(fTime, key=operator.itemgetter(1))
neighbor_list = list(t[0] for t in neighbor_list)
neighbor_list.reverse()
else:
neighbor_list = neighbors[root]
for i in neighbor_list:
if not transpose:
if self.get_node(i).get_attr('disc_time') is None:
pred[i] = root
disc_count, finish_count = self.dfs(i, disc_count,
finish_count,
component, transpose,
pred = pred)
else:
if self.get_node(i).get_attr('component') is None:
disc_count, finish_count = self.dfs(i, disc_count,
finish_count,
component, transpose,
pred = pred)
self.get_node(root).set_attr('finish_time', finish_count)
d_time = self.get_node(root).get_attr('disc_time')
label = '"' + str(d_time) + ',' + str(finish_count) + '"'
self.get_node(root).set_attr('label', label)
self.get_node(root).set_attr('color', 'green')
self.display()
finish_count += 1
return disc_count, finish_count
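    # Hedged sketch: on the path graph a->b->c (illustrative), dfs('a')
    # discovers 'a' first (disc_time 1) and finishes it last.
    #
    #   g = Graph(type=DIRECTED_GRAPH, display='off')
    #   g.add_edge('a', 'b')
    #   g.add_edge('b', 'c')
    #   g.dfs('a')
    #   assert g.get_node_attr('a', 'disc_time') == 1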
def bfs(self, root, display = None, component = None):
'''
API: bfs(self, root, display = None, component=None)
Description:
Make a breadth-first search starting from node with name root.
Input:
root: Starting node name.
display: display method.
component: component number.
Post:
Nodes will have 'component' attribute that will have component
number as value.
'''
self.search(root, display = display, component = component, q = Queue())
def search(self, source, destination = None, display = None,
component = None, q = None,
algo = 'DFS', reverse = False, **kargs):
'''
API: search(self, source, destination = None, display = None,
component = None, q = Stack(),
algo = 'DFS', reverse = False, **kargs)
Description:
Generic search method. Changes behavior (dfs,bfs,dijkstra,prim)
according to algo argument.
if destination is not specified:
            This method determines all nodes reachable from "source", i.e.,
            creates the predecessor tree and returns it (dictionary).
            if destination is given:
            If there exists a path from "source" to "destination", it will
            return the list of the nodes on this path. If there is no such
            path, it will return the predecessor tree constructed from source
            (dictionary).
Optionally, it marks all nodes reachable from "source" with a component
number. The variable "q" determines the order in which the nodes are
searched.
Input:
source: Search starts from node with this name.
destination: Destination node name.
display: Display method.
            algo: Algorithm that specifies the search. Available algorithms are
'DFS', 'BFS', 'Dijkstra' and 'Prim'.
reverse: Search goes in reverse arc directions if True.
kargs: Additional keyword arguments.
Post:
Nodes will have 'component' attribute that will have component
number as value (if component argument provided). Color attribute
of nodes and edges may change.
Return:
Returns predecessor tree in dictionary form if destination is
not specified, returns list of node names in the path from source
            to destination if destination is specified and there is a path.
If there is no path returns predecessor tree in dictionary form.
See description section.
'''
if display == None:
display = self.attr['display']
else:
self.set_display_mode(display)
if algo == 'DFS':
if q is None:
q = Stack()
self.get_node(source).set_attr('component', component)
elif algo == 'BFS' or algo == 'UnweightedSPT':
if q is None:
q = Queue()
self.get_node(source).set_attr('component', component)
elif algo == 'Dijkstra' or algo == 'Prim':
if q is None:
q = PriorityQueue()
else:
print("Unknown search algorithm...exiting")
return
neighbors = self.neighbors
if self.graph_type == DIRECTED_GRAPH and reverse:
neighbors = self.in_neighbors
for i in self.get_node_list():
self.get_node(i).set_attr('label', '-')
self.get_node(i).attr.pop('priority', None)
self.get_node(i).set_attr('distance', None)
self.get_node(i).set_attr('color', 'black')
for j in neighbors[i]:
if reverse:
self.set_edge_attr(j, i, 'color', 'black')
else:
self.set_edge_attr(i, j, 'color', 'black')
self.display()
pred = {}
self.process_edge_search(None, source, pred, q, component, algo,
**kargs)
found = True
if source != destination:
found = False
while not q.isEmpty() and not found:
current = q.peek()
if self.get_node(current).get_attr('color') == 'green':
q.remove(current)
continue
self.process_node_search(current, q, **kargs)
self.get_node(current).set_attr('color', 'blue')
if current != source:
if reverse:
self.set_edge_attr(current, pred[current], 'color', 'green')
else:
self.set_edge_attr(pred[current], current, 'color', 'green')
if current == destination:
found = True
break
self.display()
for n in neighbors[current]:
if self.get_node(n).get_attr('color') != 'green':
if reverse:
self.set_edge_attr(n, current, 'color', 'yellow')
else:
self.set_edge_attr(current, n, 'color', 'yellow')
self.display()
self.process_edge_search(current, n, pred, q, component,
algo, **kargs)
if reverse:
self.set_edge_attr(n, current, 'color', 'black')
else:
self.set_edge_attr(current, n, 'color', 'black')
q.remove(current)
self.get_node(current).set_attr('color', 'green')
self.display()
if found:
path = [destination]
current = destination
while current != source:
path.insert(0, pred[current])
current = pred[current]
return path
if destination == None:
return pred
else:
return None
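    # Hedged sketch of a Dijkstra call through search(); edges need a 'cost'
    # attribute and the costs below are illustrative. The two-arc route
    # (total cost 3) beats the direct arc (cost 5).
    #
    #   g = Graph(type=DIRECTED_GRAPH, display='off')
    #   g.add_edge('s', 'a', cost=1)
    #   g.add_edge('a', 't', cost=2)
    #   g.add_edge('s', 't', cost=5)
    #   path = g.search('s', destination='t', algo='Dijkstra')
    #   # expected: path == ['s', 'a', 't']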
def process_node_search(self, node, q, **kwargs):
'''
API: process_node_search(self, node, q, **kwargs)
Description:
Used by search() method. Process nodes along the search. Should not be
called by user directly.
Input:
node: Name of the node being processed.
q: Queue data structure.
kwargs: Keyword arguments.
Post:
'priority' attribute of the node may get updated.
'''
if isinstance(q, PriorityQueue):
self.get_node(node).set_attr('priority', q.get_priority(node))
def process_edge_dijkstra(self, current, neighbor, pred, q, component):
'''
API: process_edge_dijkstra(self, current, neighbor, pred, q, component)
Description:
Used by search() method if the algo argument is 'Dijkstra'. Processes
edges along Dijkstra's algorithm. User does not need to call this
method directly.
Input:
current: Name of the current node.
neighbor: Name of the neighbor node.
pred: Predecessor tree.
q: Data structure that holds nodes to be processed in a queue.
component: component number.
Post:
'color' attribute of nodes and edges may change.
'''
if current is None:
self.get_node(neighbor).set_attr('color', 'red')
self.get_node(neighbor).set_attr('label', 0)
q.push(neighbor, 0)
self.display()
self.get_node(neighbor).set_attr('color', 'black')
return
new_estimate = (q.get_priority(current) +
self.get_edge_attr(current, neighbor, 'cost'))
if neighbor not in pred or new_estimate < q.get_priority(neighbor):
pred[neighbor] = current
self.get_node(neighbor).set_attr('color', 'red')
self.get_node(neighbor).set_attr('label', new_estimate)
q.push(neighbor, new_estimate)
self.display()
self.get_node(neighbor).set_attr('color', 'black')
def process_edge_prim(self, current, neighbor, pred, q, component):
'''
API: process_edge_prim(self, current, neighbor, pred, q, component)
Description:
Used by search() method if the algo argument is 'Prim'. Processes
edges along Prim's algorithm. User does not need to call this method
directly.
Input:
current: Name of the current node.
neighbor: Name of the neighbor node.
pred: Predecessor tree.
q: Data structure that holds nodes to be processed in a queue.
component: component number.
Post:
'color' attribute of nodes and edges may change.
'''
if current is None:
self.get_node(neighbor).set_attr('color', 'red')
self.get_node(neighbor).set_attr('label', 0)
q.push(neighbor, 0)
self.display()
self.get_node(neighbor).set_attr('color', 'black')
return
new_estimate = self.get_edge_attr(current, neighbor, 'cost')
if not neighbor in pred or new_estimate < q.get_priority(neighbor):
pred[neighbor] = current
self.get_node(neighbor).set_attr('color', 'red')
self.get_node(neighbor).set_attr('label', new_estimate)
q.push(neighbor, new_estimate)
self.display()
self.get_node(neighbor).set_attr('color', 'black')
def process_edge_search(self, current, neighbor, pred, q, component, algo,
**kargs):
'''
API: process_edge_search(self, current, neighbor, pred, q, component,
algo, **kargs)
Description:
Used by search() method. Processes edges according to the underlying
            algorithm. User does not need to call this method directly.
Input:
current: Name of the current node.
neighbor: Name of the neighbor node.
pred: Predecessor tree.
q: Data structure that holds nodes to be processed in a queue.
component: component number.
algo: Search algorithm. See search() documentation.
kwargs: Keyword arguments.
Post:
'color', 'distance', 'component' attribute of nodes and edges may
change.
'''
if algo == 'Dijkstra':
return self.process_edge_dijkstra(current, neighbor, pred, q,
component)
if algo == 'Prim':
return self.process_edge_prim(current, neighbor, pred, q,
component)
neighbor_node = self.get_node(neighbor)
if current == None:
neighbor_node.set_attr('distance', 0)
if isinstance(q, PriorityQueue):
q.push(neighbor, 0)
else:
q.push(neighbor)
if component != None:
neighbor_node.set_attr('component', component)
neighbor_node.set_attr('label', component)
else:
neighbor_node.set_attr('label', 0)
return
if isinstance(q, PriorityQueue):
current_priority = q.get_priority(neighbor)
if algo == 'UnweightedSPT' or algo == 'BFS':
priority = self.get_node(current).get_attr('distance') + 1
if algo == 'DFS':
priority = -self.get_node(current).get_attr('distance') - 1
if current_priority is not None and priority >= current_priority:
return
q.push(neighbor, priority)
if algo == 'UnweightedSPT' or algo == 'BFS':
neighbor_node.set_attr('distance', priority)
if algo == 'DFS':
neighbor_node.set_attr('depth', -priority)
else:
distance = self.get_node(current).get_attr('distance') + 1
if ((algo == 'UnweightedSPT' or algo == 'BFS') and
neighbor_node.get_attr('distance') is not None):
return
neighbor_node.set_attr('distance', distance)
neighbor_node.set_attr('label', str(distance))
q.push(neighbor)
pred[neighbor] = current
neighbor_node.set_attr('color', 'red')
if component != None:
neighbor_node.set_attr('component', component)
neighbor_node.set_attr('label', component)
self.display()
def minimum_spanning_tree_prim(self, source, display = None,
q = PriorityQueue()):
'''
API: minimum_spanning_tree_prim(self, source, display = None,
q = PriorityQueue())
Description:
Determines a minimum spanning tree of all nodes reachable
from source using Prim's Algorithm.
Input:
source: Name of source node.
display: Display method.
q: Data structure that holds nodes to be processed in a queue.
Post:
'color', 'distance', 'component' attribute of nodes and edges may
change.
Return:
Returns predecessor tree in dictionary format.
'''
if display == None:
display = self.attr['display']
else:
self.set_display_mode(display)
if isinstance(q, PriorityQueue):
addToQ = q.push
removeFromQ = q.pop
peek = q.peek
isEmpty = q.isEmpty
neighbors = self.get_neighbors
pred = {}
addToQ(source)
done = False
while not isEmpty() and not done:
current = removeFromQ()
self.set_node_attr(current, 'color', 'blue')
if current != source:
self.set_edge_attr(pred[current], current, 'color', 'green')
self.display()
for n in neighbors(current):
if self.get_node_attr(n, 'color') != 'green':
self.set_edge_attr(current, n, 'color', 'yellow')
self.display()
new_estimate = self.get_edge_attr(current, n, 'cost')
if not n in pred or new_estimate < peek(n)[0]:
pred[n] = current
self.set_node_attr(n, 'color', 'red')
self.set_node_attr(n, 'label', new_estimate)
addToQ(n, new_estimate)
self.display()
self.set_node_attr(n, 'color', 'black')
self.set_edge_attr(current, n, 'color', 'black')
self.set_node_attr(current, 'color', 'green')
self.display()
return pred
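    # Hedged sketch: the returned dictionary encodes the tree; pred[v] is the
    # parent of v and the source has no entry. Costs are illustrative.
    #
    #   g = Graph(type=UNDIRECTED_GRAPH, display='off')
    #   g.add_edge('a', 'b', cost=1)
    #   g.add_edge('b', 'c', cost=2)
    #   g.add_edge('a', 'c', cost=4)
    #   pred = g.minimum_spanning_tree_prim('a')
    #   # expected: pred == {'b': 'a', 'c': 'b'} -- the cost-4 edge is skipped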
def minimum_spanning_tree_kruskal(self, display = None, components = None):
'''
API: minimum_spanning_tree_kruskal(self, display = None,
components = None)
Description:
Determines a minimum spanning tree using Kruskal's Algorithm.
Input:
display: Display method.
component: component number.
Post:
'color' attribute of nodes and edges may change.
Return:
Returns list of edges where edges are tuples in (source,sink)
format.
'''
if display == None:
display = self.attr['display']
else:
self.set_display_mode(display)
if components is None:
components = DisjointSet(display = display, layout = 'dot',
optimize = False)
sorted_edge_list = sorted(self.get_edge_list(), key=self.get_edge_cost)
edges = []
for n in self.get_node_list():
components.add([n])
components.display()
for e in sorted_edge_list:
if len(edges) == len(self.get_node_list()) - 1:
break
self.set_edge_attr(e[0], e[1], 'color', 'yellow')
self.display()
if components.union(e[0], e[1]):
self.set_edge_attr(e[0], e[1], 'color', 'green')
self.display()
edges.append(e)
else:
self.set_edge_attr(e[0], e[1], 'color', 'black')
self.display()
components.display()
return edges
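    # Hedged sketch: edges are scanned in nondecreasing 'cost' order and the
    # chosen ones are returned as (source, sink) tuples. Costs illustrative.
    #
    #   g = Graph(type=UNDIRECTED_GRAPH, display='off')
    #   g.add_edge('a', 'b', cost=1)
    #   g.add_edge('b', 'c', cost=2)
    #   g.add_edge('a', 'c', cost=4)
    #   edges = g.minimum_spanning_tree_kruskal()
    #   # expected: edges == [('a', 'b'), ('b', 'c')]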
def max_flow_preflowpush(self, source, sink, algo = 'FIFO', display = None):
'''
API: max_flow_preflowpush(self, source, sink, algo = 'FIFO',
display = None)
Description:
            Finds a maximum flow from source to sink using the preflow-push
            (push-relabel) algorithm.
Pre:
Assumes a directed graph in which each arc has a 'capacity'
            attribute and for which there do not exist both arcs (i,j)
and (j,i) for any pair of nodes i and j.
Input:
source: Source node name.
sink: Sink node name.
algo: Algorithm choice, 'FIFO', 'SAP' or 'HighestLabel'.
display: display method.
Post:
The 'flow' attribute of each arc gives a maximum flow.
'''
if display == None:
display = self.attr['display']
else:
self.set_display_mode(display)
nl = self.get_node_list()
# set excess of all nodes to 0
for n in nl:
self.set_node_attr(n, 'excess', 0)
# set flow of all edges to 0
for e in self.edge_attr:
self.edge_attr[e]['flow'] = 0
if 'capacity' in self.edge_attr[e]:
capacity = self.edge_attr[e]['capacity']
self.edge_attr[e]['label'] = str(capacity)+'/0'
else:
self.edge_attr[e]['capacity'] = INF
self.edge_attr[e]['label'] = 'INF/0'
self.display()
self.set_display_mode('off')
self.search(sink, algo = 'UnweightedSPT', reverse = True)
self.set_display_mode(display)
disconnect = False
for n in nl:
if self.get_node_attr(n, 'distance') is None:
disconnect = True
self.set_node_attr(n, 'distance',
2*len(nl) + 1)
if disconnect:
print('Warning: graph contains nodes not connected to the sink...')
if algo == 'FIFO':
q = Queue()
elif algo == 'SAP':
q = Stack()
elif algo == 'HighestLabel':
q = PriorityQueue()
for n in self.get_neighbors(source):
capacity = self.get_edge_attr(source, n, 'capacity')
self.set_edge_attr(source, n, 'flow', capacity)
self.set_node_attr(n, 'excess', capacity)
excess = self.get_node_attr(source, 'excess')
self.set_node_attr(source, 'excess', excess - capacity)
if algo == 'FIFO' or algo == 'SAP':
q.push(n)
elif algo == 'HighestLabel':
q.push(n, -1)
self.set_node_attr(source, 'distance', len(nl))
self.show_flow()
while not q.isEmpty():
relabel = True
current = q.peek()
neighbors = (self.get_neighbors(current) +
self.get_in_neighbors(current))
for n in neighbors:
pushed = self.process_edge_flow(source, sink, current, n, algo,
q)
if pushed:
self.show_flow()
if algo == 'FIFO':
'''With FIFO, we need to add the neighbors to the queue
before the current is added back in or the nodes will
be out of order
'''
if q.peek(n) is None and n != source and n != sink:
q.push(n)
'''Keep pushing while there is excess'''
if self.get_node_attr(current, 'excess') > 0:
continue
'''If we were able to push, then there we should not
relabel
'''
relabel = False
break
q.remove(current)
if current != sink:
if relabel:
self.relabel(current)
self.show_flow()
if self.get_node_attr(current, 'excess') > 0:
if algo == 'FIFO' or algo == 'SAP':
q.push(current)
elif algo == 'HighestLabel':
q.push(current, -self.get_node_attr(current,
'distance'))
if pushed and q.peek(n) is None and n != source:
if algo == 'SAP':
q.push(n)
elif algo == 'HighestLabel':
q.push(n, -self.get_node_attr(n, 'distance'))
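    # Hedged sketch: arcs need a 'capacity' attribute; after the call each
    # arc's 'flow' attribute holds a maximum flow. Capacities below are
    # illustrative.
    #
    #   g = Graph(type=DIRECTED_GRAPH, display='off')
    #   g.add_edge('s', 'a', capacity=2)
    #   g.add_edge('a', 't', capacity=1)
    #   g.max_flow_preflowpush('s', 't')
    #   # expected: g.get_edge_attr('a', 't', 'flow') == 1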
def process_edge_flow(self, source, sink, i, j, algo, q):
'''
API: process_edge_flow(self, source, sink, i, j, algo, q)
Description:
            Used by the max_flow_preflowpush() method. Processes edges during
            preflow push.
Input:
source: Source node name of flow graph.
sink: Sink node name of flow graph.
i: Source node in the processed edge (tail of arc).
j: Sink node in the processed edge (head of arc).
Post:
            The 'flow' attribute of the edge and the 'excess' attributes of
            its endpoints may get updated.
Return:
Returns False if residual capacity is 0, True otherwise.
'''
if (self.get_node_attr(i, 'distance') !=
self.get_node_attr(j, 'distance') + 1):
return False
if (i, j) in self.edge_attr:
edge = (i, j)
capacity = self.get_edge_attr(i, j, 'capacity')
mult = 1
else:
edge = (j, i)
capacity = 0
mult = -1
flow = mult*self.edge_attr[edge]['flow']
residual_capacity = capacity - flow
if residual_capacity == 0:
return False
excess_i = self.get_node_attr(i, 'excess')
excess_j = self.get_node_attr(j, 'excess')
push_amount = min(excess_i, residual_capacity)
self.edge_attr[edge]['flow'] = mult*(flow + push_amount)
self.set_node_attr(i, 'excess', excess_i - push_amount)
self.set_node_attr(j, 'excess', excess_j + push_amount)
return True
def relabel(self, i):
'''
API: relabel(self, i)
Description:
Used by max_flow_preflowpush() method for relabelling node i.
Input:
i: Node that is being relabelled.
Post:
'distance' attribute of node i is updated.
'''
min_distance = 2*len(self.get_node_list()) + 1
for j in self.get_neighbors(i):
if (self.get_node_attr(j, 'distance') < min_distance and
(self.get_edge_attr(i, j, 'flow') <
self.get_edge_attr(i, j, 'capacity'))):
min_distance = self.get_node_attr(j, 'distance')
for j in self.get_in_neighbors(i):
if (self.get_node_attr(j, 'distance') < min_distance and
self.get_edge_attr(j, i, 'flow') > 0):
min_distance = self.get_node_attr(j, 'distance')
self.set_node_attr(i, 'distance', min_distance + 1)
def show_flow(self):
'''
        API: show_flow(self)
        Description:
            Used by max_flow_preflowpush() method for display purposes.
Post:
'color' and 'label' attribute of edges/nodes are updated.
'''
for n in self.get_node_list():
excess = self.get_node_attr(n, 'excess')
distance = self.get_node_attr(n, 'distance')
self.set_node_attr(n, 'label', str(excess)+'/'+str(distance))
for neighbor in self.get_neighbors(n):
capacity = self.get_edge_attr(n, neighbor, 'capacity')
flow = self.get_edge_attr(n, neighbor, 'flow')
if capacity == INF:
self.set_edge_attr(n, neighbor, 'label',
'INF'+'/'+str(flow))
else:
self.set_edge_attr(n, neighbor, 'label',
str(capacity)+'/'+str(flow))
if capacity == flow:
self.set_edge_attr(n, neighbor, 'color', 'red')
elif flow > 0:
self.set_edge_attr(n, neighbor, 'color', 'green')
else:
self.set_edge_attr(n, neighbor, 'color', 'black')
self.display()
def create_residual_graph(self):
'''
API: create_residual_graph(self)
Description:
Creates and returns residual graph, which is a Graph instance
itself.
Pre:
(1) Arcs should have 'flow', 'capacity' and 'cost' attribute
(2) Graph should be a directed graph
Return:
Returns residual graph, which is a Graph instance.
'''
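        # Hedged usage sketch (the arc data below is made up): arcs must
        # carry 'flow', 'capacity' and 'cost' before calling this method.
        #   g = Graph(type=DIRECTED_GRAPH)
        #   g.add_edge(0, 1, cost=2, capacity=5, flow=3)
        #   r = g.create_residual_graph()
        #   # r should now contain arc (1,0) with capacity 3 and cost -2
        #   # (the pushed flow) and arc (0,1) with capacity 2 and cost 2
        #   # (the remaining capacity).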
if self.graph_type is UNDIRECTED_GRAPH:
raise Exception('residual graph is defined for directed graphs.')
residual_g = Graph(type = DIRECTED_GRAPH)
for e in self.get_edge_list():
capacity_e = self.get_edge_attr(e[0], e[1], 'capacity')
flow_e = self.get_edge_attr(e[0], e[1], 'flow')
cost_e = self.get_edge_attr(e[0], e[1], 'cost')
if flow_e > 0:
residual_g.add_edge(e[1], e[0], cost=-1*cost_e,
capacity=flow_e)
if capacity_e - flow_e > 0:
residual_g.add_edge(e[0], e[1], cost=cost_e,
capacity=capacity_e-flow_e)
return residual_g
def cycle_canceling(self, display):
'''
API:
cycle_canceling(self, display)
Description:
Solves minimum cost feasible flow problem using cycle canceling
algorithm. Returns True when an optimal solution is found, returns
False otherwise. 'flow' attribute values of arcs should be
considered as junk when returned False.
Input:
display: Display method.
Pre:
(1) Arcs should have 'capacity' and 'cost' attribute.
(2) Nodes should have 'demand' attribute, this value should be
positive if the node is a supply node, negative if it is demand
node and 0 if it is transhipment node.
(3) graph should not have node 's' and 't'.
Post:
Changes 'flow' attributes of arcs.
Return:
Returns True when an optimal solution is found, returns False
otherwise.
'''
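        # Hedged usage sketch (node names and numbers are illustrative):
        #   g = Graph(type=DIRECTED_GRAPH)
        #   g.add_node(0, demand=2)
        #   g.add_node(1, demand=-2)
        #   g.add_edge(0, 1, capacity=4, cost=1)
        #   if g.cycle_canceling('off'):
        #       print(g.get_edge_attr(0, 1, 'flow'))   # expected: 2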
# find a feasible solution to flow problem
if not self.find_feasible_flow():
return False
# create residual graph
residual_g = self.create_residual_graph()
# identify a negative cycle in residual graph
ncycle = residual_g.get_negative_cycle()
# loop while residual graph has a negative cycle
while ncycle is not None:
# find capacity of cycle
cap = residual_g.find_cycle_capacity(ncycle)
# augment capacity amount along the cycle
self.augment_cycle(cap, ncycle)
# create residual graph
residual_g = self.create_residual_graph()
# identify next negative cycle
ncycle = residual_g.get_negative_cycle()
return True
def find_feasible_flow(self):
'''
API:
find_feasible_flow(self)
Description:
            Solves the feasible flow problem, storing the solution in the
            'flow' attribute of arcs. This method is used to get an initial
            feasible flow for
simplex and cycle canceling algorithms. Uses max_flow() method.
Other max flow methods can also be used. Returns True if a feasible
flow is found, returns False, if the problem is infeasible. When
the problem is infeasible 'flow' attributes of arcs should be
considered as junk.
Pre:
(1) 'capacity' attribute of arcs
(2) 'demand' attribute of nodes
Post:
Keeps solution in 'flow' attribute of arcs.
Return:
Returns True if a feasible flow is found, returns False, if the
problem is infeasible
'''
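        # Note on feasibility: the max flow below saturates all (s,i) arcs
        # exactly when the total supply can be routed to the demand nodes,
        # which is what the final loop verifies before removing the
        # auxiliary nodes 's' and 't'.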
# establish a feasible flow in the network, to do this add nodes s and
# t and solve a max flow problem.
nl = self.get_node_list()
for i in nl:
b_i = self.get_node(i).get_attr('demand')
if b_i > 0:
# i is a supply node, add (s,i) arc
self.add_edge('s', i, capacity=b_i)
elif b_i < 0:
# i is a demand node, add (i,t) arc
self.add_edge(i, 't', capacity=-1*b_i)
# solve max flow on this modified graph
self.max_flow('s', 't', 'off')
# check if all demand is satisfied, i.e. the min cost problem is
# feasible or not
for i in self.neighbors['s']:
flow = self.get_edge_attr('s', i, 'flow')
capacity = self.get_edge_attr('s', i, 'capacity')
if flow != capacity:
self.del_node('s')
self.del_node('t')
return False
# remove node 's' and node 't'
self.del_node('s')
self.del_node('t')
return True
def get_layout(self):
'''
API:
get_layout(self)
Description:
Returns layout attribute of the graph.
Return:
Returns layout attribute of the graph.
'''
return self.attr['layout']
def set_layout(self, value):
'''
API:
set_layout(self, value)
Description:
Sets layout attribute of the graph to value.
Input:
value: New value of the layout.
'''
self.attr['layout']=value
if value == 'dot2tex':
self.attr['d2tgraphstyle'] = 'every text node part/.style={align=center}'
def write(self, file_obj, layout = None, format='png'):
'''
API:
write(self, file_obj, layout = None, format='png')
Description:
            Writes the graph to disk using the given layout and format.
Input:
file_obj: a file-like object that will be written to.
layout: Dot layout for generating graph image.
            format: Image format; all formats supported by Dot are welcome.
Post:
File will be written to disk.
'''
if layout == None:
layout = self.get_layout()
if format == 'dot':
file_obj.write(bytearray(self.to_string(), 'utf8'))
else:
out = self.create(layout, format)
if (out != None):
file_obj.write(out)
def create(self, layout, format, **args):
'''
API:
create(self, layout, format, **args)
Description:
Returns postscript representation of graph.
Input:
layout: Dot layout for generating graph image.
            format: Image format; all formats supported by Dot are welcome.
Return:
Returns postscript representation of graph.
'''
tmp_fd, tmp_name = tempfile.mkstemp()
tmp_file = os.fdopen(tmp_fd, 'w')
tmp_file.write(self.to_string())
tmp_file.close()
try:
p = subprocess.run([layout, '-T'+format, tmp_name],
capture_output = True)
except OSError:
print('''Graphviz executable not found.
Graphviz must be installed and in your search path.
Please visit http://www.graphviz.org/ for information on installation.
After installation, ensure that the PATH variable is properly set.''')
return None
p.check_returncode()
os.remove(tmp_name)
if p.stderr:
print(p.stderr)
return p.stdout
def display(self, highlight = None, basename = 'graph', format = 'png',
pause = False, wait_for_click = True):
'''
API:
            display(self, highlight = None, basename = 'graph', format = 'png',
            pause = False, wait_for_click = True)
Description:
Displays graph according to the arguments provided.
Current display modes: 'off', 'file', 'PIL', 'matplotlib', 'xdot',
'svg'
Current layout modes: Layouts provided by graphviz ('dot', 'fdp',
'circo', etc.) and 'dot2tex'.
Current formats: Formats provided by graphviz ('ps', 'pdf', 'png',
etc.)
Input:
highlight: List of nodes to be highlighted.
basename: File name. It will be used if display mode is 'file'.
            format: Image format; all formats supported by Dot are welcome.
pause: If display is 'matplotlib', window will remain open until closed.
wait_for_click: If display is 'matplotlib', setting to True will
wait for a button click before proceeding. This is useful when
animating an algorithm.
Post:
A display window will pop up or a file will be written depending
on display mode.
'''
if self.attr['display'] == 'off':
return
if highlight != None:
for n in highlight:
if not isinstance(n, Node):
n = self.get_node(n)
n.set_attr('color', 'red')
if self.get_layout() == 'dot2tex':
if self.attr['display'] != 'file':
self.attr['display'] = 'file'
print("Warning: Dot2tex layout can only be used with display mode 'file'")
print(" Automatically changing setting")
if self.attr['display'] == 'file':
if self.get_layout() == 'dot2tex':
try:
if DOT2TEX_INSTALLED:
                        if format != 'pdf' and format != 'ps':
print("Dot2tex only supports pdf and ps formats, falling back to pdf")
format = 'pdf'
self.set_layout('dot')
tex = dot2tex.dot2tex(self.to_string(), autosize=True, texmode = 'math', template = DOT2TEX_TEMPLATE)
else:
print("Error: Dot2tex not installed.")
except:
try:
self.set_layout('dot')
with open(basename+'.dot', "w+b") as f:
self.write(f, self.get_layout(), 'dot')
p = subprocess.call(['dot2tex', '-t math',
basename + '.dot'])
except:
print("There was an error running dot2tex.")
with open(basename+'.tex', 'w') as f:
f.write(tex)
try:
subprocess.call(['latex', basename])
if format == 'ps':
subprocess.call(['dvips', basename])
elif format == 'pdf':
subprocess.call(['pdflatex', basename])
self.set_layout('dot2tex')
except:
print("There was an error runing latex. Is it installed?")
else:
with open(basename+'.'+format, "w+b") as f:
self.write(f, self.get_layout(), format)
return
elif self.attr['display'] == 'PIL':
if PIL_INSTALLED:
tmp_fd, tmp_name = tempfile.mkstemp()
tmp_file = os.fdopen(tmp_fd, 'w+b')
self.write(tmp_file, self.get_layout(), format)
tmp_file.close()
im = PIL_Image.open(tmp_name)
im.show()
os.remove(tmp_name)
else:
print('Error: PIL not installed. Display disabled.')
self.attr['display'] = 'off'
elif self.attr['display'] == 'matplotlib':
if MATPLOTLIB_INSTALLED and PIL_INSTALLED:
tmp_fd, tmp_name = tempfile.mkstemp()
tmp_file = os.fdopen(tmp_fd, 'w+b')
self.write(tmp_file, self.get_layout(), format)
tmp_file.close()
im = PIL_Image.open(tmp_name)
fig = plt.figure(1)
fig.canvas.mpl_connect('close_event', handle_close)
plt.clf()
plt.axis('off')
plt.imshow(im, interpolation='bilinear' #resample=True
#extent = (0, 100, 0, 100)
)
if wait_for_click == True:
plt.draw()
try:
if plt.waitforbuttonpress(timeout = 10000):
plt.close()
exit()
except:
exit()
else:
plt.show(block=pause)
im.close()
os.remove(tmp_name)
else:
print('Warning: Either matplotlib or Pillow is not installed. Display disabled.')
self.attr['display'] = 'off'
elif self.attr['display'] == 'xdot':
if XDOT_INSTALLED:
window = xdot.DotWindow()
window.set_dotcode(self.to_string())
window.connect('destroy', gtk.main_quit)
gtk.main()
else:
print('Error: xdot not installed. Display disabled.')
self.attr['display'] = 'off'
else:
print("Unknown display mode: ", end=' ')
print(self.attr['display'])
if highlight != None:
for n in highlight:
if not isinstance(n, Node):
n = self.get_node(n)
n.set_attr('color', 'black')
def set_display_mode(self, value):
'''
API:
set_display_mode(self, value)
Description:
Sets display mode to value.
Input:
value: New display mode.
Post:
Display mode attribute of graph is updated.
'''
self.attr['display'] = value
def max_flow(self, source, sink, display = None, algo = 'DFS'):
'''
        API: max_flow(self, source, sink, display = None, algo = 'DFS')
Description:
Finds maximum flow from source to sink by a depth-first search based
augmenting path algorithm.
Pre:
Assumes a directed graph in which each arc has a 'capacity'
        attribute and for which there do not exist both arcs (i,j)
and (j, i) for any pair of nodes i and j.
Input:
source: Source node name.
sink: Sink node name.
display: Display mode.
Post:
The 'flow" attribute of each arc gives a maximum flow.
'''
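        # Hedged usage sketch (arc data is made up): capacities default to
        # INF when omitted, and the computed flows land on the arcs.
        #   g = Graph(type=DIRECTED_GRAPH)
        #   g.add_edge('s', 'a', capacity=3)
        #   g.add_edge('a', 't', capacity=2)
        #   g.max_flow('s', 't', 'off')
        #   print(g.get_edge_attr('a', 't', 'flow'))   # expected: 2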
if display is not None:
old_display = self.attr['display']
self.attr['display'] = display
nl = self.get_node_list()
# set flow of all edges to 0
for e in self.edge_attr:
self.edge_attr[e]['flow'] = 0
if 'capacity' in self.edge_attr[e]:
capacity = self.edge_attr[e]['capacity']
self.edge_attr[e]['label'] = str(capacity)+'/0'
else:
self.edge_attr[e]['capacity'] = INF
self.edge_attr[e]['label'] = 'INF/0'
while True:
# find an augmenting path from source to sink using DFS
if algo == 'DFS':
q = Stack()
elif algo == 'BFS':
q = Queue()
q.push(source)
pred = {source:None}
explored = [source]
for n in nl:
self.get_node(n).set_attr('color', 'black')
for e in self.edge_attr:
if self.edge_attr[e]['flow'] == 0:
self.edge_attr[e]['color'] = 'black'
elif self.edge_attr[e]['flow']==self.edge_attr[e]['capacity']:
self.edge_attr[e]['color'] = 'red'
else:
self.edge_attr[e]['color'] = 'green'
self.display()
while not q.isEmpty():
current = q.peek()
q.remove(current)
if current == sink:
break
out_neighbor = self.neighbors[current]
in_neighbor = self.in_neighbors[current]
neighbor = out_neighbor+in_neighbor
for m in neighbor:
if m in explored:
continue
self.get_node(m).set_attr('color', 'yellow')
if m in out_neighbor:
self.set_edge_attr(current, m, 'color', 'yellow')
available_capacity = (
self.get_edge_attr(current, m, 'capacity')-
self.get_edge_attr(current, m, 'flow'))
else:
self.set_edge_attr(m, current, 'color', 'yellow')
available_capacity=self.get_edge_attr(m, current, 'flow')
self.display()
if available_capacity > 0:
self.get_node(m).set_attr('color', 'blue')
if m in out_neighbor:
self.set_edge_attr(current, m, 'color', 'blue')
else:
self.set_edge_attr(m, current, 'color', 'blue')
explored.append(m)
pred[m] = current
q.push(m)
else:
self.get_node(m).set_attr('color', 'black')
if m in out_neighbor:
if (self.get_edge_attr(current, m, 'flow') ==
self.get_edge_attr(current, m, 'capacity')):
self.set_edge_attr(current, m, 'color', 'red')
elif self.get_edge_attr(current, m, 'flow') == 0:
self.set_edge_attr(current, m, 'color', 'black')
#else:
# self.set_edge_attr(current, m, 'color', 'green')
else:
if (self.get_edge_attr(m, current, 'flow') ==
self.get_edge_attr(m, current, 'capacity')):
self.set_edge_attr(m, current, 'color', 'red')
elif self.get_edge_attr(m, current, 'flow') == 0:
self.set_edge_attr(m, current, 'color', 'black')
#else:
# self.set_edge_attr(m, current, 'color', 'green')
self.display()
# if no path with positive capacity from source sink exists, stop
if sink not in pred:
break
# find capacity of the path
current = sink
min_capacity = 'infinite'
while True:
m = pred[current]
if (m,current) in self.edge_attr:
arc_capacity = self.edge_attr[(m, current)]['capacity']
flow = self.edge_attr[(m, current)]['flow']
potential = arc_capacity-flow
if min_capacity == 'infinite':
min_capacity = potential
elif min_capacity > potential:
min_capacity = potential
else:
potential = self.edge_attr[(current, m)]['flow']
if min_capacity == 'infinite':
min_capacity = potential
elif min_capacity > potential:
min_capacity = potential
if m == source:
break
current = m
# update flows on the path
current = sink
while True:
m = pred[current]
if (m, current) in self.edge_attr:
flow = self.edge_attr[(m, current)]['flow']
capacity = self.edge_attr[(m, current)]['capacity']
new_flow = flow+min_capacity
self.edge_attr[(m, current)]['flow'] = new_flow
if capacity == INF:
self.edge_attr[(m, current)]['label'] = \
'INF' + '/'+str(new_flow)
else:
self.edge_attr[(m, current)]['label'] = \
str(capacity)+'/'+str(new_flow)
if new_flow==capacity:
self.edge_attr[(m, current)]['color'] = 'red'
else:
self.edge_attr[(m, current)]['color'] = 'green'
self.display()
else:
flow = self.edge_attr[(current, m)]['flow']
capacity = self.edge_attr[(current, m)]['capacity']
new_flow = flow-min_capacity
self.edge_attr[(current, m)]['flow'] = new_flow
if capacity == INF:
self.edge_attr[(current, m)]['label'] = \
'INF' + '/'+str(new_flow)
else:
self.edge_attr[(current, m)]['label'] = \
str(capacity)+'/'+str(new_flow)
if new_flow==0:
self.edge_attr[(current, m)]['color'] = 'red'
else:
self.edge_attr[(current, m)]['color'] = 'green'
self.display()
if m == source:
break
current = m
if display is not None:
self.attr['display'] = old_display
def get_negative_cycle(self):
'''
API:
get_negative_cycle(self)
Description:
            Finds and returns a negative cost cycle using the 'cost'
            attribute of arcs. The return value is a list of nodes
            representing the cycle, in the following form:
            n_1-n_2-...-n_k, where the cycle has k nodes.
Pre:
Arcs should have 'cost' attribute.
Return:
Returns a list of nodes in the cycle if a negative cycle exists,
returns None otherwise.
'''
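        # Tiny worked example (made-up costs): with arcs (0,1) of cost 1 and
        # (1,0) of cost -3, the closed walk 0-1-0 costs -2, so this method
        # would return a cycle such as [0, 1]; with both costs positive it
        # would return None.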
(valid, distance, nextn) = self.floyd_warshall()
if not valid:
cycle = self.floyd_warshall_get_cycle(distance, nextn)
return cycle
else:
return None
def floyd_warshall(self):
'''
API:
floyd_warshall(self)
Description:
Finds all pair shortest paths and stores it in a list of lists.
This is possible if the graph does not have negative cycles. It will
return a tuple with 3 elements. The first element indicates whether
the graph has a negative cycle. It is true if the graph does not
have a negative cycle, ie. distances found are valid shortest
distances. The second element is a dictionary of shortest distances
between nodes. Keys are tuple of node pairs ie. (i,j). The third
element is a dictionary that helps to retrieve the shortest path
between nodes. Then return value can be represented as (validity,
distance, nextn) where nextn is the dictionary to retrieve paths.
distance and nextn can be used as inputs to other methods to get
shortest path between nodes.
Pre:
Arcs should have 'cost' attribute.
Return:
Returns (validity, distance, nextn). The distances are valid if
validity is True.
'''
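        # Hedged usage sketch (i and j are hypothetical node names):
        #   valid, dist, nextn = g.floyd_warshall()
        #   if valid:
        #       print(dist[(i, j)])   # shortest path length from i to j
        #       print(g.floyd_warshall_get_path(dist, nextn, i, j))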
nl = self.get_node_list()
el = self.get_edge_list()
# initialize distance
distance = {}
for i in nl:
for j in nl:
distance[(i,j)] = 'infinity'
for i in nl:
distance[(i,i)] = 0
for e in el:
distance[(e[0],e[1])] = self.get_edge_cost(e)
# == end of distance initialization
# initialize next
nextn = {}
for i in nl:
for j in nl:
if i==j or distance[(i,j)]=='infinity':
nextn[(i,j)] = None
else:
nextn[(i,j)] = i
# == end of next initialization
# compute shortest distance
for k in nl:
for i in nl:
for j in nl:
if distance[(i,k)]=='infinity' or distance[(k,j)]=='infinity':
continue
elif distance[(i,j)]=='infinity':
distance[(i,j)] = distance[(i,k)] + distance[(k,j)]
nextn[(i,j)] = nextn[(k,j)]
elif distance[(i,j)] > distance[(i,k)] + distance[(k,j)]:
distance[(i,j)] = distance[(i,k)] + distance[(k,j)]
nextn[(i,j)] = nextn[(k,j)]
# == end of compute shortest distance
# check if graph has negative cycles
for i in nl:
if distance[(i,i)] < 0:
# shortest distances are not valid
# graph has negative cycle
return (False, distance, nextn)
return (True, distance, nextn)
def floyd_warshall_get_path(self, distance, nextn, i, j):
'''
API:
floyd_warshall_get_path(self, distance, nextn, i, j):
Description:
Finds shortest path between i and j using distance and nextn
dictionaries.
Pre:
(1) distance and nextn are outputs of floyd_warshall method.
            (2) The graph does not have a negative cycle, i.e.
distance[(i,i)] >=0 for all node i.
Return:
Returns the list of nodes on the path from i to j, ie. [i,...,j]
'''
if distance[(i,j)]=='infinity':
return None
k = nextn[(i,j)]
path = self.floyd_warshall_get_path
if i==k:
return [i, j]
else:
return path(distance, nextn, i,k) + [k] + path(distance, nextn, k,j)
def floyd_warshall_get_cycle(self, distance, nextn, element = None):
'''
API:
floyd_warshall_get_cycle(self, distance, nextn, element = None)
Description:
Finds a negative cycle in the graph.
Pre:
(1) distance and nextn are outputs of floyd_warshall method.
            (2) The graph should have a negative cycle, i.e.
distance[(i,i)] < 0 for some node i.
Return:
Returns the list of nodes on the cycle. Ex: [i,j,k,...,r], where
(i,j), (j,k) and (r,i) are some edges in the cycle.
'''
nl = self.get_node_list()
if element is None:
for i in nl:
if distance[(i,i)] < 0:
# graph has a cycle on the path from i to i.
element = i
break
else:
raise Exception('Graph does not have a negative cycle!')
elif distance[(element,element)] >= 0:
raise Exception('Graph does not have a negative cycle that contains node '+str(element)+'!')
# find the cycle on the path from i to i.
cycle = [element]
k = nextn[(element,element)]
while k not in cycle:
cycle.insert(1,k)
k = nextn[(element,k)]
if k==element:
return cycle
else:
return self.floyd_warshall_get_cycle(distance, nextn, k)
def find_cycle_capacity(self, cycle):
'''
API:
find_cycle_capacity(self, cycle):
Description:
Finds capacity of the cycle input.
Pre:
(1) Arcs should have 'capacity' attribute.
Input:
cycle: a list representing a cycle
Return:
Returns an integer number representing capacity of cycle.
'''
index = 0
k = len(cycle)
capacity = self.get_edge_attr(cycle[k-1], cycle[0], 'capacity')
while index<(k-1):
i = cycle[index]
j = cycle[index+1]
capacity_ij = self.get_edge_attr(i, j, 'capacity')
if capacity > capacity_ij:
capacity = capacity_ij
index += 1
return capacity
def fifo_label_correcting(self, source):
'''
API:
fifo_label_correcting(self, source)
Description:
            Finds shortest paths from source to every other node. Returns
            the predecessor dictionary. If the graph has a negative cycle,
            detects and returns it.
Pre:
(1) 'cost' attribute of arcs. It will be used to compute shortest
path.
Input:
source: source node
Post:
Modifies 'distance' attribute of nodes.
Return:
            If there is no negative cycle returns (True, pred), otherwise
            returns (False, cycle), where pred is the predecessor dictionary
            and cycle is a list of nodes that represents the cycle. It is in
[n_1, n_2, ..., n_k] form where the cycle has k nodes.
'''
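        # Hedged usage sketch: arcs are assumed to carry a 'cost' attribute.
        #   ok, result = g.fifo_label_correcting(source)
        #   if ok:
        #       pred = result    # walk pred[] back from any node to source
        #   else:
        #       cycle = result   # list of nodes on a negative cycle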
pred = {}
self.get_node(source).set_attr('distance', 0)
pred[source] = None
for n in self.neighbors:
if n!=source:
self.get_node(n).set_attr('distance', 'inf')
q = [source]
while q:
i = q[0]
q = q[1:]
for j in self.neighbors[i]:
distance_j = self.get_node(j).get_attr('distance')
distance_i = self.get_node(i).get_attr('distance')
c_ij = self.get_edge_attr(i, j, 'cost')
if distance_j > distance_i + c_ij:
self.get_node(j).set_attr('distance', distance_i+c_ij)
if j in pred:
pred[j] = i
cycle = self.label_correcting_check_cycle(j, pred)
if cycle is not None:
return (False, cycle)
else:
pred[j] = i
if j not in q:
q.append(j)
return (True, pred)
def label_correcting_check_cycle(self, j, pred):
'''
API:
label_correcting_check_cycle(self, j, pred)
Description:
Checks if predecessor dictionary has a cycle, j represents the node
that predecessor is recently updated.
Pre:
(1) predecessor of source node should be None.
Input:
j: node that predecessor is recently updated.
pred: predecessor dictionary
Return:
If there exists a cycle, returns the list that represents the
            cycle, otherwise it returns None.
'''
labelled = {}
for n in self.neighbors:
labelled[n] = None
current = j
while current != None:
if labelled[current]==j:
cycle = self.label_correcting_get_cycle(j, pred)
return cycle
labelled[current] = j
current = pred[current]
return None
def label_correcting_get_cycle(self, j, pred):
'''
API:
            label_correcting_get_cycle(self, j, pred)
        Description:
            In label_correcting_check_cycle() it is decided that pred has a
            cycle and the nodes in the cycle are labelled. We will create a
            list of nodes in the cycle using the j and pred inputs.
Pre:
This method should be called from label_correcting_check_cycle(),
unless you are sure about what you are doing.
Input:
j: Node that predecessor is recently updated. We know that it is
in the cycle
pred: Predecessor dictionary that contains a cycle
Post:
            Returns a list of nodes that represents the cycle. It is in
[n_1, n_2, ..., n_k] form where the cycle has k nodes.
'''
cycle = []
cycle.append(j)
current = pred[j]
while current!=j:
cycle.append(current)
current = pred[current]
cycle.reverse()
return cycle
def augment_cycle(self, amount, cycle):
'''
API:
augment_cycle(self, amount, cycle):
Description:
Augments 'amount' unit of flow along cycle.
Pre:
Arcs should have 'flow' attribute.
Inputs:
amount: An integer representing the amount to augment
cycle: A list representing a cycle
Post:
Changes 'flow' attributes of arcs.
'''
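        # Worked example (illustrative): augmenting 2 units along the cycle
        # [0, 1, 2] adds 2 to the 'flow' of each forward arc (e.g. (0,1))
        # and subtracts 2 from the 'flow' of each arc traversed backwards
        # (e.g. (2,1) when the graph has no arc (1,2)).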
index = 0
k = len(cycle)
while index<(k-1):
i = cycle[index]
j = cycle[index+1]
if (i,j) in self.edge_attr:
flow_ij = self.edge_attr[(i,j)]['flow']
self.edge_attr[(i,j)]['flow'] = flow_ij+amount
else:
flow_ji = self.edge_attr[(j,i)]['flow']
self.edge_attr[(j,i)]['flow'] = flow_ji-amount
index += 1
i = cycle[k-1]
j = cycle[0]
if (i,j) in self.edge_attr:
flow_ij = self.edge_attr[(i,j)]['flow']
self.edge_attr[(i,j)]['flow'] = flow_ij+amount
else:
flow_ji = self.edge_attr[(j,i)]['flow']
self.edge_attr[(j,i)]['flow'] = flow_ji-amount
def network_simplex(self, display, pivot, root):
'''
API:
network_simplex(self, display, pivot, root)
Description:
Solves minimum cost feasible flow problem using network simplex
algorithm. It is recommended to use min_cost_flow(algo='simplex')
instead of using network_simplex() directly. Returns True when an
optimal solution is found, returns False otherwise. 'flow' attribute
values of arcs should be considered as junk when returned False.
Pre:
(1) check Pre section of min_cost_flow()
Input:
pivot: specifies pivot rule. Check min_cost_flow()
display: 'off' for no display, 'matplotlib' for live update of
spanning tree.
root: Root node for the underlying spanning trees that will be
                generated by the network simplex algorithm.
Post:
(1) Changes 'flow' attribute of edges.
Return:
Returns True when an optimal solution is found, returns
False otherwise.
'''
# ==== determine an initial tree structure (T,L,U)
# find a feasible flow
if not self.find_feasible_flow():
return False
t = self.simplex_find_tree()
self.set_display_mode(display)
# mark spanning tree arcs
self.simplex_mark_st_arcs(t)
# display initial spanning tree
t.simplex_redraw(display, root)
t.set_display_mode(display)
#t.display()
self.display()
# set predecessor, depth and thread indexes
t.simplex_search(root, 1)
# compute potentials
self.simplex_compute_potentials(t, root)
# while some nontree arc violates optimality conditions
while not self.simplex_optimal(t):
self.display()
# select an entering arc (k,l)
(k,l) = self.simplex_select_entering_arc(t, pivot)
self.simplex_mark_entering_arc(k, l)
self.display()
# determine leaving arc
((p,q), capacity, cycle)=self.simplex_determine_leaving_arc(t,k,l)
# mark leaving arc
self.simplex_mark_leaving_arc(p, q)
self.display()
self.simplex_remove_arc(t, p, q, capacity, cycle)
# display after arc removed
self.display()
self.simplex_mark_st_arcs(t)
self.display()
# set predecessor, depth and thread indexes
t.simplex_redraw(display, root)
#t.display()
t.simplex_search(root, 1)
# compute potentials
self.simplex_compute_potentials(t, root)
return True
def simplex_mark_leaving_arc(self, p, q):
'''
API:
            simplex_mark_leaving_arc(self, p, q)
Description:
Marks leaving arc.
Input:
p: tail of the leaving arc
q: head of the leaving arc
Post:
Changes color attribute of leaving arc.
'''
self.set_edge_attr(p, q, 'color', 'red')
def simplex_determine_leaving_arc(self, t, k, l):
'''
API:
simplex_determine_leaving_arc(self, t, k, l)
Description:
Determines and returns the leaving arc.
Input:
t: current spanning tree solution.
k: tail of the entering arc.
l: head of the entering arc.
Return:
Returns the tuple that represents leaving arc, capacity of the
cycle and cycle.
'''
# k,l are the first two elements of the cycle
cycle = self.simplex_identify_cycle(t, k, l)
flow_kl = self.get_edge_attr(k, l, 'flow')
capacity_kl = self.get_edge_attr(k, l, 'capacity')
min_capacity = capacity_kl
# check if k,l is in U or L
if flow_kl==capacity_kl:
# l,k will be the last two elements
cycle.reverse()
n = len(cycle)
index = 0
# determine last blocking arc
t.add_edge(k, l)
tel = t.get_edge_list()
while index < (n-1):
if (cycle[index], cycle[index+1]) in tel:
flow = self.edge_attr[(cycle[index], cycle[index+1])]['flow']
capacity = \
self.edge_attr[(cycle[index],cycle[index+1])]['capacity']
if min_capacity >= (capacity-flow):
candidate = (cycle[index], cycle[index+1])
min_capacity = capacity-flow
else:
flow = self.edge_attr[(cycle[index+1], cycle[index])]['flow']
if min_capacity >= flow:
candidate = (cycle[index+1], cycle[index])
min_capacity = flow
index += 1
# check arc (cycle[n-1], cycle[0])
if (cycle[n-1], cycle[0]) in tel:
flow = self.edge_attr[(cycle[n-1], cycle[0])]['flow']
capacity = self.edge_attr[(cycle[n-1], cycle[0])]['capacity']
if min_capacity >= (capacity-flow):
candidate = (cycle[n-1], cycle[0])
min_capacity = capacity-flow
else:
flow = self.edge_attr[(cycle[0], cycle[n-1])]['flow']
if min_capacity >= flow:
candidate = (cycle[0], cycle[n-1])
min_capacity = flow
return (candidate, min_capacity, cycle)
def simplex_mark_entering_arc(self, k, l):
'''
API:
simplex_mark_entering_arc(self, k, l)
Description:
Marks entering arc (k,l)
Input:
k: tail of the entering arc
l: head of the entering arc
Post:
(1) color attribute of the arc (k,l)
'''
self.set_edge_attr(k, l, 'color', 'green')
def simplex_mark_st_arcs(self, t):
'''
API:
simplex_mark_st_arcs(self, t)
Description:
            Marks spanning tree arcs. In the current implementation arcs in
            the spanning tree are colored blue, and non-tree arcs (which must
            be at their lower or upper bound) are colored black; a non-tree
            arc with flow strictly between its bounds raises an exception.
Input:
t: t is the current spanning tree
Post:
(1) color attribute of edges.
'''
tel = list(t.edge_attr.keys())
for e in self.get_edge_list():
flow_e = self.edge_attr[e]['flow']
capacity_e = self.edge_attr[e]['capacity']
if e in tel:
if flow_e == 0:
self.edge_attr[e]['color'] = 'blue'
elif flow_e == capacity_e:
self.edge_attr[e]['color'] = 'blue'
else:
self.edge_attr[e]['color'] = 'blue'
else:
if flow_e == 0:
self.edge_attr[e]['color'] = 'black'
elif flow_e == capacity_e:
self.edge_attr[e]['color'] = 'black'
else:
msg = "Arc is not in ST but has flow between bounds."
raise Exception(msg)
def print_flow(self):
'''
API:
print_flow(self)
Description:
Prints all positive flows to stdout. This method can be used for
debugging purposes.
'''
print('printing current edge, flow, capacity')
for e in self.edge_attr:
if self.edge_attr[e]['flow']!=0:
print(e, str(self.edge_attr[e]['flow']).ljust(4), end=' ')
print(str(self.edge_attr[e]['capacity']).ljust(4))
def simplex_redraw(self, display, root):
'''
API:
simplex_redraw(self, display, root)
Description:
            Returns a new graph instance that is the same as self but adds nodes
and arcs in a way that the resulting tree will be displayed
properly.
Input:
display: display mode
root: root node in tree.
Return:
Returns a graph same as self.
'''
nl = self.get_node_list()
el = self.get_edge_list()
new = Graph(type=DIRECTED_GRAPH, layout='dot', display=display)
pred_i = self.get_node(root).get_attr('pred')
thread_i = self.get_node(root).get_attr('thread')
depth_i = self.get_node(root).get_attr('depth')
new.add_node(root, pred=pred_i, thread=thread_i, depth=depth_i)
q = [root]
visited = [root]
while q:
name = q.pop()
visited.append(name)
neighbors = self.neighbors[name] + self.in_neighbors[name]
for n in neighbors:
if n not in new.get_node_list():
pred_i = self.get_node(n).get_attr('pred')
thread_i = self.get_node(n).get_attr('thread')
depth_i = self.get_node(n).get_attr('depth')
new.add_node(n, pred=pred_i, thread=thread_i, depth=depth_i)
if (name,n) in el:
if (name,n) not in new.edge_attr:
new.add_edge(name,n)
else:
if (n,name) not in new.edge_attr:
new.add_edge(n,name)
if n not in visited:
q.append(n)
for e in el:
flow = self.edge_attr[e]['flow']
capacity = self.edge_attr[e]['capacity']
cost = self.edge_attr[e]['cost']
new.edge_attr[e]['flow'] = flow
new.edge_attr[e]['capacity'] = capacity
new.edge_attr[e]['cost'] = cost
new.edge_attr[e]['label'] = "%d/%d/%d" %(flow,capacity,cost)
return new
def simplex_remove_arc(self, t, p, q, min_capacity, cycle):
'''
API:
            simplex_remove_arc(self, t, p, q, min_capacity, cycle)
Description:
Removes arc (p,q), updates t, updates flows, where (k,l) is
the entering arc.
Input:
t: tree solution to be updated.
p: tail of the leaving arc.
q: head of the leaving arc.
min_capacity: capacity of the cycle.
cycle: cycle obtained when entering arc considered.
Post:
(1) updates t.
(2) updates 'flow' attributes.
'''
# augment min_capacity along cycle
n = len(cycle)
tel = list(t.edge_attr.keys())
index = 0
while index < (n-1):
if (cycle[index], cycle[index+1]) in tel:
flow_e = self.edge_attr[(cycle[index], cycle[index+1])]['flow']
self.edge_attr[(cycle[index], cycle[index+1])]['flow'] =\
flow_e+min_capacity
else:
flow_e = self.edge_attr[(cycle[index+1], cycle[index])]['flow']
self.edge_attr[(cycle[index+1], cycle[index])]['flow'] =\
flow_e-min_capacity
index += 1
# augment arc cycle[n-1], cycle[0]
if (cycle[n-1], cycle[0]) in tel:
flow_e = self.edge_attr[(cycle[n-1], cycle[0])]['flow']
self.edge_attr[(cycle[n-1], cycle[0])]['flow'] =\
flow_e+min_capacity
else:
flow_e = self.edge_attr[(cycle[0], cycle[n-1])]['flow']
self.edge_attr[(cycle[0], cycle[n-1])]['flow'] =\
flow_e-min_capacity
# remove leaving arc
t.del_edge((p, q))
# set label of removed arc
flow_pq = self.get_edge_attr(p, q, 'flow')
capacity_pq = self.get_edge_attr(p, q, 'capacity')
cost_pq = self.get_edge_attr(p, q, 'cost')
self.set_edge_attr(p, q, 'label',
"%d/%d/%d" %(flow_pq,capacity_pq,cost_pq))
for e in t.edge_attr:
flow = self.edge_attr[e]['flow']
capacity = self.edge_attr[e]['capacity']
cost = self.edge_attr[e]['cost']
t.edge_attr[e]['flow'] = flow
t.edge_attr[e]['capacity'] = capacity
t.edge_attr[e]['cost'] = cost
t.edge_attr[e]['label'] = "%d/%d/%d" %(flow,capacity,cost)
self.edge_attr[e]['label'] = "%d/%d/%d" %(flow,capacity,cost)
def simplex_select_entering_arc(self, t, pivot):
'''
API:
simplex_select_entering_arc(self, t, pivot)
Description:
Decides and returns entering arc using pivot rule.
Input:
t: current spanning tree solution
pivot: May be one of the following; 'first_eligible' or 'dantzig'.
'dantzig' is the default value.
Return:
Returns entering arc tuple (k,l)
'''
if pivot=='dantzig':
# pick the maximum violation
candidate = {}
for e in self.edge_attr:
if e in t.edge_attr:
continue
flow_ij = self.edge_attr[e]['flow']
potential_i = self.get_node(e[0]).get_attr('potential')
potential_j = self.get_node(e[1]).get_attr('potential')
capacity_ij = self.edge_attr[e]['capacity']
c_ij = self.edge_attr[e]['cost']
cpi_ij = c_ij - potential_i + potential_j
if flow_ij==0:
if cpi_ij < 0:
candidate[e] = cpi_ij
elif flow_ij==capacity_ij:
if cpi_ij > 0:
candidate[e] = cpi_ij
for e in candidate:
max_c = e
max_v = abs(candidate[e])
break
for e in candidate:
if max_v < abs(candidate[e]):
max_c = e
max_v = abs(candidate[e])
elif pivot=='first_eligible':
# pick the first eligible
for e in self.edge_attr:
if e in t.edge_attr:
continue
flow_ij = self.edge_attr[e]['flow']
potential_i = self.get_node(e[0]).get_attr('potential')
potential_j = self.get_node(e[1]).get_attr('potential')
capacity_ij = self.edge_attr[e]['capacity']
c_ij = self.edge_attr[e]['cost']
cpi_ij = c_ij - potential_i + potential_j
if flow_ij==0:
if cpi_ij < 0:
max_c = e
max_v = abs(cpi_ij)
elif flow_ij==capacity_ij:
if cpi_ij > 0:
max_c = e
max_v = cpi_ij
else:
raise Exception("Unknown pivot rule.")
return max_c
def simplex_optimal(self, t):
'''
API:
simplex_optimal(self, t)
Description:
Checks if the current solution is optimal, if yes returns True,
False otherwise.
Pre:
'flow' attributes represents a solution.
Input:
            t: Graph instance that represents the spanning tree solution.
Return:
Returns True if the current solution is optimal (optimality
conditions are satisfied), else returns False
'''
for e in self.edge_attr:
if e in t.edge_attr:
continue
flow_ij = self.edge_attr[e]['flow']
potential_i = self.get_node(e[0]).get_attr('potential')
potential_j = self.get_node(e[1]).get_attr('potential')
capacity_ij = self.edge_attr[e]['capacity']
c_ij = self.edge_attr[e]['cost']
cpi_ij = c_ij - potential_i + potential_j
if flow_ij==0:
if cpi_ij < 0:
return False
elif flow_ij==capacity_ij:
if cpi_ij > 0:
return False
return True
def simplex_find_tree(self):
'''
API:
simplex_find_tree(self)
Description:
            Assumes a feasible flow solution stored in the 'flow' attributes of
arcs and converts this solution to a feasible spanning tree
solution.
Pre:
(1) 'flow' attributes represents a feasible flow solution.
Post:
(1) 'flow' attributes may change when eliminating cycles.
Return:
Return a Graph instance that is a spanning tree solution.
'''
# find a cycle
solution_g = self.get_simplex_solution_graph()
cycle = solution_g.simplex_find_cycle()
while cycle is not None:
# find amount to augment and direction
amount = self.simplex_augment_cycle(cycle)
# augment along the cycle
self.augment_cycle(amount, cycle)
# find a new cycle
solution_g = self.get_simplex_solution_graph()
cycle = solution_g.simplex_find_cycle()
# check if the solution is connected
while self.simplex_connect(solution_g):
pass
# add attributes
for e in self.edge_attr:
flow = self.edge_attr[e]['flow']
capacity = self.edge_attr[e]['capacity']
cost = self.edge_attr[e]['cost']
self.edge_attr[e]['label'] = "%d/%d/%d" %(flow,capacity,cost)
if e in solution_g.edge_attr:
solution_g.edge_attr[e]['flow'] = flow
solution_g.edge_attr[e]['capacity'] = capacity
solution_g.edge_attr[e]['cost'] = cost
solution_g.edge_attr[e]['label'] = "%d/%d/%d" %(flow,capacity,cost)
return solution_g
def simplex_connect(self, solution_g):
'''
API:
simplex_connect(self, solution_g)
Description:
At this point we assume that the solution does not have a cycle.
We check if all the nodes are connected, if not we add an arc to
solution_g that does not create a cycle and return True. Otherwise
we do nothing and return False.
Pre:
(1) We assume there is no cycle in the solution.
Input:
solution_g: current spanning tree solution instance.
Post:
(1) solution_g is updated. An arc that does not create a cycle is
added.
(2) 'component' attribute of nodes are changed.
Return:
Returns True if an arc is added, returns False otherwise.
'''
nl = solution_g.get_node_list()
current = nl[0]
pred = solution_g.simplex_search(current, current)
separated = list(pred.keys())
for n in nl:
if solution_g.get_node(n).get_attr('component') != current:
                # find an arc from n to the separated component
for m in separated:
if (n,m) in self.edge_attr:
solution_g.add_edge(n,m)
return True
elif (m,n) in self.edge_attr:
solution_g.add_edge(m,n)
return True
return False
def simplex_search(self, source, component_nr):
'''
API:
simplex_search(self, source, component_nr)
Description:
            Searches the graph starting from source. It differs from the
            usual search in that arcs may also be traversed backwards. When
            the graph is a spanning tree it computes predecessor, thread
            and depth
indexes and stores them as node attributes. These values should be
considered as junk when the graph is not a spanning tree.
Input:
source: source node
component_nr: component number
Post:
(1) Sets the component number of all reachable nodes to component.
Changes 'component' attribute of nodes.
(2) Sets 'pred', 'thread' and 'depth' attributes of nodes. These
values are junk if the graph is not a tree.
Return:
Returns predecessor dictionary.
'''
q = [source]
pred = {source:None}
depth = {source:0}
sequence = []
for n in self.neighbors:
self.get_node(n).set_attr('component', None)
while q:
current = q.pop()
self.get_node(current).set_attr('component', component_nr)
sequence.append(current)
neighbors = self.in_neighbors[current] + self.neighbors[current]
for n in neighbors:
if n in pred:
continue
self.get_node(n).set_attr('component', component_nr)
pred[n] = current
depth[n] = depth[current]+1
q.append(n)
for i in range(len(sequence)-1):
self.get_node(sequence[i]).set_attr('thread', int(sequence[i+1]))
self.get_node(sequence[-1]).set_attr('thread', int(sequence[0]))
for n in pred:
self.get_node(n).set_attr('pred', pred[n])
self.get_node(n).set_attr('depth', depth[n])
return pred
def simplex_augment_cycle(self, cycle):
'''
API:
simplex_augment_cycle(self, cycle)
Description:
            Computes the amount by which flow can be augmented along the
            cycle; the caller performs the actual augmentation.
Pre:
'flow', 'capacity' attributes on arcs.
Input:
cycle: list representing a cycle in the solution
        Return:
            Returns the amount by which the cycle can be augmented.
'''
# find amount to augment
index = 0
k = len(cycle)
el = list(self.edge_attr.keys())
# check arc (cycle[k-1], cycle[0])
if (cycle[k-1], cycle[0]) in el:
min_capacity = self.edge_attr[(cycle[k-1], cycle[0])]['capacity']-\
self.edge_attr[(cycle[k-1], cycle[0])]['flow']
else:
min_capacity = self.edge_attr[(cycle[0], cycle[k-1])]['flow']
# check rest of the arcs in the cycle
while index<(k-1):
i = cycle[index]
j = cycle[index+1]
if (i,j) in el:
capacity_ij = self.edge_attr[(i,j)]['capacity'] -\
self.edge_attr[(i,j)]['flow']
else:
capacity_ij = self.edge_attr[(j,i)]['flow']
if min_capacity > capacity_ij:
min_capacity = capacity_ij
index += 1
return min_capacity
def simplex_find_cycle(self):
'''
API:
simplex_find_cycle(self)
Description:
Returns a cycle (list of nodes) if the graph has one, returns None
otherwise. Uses DFS. During DFS checks existence of arcs to lower
            depth regions. Note that the direction of the arcs is not important.
Return:
Returns list of nodes that represents cycle. Returns None if the
graph does not have any cycle.
'''
# make a dfs, if you identify an arc to a lower depth node we have a
# cycle
nl = self.get_node_list()
q = [nl[0]]
visited = []
depth = {nl[0]:0}
pred = {nl[0]:None}
for n in nl:
self.get_node(n).set_attr('component', None)
component_nr = int(nl[0])
self.get_node(nl[0]).set_attr('component', component_nr)
while True:
while q:
current = q.pop()
visited.append(current)
neighbors = self.in_neighbors[current] +\
self.neighbors[current]
for n in neighbors:
if n==pred[current]:
continue
self.get_node(n).set_attr('component', component_nr)
if n in depth:
# we have a cycle
cycle1 = []
cycle2 = []
temp = n
while temp is not None:
cycle1.append(temp)
temp = pred[temp]
temp = current
while temp is not None:
cycle2.append(temp)
temp = pred[temp]
cycle1.pop()
cycle1.reverse()
cycle2.extend(cycle1)
return cycle2
else:
pred[n] = current
depth[n] = depth[current] + 1
if n not in visited:
q.append(n)
flag = False
for n in nl:
if self.get_node(n).get_attr('component') is None:
q.append(n)
depth = {n:0}
pred = {n:None}
visited = []
component_nr = int(n)
self.get_node(n).set_attr('component', component_nr)
flag = True
break
if not flag:
break
return None
def get_simplex_solution_graph(self):
'''
API:
get_simplex_solution_graph(self):
Description:
            Assumes a feasible flow solution stored in the 'flow' attributes of
arcs. Returns the graph with arcs that have flow between 0 and
capacity.
Pre:
(1) 'flow' attribute represents a feasible flow solution. See
Pre section of min_cost_flow() for details.
Return:
Graph instance that only has the arcs that have flow strictly
between 0 and capacity.
'''
simplex_g = Graph(type=DIRECTED_GRAPH)
for i in self.neighbors:
simplex_g.add_node(i)
for e in self.edge_attr:
flow_e = self.edge_attr[e]['flow']
capacity_e = self.edge_attr[e]['capacity']
if flow_e>0 and flow_e<capacity_e:
simplex_g.add_edge(e[0], e[1])
return simplex_g
def simplex_compute_potentials(self, t, root):
'''
API:
simplex_compute_potentials(self, t, root)
Description:
Computes node potentials for a minimum cost flow problem and stores
them as node attribute 'potential'. Based on pseudocode given in
Network Flows by Ahuja et al.
Pre:
(1) Assumes a directed graph in which each arc has a 'cost'
attribute.
(2) Uses 'thread' and 'pred' attributes of nodes.
Input:
t: Current spanning tree solution, its type is Graph.
root: root node of the tree.
Post:
Keeps the node potentials as 'potential' attribute.
'''
self.get_node(root).set_attr('potential', 0)
j = t.get_node(root).get_attr('thread')
        while j != root:
i = t.get_node(j).get_attr('pred')
potential_i = self.get_node(i).get_attr('potential')
if (i,j) in self.edge_attr:
c_ij = self.edge_attr[(i,j)]['cost']
self.get_node(j).set_attr('potential', potential_i-c_ij)
if (j,i) in self.edge_attr:
c_ji = self.edge_attr[(j,i)]['cost']
self.get_node(j).set_attr('potential', potential_i+c_ji)
j = t.get_node(j).get_attr('thread')
def simplex_identify_cycle(self, t, k, l):
'''
API:
            simplex_identify_cycle(self, t, k, l)
        Description:
            Identifies and returns the pivot cycle, which is a list of
            nodes.
Pre:
(1) t is spanning tree solution, (k,l) is the entering arc.
Input:
t: current spanning tree solution
k: tail of the entering arc
l: head of the entering arc
Returns:
List of nodes in the cycle.
'''
i = k
j = l
cycle = []
li = [k]
lj = [j]
        while i != j:
depth_i = t.get_node(i).get_attr('depth')
depth_j = t.get_node(j).get_attr('depth')
if depth_i > depth_j:
i = t.get_node(i).get_attr('pred')
li.append(i)
elif depth_i < depth_j:
j = t.get_node(j).get_attr('pred')
lj.append(j)
else:
i = t.get_node(i).get_attr('pred')
li.append(i)
j = t.get_node(j).get_attr('pred')
lj.append(j)
cycle.extend(lj)
li.pop()
li.reverse()
cycle.extend(li)
# l is beginning k is end
return cycle
def min_cost_flow(self, display = None, **args):
'''
API:
            min_cost_flow(self, display = None, **args)
Description:
Solves minimum cost flow problem using node/edge attributes with
the algorithm specified.
Pre:
(1) Assumes a directed graph in which each arc has 'capacity' and
'cost' attributes.
(2) Nodes should have 'demand' attribute. This value should be
positive for supply and negative for demand, and 0 for transhipment
nodes.
(3) The graph should be connected.
(4) Assumes (i,j) and (j,i) does not exist together. Needed when
solving max flow. (max flow problem is solved to get a feasible
flow).
Input:
display: 'off' for no display, 'matplotlib' for live update of tree
args: may have the following
display: display method, if not given current mode (the one
specified by __init__ or set_display) will be used.
algo: determines algorithm to use, can be one of the following
'simplex': network simplex algorithm
'cycle_canceling': cycle canceling algorithm
'simplex' is used if not given.
see Network Flows by Ahuja et al. for details of algorithms.
                pivot: valid if algo is 'simplex'; determines the pivoting
                    rule for simplex, may be one of the following:
                    'first_eligible' or 'dantzig'.
'dantzig' is used if not given.
see Network Flows by Ahuja et al. for pivot rules.
                root: valid if algo is 'simplex'; specifies the root node for
simplex algorithm. It is name of the one of the nodes. It
will be chosen randomly if not provided.
Post:
The 'flow' attribute of each arc gives the optimal flows.
'distance' attribute of the nodes are also changed during max flow
solution process.
Examples:
g.min_cost_flow():
solves minimum cost feasible flow problem using simplex
algorithm with dantzig pivoting rule.
See pre section for details.
g.min_cost_flow(algo='cycle_canceling'):
solves minimum cost feasible flow problem using cycle canceling
                algorithm.
            g.min_cost_flow(algo='simplex', pivot='first_eligible'):
                solves minimum cost feasible flow problem using the network
                simplex algorithm with the first-eligible pivot rule.
'''
if display is None:
display = self.attr['display']
if 'algo' in args:
algorithm = args['algo']
else:
algorithm = 'simplex'
if algorithm == 'simplex':
if 'root' in args:
root = args['root']
else:
for k in self.neighbors:
root = k
break
if 'pivot' in args:
if not self.network_simplex(display, args['pivot'], root):
print('problem is infeasible')
else:
if not self.network_simplex(display, 'dantzig', root):
print('problem is infeasible')
elif algorithm == 'cycle_canceling':
if not self.cycle_canceling(display):
print('problem is infeasible')
else:
print(args['algo'], 'is not a defined algorithm. Exiting.')
return
def random(self, numnodes = 10, degree_range = (2, 4), length_range = (1, 10),
density = None, edge_format = None, node_format = None,
Euclidean = False, seedInput = 0, add_labels = True,
parallel_allowed = False, node_selection = 'closest',
scale = 10, scale_cost = 5):
'''
API:
            random(self, numnodes = 10, degree_range = (2, 4),
            length_range = (1, 10), density = None, edge_format = None,
            node_format = None, Euclidean = False, seedInput = 0,
            add_labels = True, parallel_allowed = False,
            node_selection = 'closest', scale = 10, scale_cost = 5)
Description:
Populates graph with random edges and nodes.
Input:
numnodes: Number of nodes to add.
degree_range: A tuple that has lower and upper bounds of degree for
a node.
length_range: A tuple that has lower and upper bounds for 'cost'
attribute of edges.
            density: Density of edges, i.e. 0.5 indicates a node will
approximately have edge to half of the other nodes.
edge_format: Dictionary that specifies attribute values for edges.
node_format: Dictionary that specifies attribute values for nodes.
Euclidean: Creates an Euclidean graph (Euclidean distance between
nodes) if True.
seedInput: Seed that will be used for random number generation.
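            add_labels: If True, each added edge is labelled with its cost.
            parallel_allowed: If True, both (i,j) and (j,i) may be created.
            node_selection: For Euclidean graphs with a degree range, the
                neighbor selection rule, either 'closest' or 'random'.
            scale: Side length of the box nodes are placed in (Euclidean).
            scale_cost: Multiplier applied to Euclidean edge lengths when
                degree_range is used.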
Pre:
It is recommended to call this method on empty Graph objects.
Post:
Graph will be populated by nodes and edges.
'''
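        # Hedged usage sketch (parameter values are illustrative):
        #   g = Graph(type=UNDIRECTED_GRAPH)
        #   g.random(numnodes=8, Euclidean=True, seedInput=5)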
random.seed(seedInput)
if edge_format == None:
edge_format = {'fontsize':10,
'fontcolor':'blue'}
if node_format == None:
node_format = {'height':0.5,
'width':0.5,
'fixedsize':'true',
'fontsize':10,
'fontcolor':'red',
'shape':'circle',
}
if Euclidean == False:
for m in range(numnodes):
self.add_node(m, **node_format)
if degree_range is not None and density is None:
for m in range(numnodes):
degree = random.randint(degree_range[0], degree_range[1])
i = 0
while i < degree:
                        n = random.randint(0, numnodes-1)
if (((m,n) not in self.edge_attr and m != n) and
(parallel_allowed or (n, m) not in self.edge_attr)):
if length_range is not None:
length = random.randint(length_range[0],
length_range[1])
self.add_edge(m, n, cost = length, **edge_format)
if add_labels:
self.set_edge_attr(m, n, 'label', str(length))
else:
self.add_edge(m, n, **edge_format)
i += 1
elif density != None:
for m in range(numnodes):
if self.graph_type == DIRECTED_GRAPH:
numnodes2 = numnodes
else:
numnodes2 = m
for n in range(numnodes2):
if ((parallel_allowed or (n, m) not in self.edge_attr)
and m != n):
if random.random() < density:
if length_range is not None:
length = random.randint(length_range[0],
length_range[1])
self.add_edge(m, n, cost = length,
**edge_format)
if add_labels:
self.set_edge_attr(m, n, 'label', str(length))
else:
self.add_edge(m, n, **edge_format)
else:
print("Must set either degree range or density")
else:
for m in range(numnodes):
                ''' Assigns random coordinates (between 0 and scale) to the nodes
'''
x = random.random()*scale
y = random.random()*scale
self.add_node(m, locationx = x, locationy = y,
pos = '"'+str(x) + "," + str(y)+'!"',
**node_format)
if degree_range is not None and density is None:
for m in range(numnodes):
degree = random.randint(degree_range[0], degree_range[1])
i = 0
                    if node_selection == 'random':
                        while i < degree:
                            # pick a random candidate head first, then price
                            # the arc by scaled Euclidean distance
                            n = random.randint(0, numnodes-1)
                            if (((m,n) not in self.edge_attr and m != n) and
                                (parallel_allowed or (n, m) not in self.edge_attr)):
                                length = round((((self.get_node(n).get_attr('locationx') -
                                           self.get_node(m).get_attr('locationx')) ** 2 +
                                          (self.get_node(n).get_attr('locationy') -
                                           self.get_node(m).get_attr('locationy')) ** 2) ** 0.5)*scale_cost,
                                               0)
                                self.add_edge(m, n, cost = int(length), **edge_format)
                                if add_labels:
                                    self.set_edge_attr(m, n, 'label', str(int(length)))
                                i += 1
elif node_selection == 'closest':
lengths = []
for n in range(numnodes):
lengths.append((n, round((((self.get_node(n).get_attr('locationx') -
self.get_node(m).get_attr('locationx')) ** 2 +
(self.get_node(n).get_attr('locationy') -
self.get_node(m).get_attr('locationy')) ** 2) ** 0.5)*scale_cost,
0)))
lengths.sort(key = lambda l : l[1])
for i in range(degree+1):
if not (lengths[i][0] == m or self.check_edge(m, lengths[i][0])):
self.add_edge(m, lengths[i][0], cost = int(lengths[i][1]), **edge_format)
if add_labels:
self.set_edge_attr(m, lengths[i][0], 'label', str(int(lengths[i][1])))
else:
print("Unknown node selection rule...exiting")
return
elif density != None:
for m in range(numnodes):
if self.graph_type == DIRECTED_GRAPH:
numnodes2 = numnodes
else:
numnodes2 = m
for n in range(numnodes2):
if ((parallel_allowed or (n, m) not in self.edge_attr)
and m != n):
if random.random() < density:
if length_range is None:
''' calculates the euclidean norm and round it
to an integer '''
length = round((((self.get_node(n).get_attr('locationx') -
self.get_node(m).get_attr('locationx')) ** 2 +
(self.get_node(n).get_attr('locationy') -
self.get_node(m).get_attr('locationy')) ** 2) ** 0.5), 0)
self.add_edge(m, n, cost = int(length), **edge_format)
if add_labels:
self.set_edge_attr(m, n, 'label', str(int(length)))
else:
self.add_edge(m, n, **edge_format)
else:
print("Must set either degree range or density")
def page_rank(self, damping_factor=0.85, max_iterations=100,
min_delta=0.00001):
'''
API:
page_rank(self, damping_factor=0.85, max_iterations=100,
min_delta=0.00001)
Description:
Compute and return the page-rank of a directed graph.
This function was originally taken from here and modified for this
graph class: http://code.google.com/p/python-graph/source/browse/
trunk/core/pygraph/algorithms/pagerank.py
Input:
damping_factor: Damping factor.
max_iterations: Maximum number of iterations.
min_delta: Smallest variation required to have a new iteration.
Pre:
Graph should be a directed graph.
Return:
Returns dictionary of page-ranks. Keys are node names, values are
corresponding page-ranks.
'''
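        # The fixed point computed below is the standard PageRank recurrence
        # for damping factor d and N nodes:
        #   PR(v) = (1-d)/N + d * sum over arcs (u,v) of PR(u)/outdegree(u)
        # Hedged usage sketch (node names are made up):
        #   g = Graph(type=DIRECTED_GRAPH)
        #   g.add_edge('a', 'b'); g.add_edge('b', 'a'); g.add_edge('c', 'a')
        #   ranks = g.page_rank()   # e.g. ranks['a'] > ranks['c']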
nodes = self.get_node_list()
graph_size = len(nodes)
if graph_size == 0:
return {}
#value for nodes without inbound links
min_value = old_div((1.0-damping_factor),graph_size)
        # initialize the page rank dict with 1/N for all nodes
pagerank = dict.fromkeys(nodes, old_div(1.0,graph_size))
for _ in range(max_iterations):
            diff = 0  # total difference compared to last iteration
# computes each node PageRank based on inbound links
for node in nodes:
rank = min_value
for referring_page in self.get_in_neighbors(node):
rank += (damping_factor * pagerank[referring_page] /
len(self.get_neighbors(referring_page)))
diff += abs(pagerank[node] - rank)
pagerank[node] = rank
#stop if PageRank has converged
if diff < min_delta:
break
return pagerank
def get_degrees(self):
'''
API:
            get_degrees(self)
Description:
Returns degrees of nodes in dictionary format.
Return:
Returns a dictionary of node degrees. Keys are node names, values
are corresponding degrees.
'''
degree = {}
if self.attr['type'] is not DIRECTED_GRAPH:
for n in self.get_node_list():
degree[n] = len(self.get_neighbors(n))
return degree
        else:
            for n in self.get_node_list():
                degree[n] = (len(self.get_in_neighbors(n)) +
                             len(self.get_out_neighbors(n)))
            return degree
def get_in_degrees(self):
'''
API:
            get_in_degrees(self)
        Description:
            Returns in-degrees of nodes in dictionary format.
        Return:
            Returns a dictionary of node in-degrees. Keys are node names,
            values are corresponding in-degrees.
'''
degree = {}
if self.attr['type'] is not DIRECTED_GRAPH:
print('This function only works for directed graphs')
return
for n in self.get_node_list():
degree[n] = len(self.get_in_neighbors(n))
return degree
def get_out_degrees(self):
'''
API:
            get_out_degrees(self)
        Description:
            Returns out-degrees of nodes in dictionary format.
        Return:
            Returns a dictionary of node out-degrees. Keys are node names,
            values are corresponding out-degrees.
'''
degree = {}
if self.attr['type'] is not DIRECTED_GRAPH:
print('This function only works for directed graphs')
return
for n in self.get_node_list():
degree[n] = len(self.get_out_neighbors(n))
return degree
def get_diameter(self):
'''
API:
get_diameter(self)
Description:
            Returns the diameter of the graph. Diameter is defined as follows.
            distance(n,m): shortest unweighted path from n to m
            eccentricity(n) = $\max _m distance(n,m)$
            diameter = $\max _n eccentricity(n) = \max _n \max _m distance(n,m)$
Return:
Returns diameter of the graph.
'''
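        # Tiny worked example (illustrative): in the undirected path graph
        # 0-1-2, distance(0,2) = 2 is the largest pairwise distance, so this
        # method would return 2.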
if self.attr['type'] is not UNDIRECTED_GRAPH:
print('This function only works for undirected graphs')
return
        diameter = 'infinity'
        for n in self.get_node_list():
            eccentricity_n = 0
for m in self.get_node_list():
path_n_m = self.search(n, destination = m, algo = 'BFS')
if path_n_m is None:
# this indicates there is no path from n to m, no diameter
# is defined, since the graph is not connected, return
# 'infinity'
return 'infinity'
distance_n_m = len(path_n_m)-1
if distance_n_m > eccentricity_n:
eccentricity_n = distance_n_m
if diameter == 'infinity' or eccentricity_n > diameter:
diameter = eccentricity_n
return diameter
def create_cluster(self, node_list, cluster_attrs={}, node_attrs={}):
'''
API:
create_cluster(self, node_list, cluster_attrs, node_attrs)
Description:
            Creates a cluster from the nodes given in the node list.
Input:
node_list: List of nodes in the cluster.
cluster_attrs: Dictionary of cluster attributes, see Dot language
                grammar documentation for details.
node_attrs: Dictionary of node attributes. It will overwrite
previous attributes of the nodes in the cluster.
Post:
A cluster will be created. Attributes of the nodes in the cluster
may change.
'''
if 'name' in cluster_attrs:
            if cluster_attrs['name'] in self.cluster:
raise Exception('A cluster with name %s already exists!' %cluster_attrs['name'])
else:
name = cluster_attrs['name']
else:
name = 'c%d' %self.attr['cluster_count']
self.attr['cluster_count'] += 1
cluster_attrs['name'] = name
self.cluster[name] = {'node_list':node_list,
'attrs':copy.deepcopy(cluster_attrs),
'node_attrs':copy.deepcopy(node_attrs)}
class DisjointSet(Graph):
'''
Disjoint set data structure. Inherits Graph class.
'''
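    # Hedged usage sketch (items are illustrative):
    #   ds = DisjointSet()
    #   ds.add([1, 2, 3])
    #   ds.add([4, 5])
    #   ds.union(1, 4)
    #   ds.find(2) == ds.find(5)   # True after the union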
def __init__(self, optimize = True, **attrs):
'''
API:
__init__(self, optimize = True, **attrs):
Description:
Class constructor.
Input:
optimize: Optimizes find() if True.
attrs: Graph attributes.
Post:
self.optimize will be updated.
'''
attrs['type'] = DIRECTED_GRAPH
Graph.__init__(self, **attrs)
self.sizes = {}
self.optimize = optimize
def add(self, aList):
'''
API:
add(self, aList)
Description:
Adds items in the list to the set.
Input:
aList: List of items.
Post:
self.sizes will be updated.
'''
self.add_node(aList[0])
for i in range(1, len(aList)):
self.add_edge(aList[i], aList[0])
self.sizes[aList[0]] = len(aList)
def union(self, i, j):
'''
API:
union(self, i, j):
Description:
Finds sets of i and j and unites them.
Input:
i: Item.
j: Item.
Post:
self.sizes will be updated.
'''
roots = (self.find(i), self.find(j))
if roots[0] == roots[1]:
return False
if self.sizes[roots[0]] <= self.sizes[roots[1]] or not self.optimize:
self.add_edge(roots[0], roots[1])
self.sizes[roots[1]] += self.sizes[roots[0]]
return True
else:
self.add_edge(roots[1], roots[0])
self.sizes[roots[0]] += self.sizes[roots[1]]
return True
def find(self, i):
'''
API:
find(self, i)
Description:
Returns root of set that has i.
Input:
i: Item.
Return:
Returns root of set that has i.
'''
current = i
edge_list = []
while len(self.get_neighbors(current)) != 0:
successor = self.get_neighbors(current)[0]
edge_list.append((current, successor))
current = successor
if self.optimize:
for e in edge_list:
if e[1] != current:
self.del_edge((e[0], e[1]))
self.add_edge(e[0], current)
return current
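    # Illustrative usage of DisjointSet (a minimal sketch; the item names
    # below are assumptions, not part of the original module):
    #   ds = DisjointSet()
    #   ds.add(['a', 'b'])
    #   ds.add(['c'])
    #   ds.union('a', 'c')
    #   ds.find('b') == ds.find('c')   # True once the two sets are united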
if __name__ == '__main__':
G = Graph(type = UNDIRECTED_GRAPH, splines = 'true', K = 1.5)
#G.random(numnodes = 20, Euclidean = True, seedInput = 11,
# add_labels = False,
# scale = 10,
# scale_cost = 10,
# #degree_range = (2, 4),
# #length_range = (1, 10)
# )
#page_ranks = sorted(G.page_rank().iteritems(), key=operator.itemgetter(1))
#page_ranks.reverse()
#for i in page_ranks:
# print i #G = Graph(type = UNDIRECTED_GRAPH, splines = 'true', K = 1.5)
G.random(numnodes = 10, Euclidean = True, seedInput = 13,
add_labels = True,
scale = 10,
scale_cost = 10,
#degree_range = (2, 4),
#length_range = (1, 10)
)
G.set_display_mode('matplotlib')
G.display()
#G.dfs(0)
G.search(0, display = 'matplotlib', algo = 'Prim')
#G.minimum_spanning_tree_kruskal()
| epl-1.0 | -4,642,684,740,455,221,000 | 38.827773 | 125 | 0.50736 | false |
mhrivnak/crane | tests/views/test_search.py | 3 | 1513 | import httplib
import json
import mock
from crane.search.base import SearchResult, SearchBackend
import base
class TestSearch(base.BaseCraneAPITest):
def test_no_query(self):
response = self.test_client.get('/v1/search')
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
def test_empty_query(self):
response = self.test_client.get('/v1/search?q=')
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
@mock.patch('crane.search.backend.search', spec_set=True)
def test_with_results(self, mock_search):
mock_search.return_value = [
SearchBackend._format_result(SearchResult('rhel', 'Red Hat Enterprise Linux')),
]
response = self.test_client.get('/v1/search?q=rhel')
data = json.loads(response.data)
self.assertDictEqual(data, {
'query': 'rhel',
'num_results': 1,
'results': mock_search.return_value
})
@mock.patch('crane.search.backend.search', spec_set=True)
def test_num_results(self, mock_search):
mock_search.return_value = [
SearchBackend._format_result(SearchResult('rhel', 'Red Hat Enterprise Linux')),
SearchBackend._format_result(SearchResult('foo', 'Foo')),
SearchBackend._format_result(SearchResult('bar', 'Bar')),
]
response = self.test_client.get('/v1/search?q=rhel')
data = json.loads(response.data)
self.assertEqual(data['num_results'], 3)
| gpl-2.0 | 5,744,869,896,480,165,000 | 30.520833 | 91 | 0.637145 | false |
aaltay/beam | sdks/python/apache_beam/metrics/execution.py | 1 | 11781 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# cython: language_level=3
"""
This module is for internal use only; no backwards-compatibility guarantees.
The classes in this file keep shared state, and organize metrics information.
Available classes:
- MetricKey - Internal key for a metric.
- MetricResult - Current status of a metric's updates/commits.
- _MetricsEnvironment - Keeps track of MetricsContainer and other metrics
information for every single execution working thread.
- MetricsContainer - Holds the metrics of a single step and a single
unit-of-commit (bundle).
"""
# pytype: skip-file
from __future__ import absolute_import
import threading
from builtins import object
from typing import TYPE_CHECKING
from typing import Any
from typing import Dict
from typing import FrozenSet
from typing import Optional
from typing import Type
from typing import Union
from typing import cast
from apache_beam.metrics import monitoring_infos
from apache_beam.metrics.cells import CounterCell
from apache_beam.metrics.cells import DistributionCell
from apache_beam.metrics.cells import GaugeCell
from apache_beam.runners.worker import statesampler
from apache_beam.runners.worker.statesampler import get_current_tracker
if TYPE_CHECKING:
from apache_beam.metrics.cells import GaugeData
from apache_beam.metrics.cells import DistributionData
from apache_beam.metrics.cells import MetricCell
from apache_beam.metrics.cells import MetricCellFactory
from apache_beam.metrics.metricbase import MetricName
from apache_beam.portability.api import metrics_pb2
class MetricKey(object):
"""Key used to identify instance of metric cell.
Metrics are internally keyed by the name of the step they're associated with,
the name and namespace (if it is a user defined metric) of the metric,
and any extra label metadata added by the runner specific metric collection
service.
"""
def __init__(self, step, metric, labels=None):
"""Initializes ``MetricKey``.
Args:
step: A string with the step this metric cell is part of.
metric: A ``MetricName`` namespace+name that identifies a metric.
labels: An arbitrary set of labels that also identifies the metric.
"""
self.step = step
self.metric = metric
self.labels = labels if labels else dict()
def __eq__(self, other):
return (
self.step == other.step and self.metric == other.metric and
self.labels == other.labels)
def __hash__(self):
return hash((self.step, self.metric, frozenset(self.labels)))
def __repr__(self):
return 'MetricKey(step={}, metric={}, labels={})'.format(
self.step, self.metric, self.labels)
class MetricResult(object):
"""Keeps track of the status of a metric within a single bundle.
It contains the physical and logical updates to the metric. Physical updates
are updates that have not necessarily been committed, but that have been made
during pipeline execution. Logical updates are updates that have been
committed.
Attributes:
key: A ``MetricKey`` that identifies the metric and bundle of this result.
committed: The committed updates of the metric. This attribute's type is
of metric type result (e.g. int, DistributionResult, GaugeResult).
attempted: The logical updates of the metric. This attribute's type is that
of metric type result (e.g. int, DistributionResult, GaugeResult).
"""
def __init__(self, key, committed, attempted):
"""Initializes ``MetricResult``.
Args:
key: A ``MetricKey`` object.
committed: Metric data that has been committed (e.g. logical updates)
attempted: Metric data that has been attempted (e.g. physical updates)
"""
self.key = key
self.committed = committed
self.attempted = attempted
def __eq__(self, other):
return (
self.key == other.key and self.committed == other.committed and
self.attempted == other.attempted)
def __hash__(self):
return hash((self.key, self.committed, self.attempted))
def __repr__(self):
return 'MetricResult(key={}, committed={}, attempted={})'.format(
self.key, str(self.committed), str(self.attempted))
def __str__(self):
return repr(self)
@property
def result(self):
"""Short-hand for falling back to attempted metrics if it seems that
committed was not populated (e.g. due to not being supported on a given
runner"""
return self.committed if self.committed else self.attempted
class _MetricsEnvironment(object):
"""Holds the MetricsContainer for every thread and other metric information.
This class is not meant to be instantiated, instead being used to keep
track of global state.
"""
def current_container(self):
"""Returns the current MetricsContainer."""
sampler = statesampler.get_current_tracker()
if sampler is None:
return None
return sampler.current_state().metrics_container
def process_wide_container(self):
"""Returns the MetricsContainer for process wide metrics, e.g. memory."""
return PROCESS_WIDE_METRICS_CONTAINER
MetricsEnvironment = _MetricsEnvironment()
class _TypedMetricName(object):
"""Like MetricName, but also stores the cell type of the metric."""
def __init__(
self,
cell_type, # type: Union[Type[MetricCell], MetricCellFactory]
metric_name # type: Union[str, MetricName]
):
# type: (...) -> None
self.cell_type = cell_type
self.metric_name = metric_name
if isinstance(metric_name, str):
self.fast_name = metric_name
else:
self.fast_name = metric_name.fast_name()
# Cached for speed, as this is used as a key for every counter update.
self._hash = hash((cell_type, self.fast_name))
def __eq__(self, other):
return self is other or (
self.cell_type == other.cell_type and self.fast_name == other.fast_name)
def __hash__(self):
return self._hash
def __str__(self):
return '%s %s' % (self.cell_type, self.metric_name)
def __reduce__(self):
return _TypedMetricName, (self.cell_type, self.metric_name)
_DEFAULT = None # type: Any
class MetricUpdater(object):
"""A callable that updates the metric as quickly as possible."""
def __init__(
self,
cell_type, # type: Union[Type[MetricCell], MetricCellFactory]
metric_name, # type: Union[str, MetricName]
default_value=None,
process_wide=False):
self.process_wide = process_wide
self.typed_metric_name = _TypedMetricName(cell_type, metric_name)
self.default_value = default_value
def __call__(self, value=_DEFAULT):
# type: (Any) -> None
if value is _DEFAULT:
if self.default_value is _DEFAULT:
raise ValueError(
'Missing value for update of %s' % self.typed_metric_name.fast_name)
value = self.default_value
if self.process_wide:
MetricsEnvironment.process_wide_container().get_metric_cell(
self.typed_metric_name).update(value)
else:
tracker = get_current_tracker()
if tracker is not None:
tracker.update_metric(self.typed_metric_name, value)
def __reduce__(self):
return MetricUpdater, (
self.typed_metric_name.cell_type,
self.typed_metric_name.metric_name,
self.default_value)
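# A minimal sketch of the intended hot path (the metric name below is an
# assumption, not part of this module):
#   inc_counter = MetricUpdater(CounterCell, 'my_counter', default_value=1)
#   inc_counter()    # bumps the counter by the default value
#   inc_counter(5)   # bumps the counter by 5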
class MetricsContainer(object):
"""Holds the metrics of a single step and a single bundle.
Or the metrics associated with the process/SDK harness. I.e. memory usage.
"""
def __init__(self, step_name):
self.step_name = step_name
self.lock = threading.Lock()
self.metrics = dict() # type: Dict[_TypedMetricName, MetricCell]
def get_counter(self, metric_name):
# type: (MetricName) -> CounterCell
return cast(
CounterCell,
self.get_metric_cell(_TypedMetricName(CounterCell, metric_name)))
def get_distribution(self, metric_name):
# type: (MetricName) -> DistributionCell
return cast(
DistributionCell,
self.get_metric_cell(_TypedMetricName(DistributionCell, metric_name)))
def get_gauge(self, metric_name):
# type: (MetricName) -> GaugeCell
return cast(
GaugeCell,
self.get_metric_cell(_TypedMetricName(GaugeCell, metric_name)))
def get_metric_cell(self, typed_metric_name):
# type: (_TypedMetricName) -> MetricCell
cell = self.metrics.get(typed_metric_name, None)
if cell is None:
with self.lock:
cell = self.metrics[typed_metric_name] = typed_metric_name.cell_type()
return cell
def get_cumulative(self):
# type: () -> MetricUpdates
"""Return MetricUpdates with cumulative values of all metrics in container.
This returns all the cumulative values for all metrics.
"""
counters = {
MetricKey(self.step_name, k.metric_name): v.get_cumulative()
for k,
v in self.metrics.items() if k.cell_type == CounterCell
}
distributions = {
MetricKey(self.step_name, k.metric_name): v.get_cumulative()
for k,
v in self.metrics.items() if k.cell_type == DistributionCell
}
gauges = {
MetricKey(self.step_name, k.metric_name): v.get_cumulative()
for k,
v in self.metrics.items() if k.cell_type == GaugeCell
}
return MetricUpdates(counters, distributions, gauges)
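  # Shape of the returned value, sketched with assumed step and metric names:
  #   MetricUpdates(
  #       counters={MetricKey('step1', MetricName('ns', 'hits')): 7},
  #       distributions={...}, gauges={...})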
def to_runner_api(self):
return [
cell.to_runner_api_user_metric(key.metric_name) for key,
cell in self.metrics.items()
]
def to_runner_api_monitoring_infos(self, transform_id):
# type: (str) -> Dict[FrozenSet, metrics_pb2.MonitoringInfo]
"""Returns a list of MonitoringInfos for the metrics in this container."""
with self.lock:
items = list(self.metrics.items())
all_metrics = [
cell.to_runner_api_monitoring_info(key.metric_name, transform_id)
for key,
cell in items
]
return {
monitoring_infos.to_key(mi): mi
for mi in all_metrics if mi is not None
}
def reset(self):
# type: () -> None
for metric in self.metrics.values():
metric.reset()
def __reduce__(self):
raise NotImplementedError
PROCESS_WIDE_METRICS_CONTAINER = MetricsContainer(None)
class MetricUpdates(object):
"""Contains updates for several metrics.
A metric update is an object containing information to update a metric.
For Distribution metrics, it is DistributionData, and for Counter metrics,
it's an int.
"""
def __init__(
self,
counters=None, # type: Optional[Dict[MetricKey, int]]
distributions=None, # type: Optional[Dict[MetricKey, DistributionData]]
gauges=None # type: Optional[Dict[MetricKey, GaugeData]]
):
# type: (...) -> None
"""Create a MetricUpdates object.
Args:
counters: Dictionary of MetricKey:MetricUpdate updates.
distributions: Dictionary of MetricKey:MetricUpdate objects.
gauges: Dictionary of MetricKey:MetricUpdate objects.
"""
self.counters = counters or {}
self.distributions = distributions or {}
self.gauges = gauges or {}
| apache-2.0 | 1,169,649,252,800,144,600 | 31.907821 | 80 | 0.691962 | false |
AlexanderPease/viv | app/ui_methods.py | 1 | 2070 | # Just for ordinalizing the number of district
def ordinal(numb):
if type(numb) is str:
numb = int(float(numb))
if numb < 20: #determining suffix for < 20
if numb == 1:
suffix = 'st'
elif numb == 2:
suffix = 'nd'
elif numb == 3:
suffix = 'rd'
else:
suffix = 'th'
else: #determining suffix for > 20
tens = str(numb)
tens = tens[-2]
unit = str(numb)
unit = unit[-1]
if tens == "1":
suffix = "th"
else:
if unit == "1":
suffix = 'st'
elif unit == "2":
suffix = 'nd'
elif unit == "3":
suffix = 'rd'
else:
suffix = 'th'
return str(numb)+ suffix
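# Quick illustration of ordinal() (added example; the values are assumptions):
#   ordinal(1) -> '1st', ordinal(11) -> '11th',
#   ordinal(22) -> '22nd', ordinal(103) -> '103rd'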
def list_to_comma_delimited_string(list_arg):
"""
Takes a list and turns into comma-delimited string.
Used for turning Group.invited_emails into correct form for template display.
Args:
        list_arg: A list, ex: ["[email protected]", "[email protected]"] or Group.users
Returns
A string , ex: "[email protected], [email protected]"
"""
long_string = ""
for item in list_arg:
long_string += str(item) + ", "
long_string = long_string[0:-2] # Remove last ", "
return long_string
def get_domain(email):
"""
Returns just the domain name of an email address
Ex: reply.craigslist.com from [email protected]
"""
return email.split('@')[1]
def email_obscure(email):
"""
Obscures an email address
Args:
email: A string, ex: [email protected]
Returns
A string , ex: t*******@alexanderpease.com
"""
first_letter = email[0]
string_split = email.split('@')
obscured = ""
while len(obscured) < len(string_split[0])-1:
obscured = obscured + "*"
return first_letter + obscured + "@" + string_split[1]
def encode(text):
"""
For printing unicode characters
"""
return text.encode('utf-8') | gpl-3.0 | -6,869,473,597,955,327,000 | 24.256098 | 81 | 0.523671 | false |
geceo/django-gallery | general/models.py | 1 | 1025 | from django.db import models
from django import forms
from django.forms import ModelForm
# Create your models here.
class Settings(models.Model):
# General gallery informations
general_title = models.CharField(max_length=255)
intro = models.TextField(blank=True)
url = models.CharField(max_length=255)
# Facebook connector
facebook_appid = models.CharField(blank=True,max_length=255)
facebook_appsecret = models.CharField(blank=True,max_length=255)
facebook_profile_id = models.CharField(blank=True,max_length=255)
facebook_canvas_url = models.CharField(blank=True,max_length=255)
# Twitter connector
twitter_account = models.CharField(max_length=255)
twitter_consumer_key = models.CharField(max_length=255)
twitter_consumer_secret = models.CharField(max_length=255)
twitter_access_token = models.CharField(max_length=255)
twitter_access_token_secret = models.CharField(max_length=255)
class SettingsForm(ModelForm):
class Meta:
model = Settings
| bsd-3-clause | 966,780,564,379,091,100 | 36.962963 | 70 | 0.743415 | false |
pida42/Zabbix-Addons | App-Servers/Memcached/getMemcachedInfo.py | 1 | 3120 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import getopt, sys
from telnetlib import Telnet
# default memcached server to check
memcachedServer = '127.0.0.1'
memcachedPort = '11211'
ITEMS = (
'bytes',
'cmd_get',
'cmd_set',
'curr_items',
'curr_connections',
'evictions',
'limit_maxbytes',
'uptime',
'get_hits',
'get_misses',
'version',
'bytes_read',
'bytes_written',
)
################################################################################
### This is based on Enrico Tröger's sources from:
### http://www.pending.io/yet-another-zabbix-template-to-monitor-memcache/
### but I chose to make it with dictionaries instead of objects.
################################################################################
class MemcachedStatsReader(object):
#----------------------------------------------------------------------
def __init__(self, server, port):
self._server = server
self._port = port
self._stats_raw = None
self._stats = None
#----------------------------------------------------------------------
def read(self):
self._read_stats()
self._parse_stats()
return self._stats
#----------------------------------------------------------------------
def _read_stats(self):
connection = Telnet(self._server, self._port, timeout=30)
connection.write('stats\n')
connection.write('quit\n')
self._stats_raw = connection.read_all()
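        # The raw payload is a series of lines such as "STAT cmd_get 1234"
        # terminated by "END" (illustrative; the exact set of stats depends
        # on the memcached version).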
#----------------------------------------------------------------------
def _parse_stats(self):
self._stats = {}
for line in self._stats_raw.splitlines():
if not line.startswith('STAT'):
continue
parts = line.split()
if not parts[1] in ITEMS:
continue
index = parts[1]
self._stats[index] = parts[2]
try:
ratio = float (self._stats["get_hits"]) * 100 / float (self._stats["cmd_get"])
except ZeroDivisionError:
ratio = 0.0
self._stats["ratio"] = round (ratio, 2)
try:
usage = float (self._stats["bytes"]) * 100 / float (self._stats["limit_maxbytes"])
except ZeroDivisionError:
usage = 0.0
self._stats["usage"] = round (usage, 2)
#----------------------------------------------------------------------
def Usage ():
print "Usage: getMemcachedInfo.py -h 127.0.0.1 -p 11211 -a <item>"
sys.exit(2)
def main(host, port):
getInfo = "ratio"
argv = sys.argv[1:]
try:
opts, args = getopt.getopt(argv, "h:p:a:")
for opt,arg in opts:
if opt == '-h':
host = arg
if opt == '-p':
port = arg
if opt == '-a':
getInfo = arg
except:
Usage()
data = MemcachedStatsReader(host, port)
items = data.read()
try:
print items[getInfo]
except:
print "Not valid item."
if __name__ == '__main__':
main(memcachedServer, memcachedPort)
| mit | 4,151,833,745,684,230,000 | 28.149533 | 94 | 0.445976 | false |
sergiusens/snapcraft | tests/integration/__init__.py | 1 | 27766 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import distutils.util
import fileinput
import glob
import os
import re
import shutil
import subprocess
import sys
import time
import uuid
from distutils import dir_util
from textwrap import dedent
from typing import Callable, List, Union
import fixtures
import pexpect
from pexpect import popen_spawn
import requests
import testtools
from testtools import content
from testtools.matchers import MatchesRegex
from snapcraft import yaml_utils
from tests import fixture_setup, os_release, subprocess_utils
from tests.integration import platform
class RegisterError(Exception):
pass
class TestCase(testtools.TestCase):
def setUp(self):
super().setUp()
if os.getenv("SNAPCRAFT_FROM_SNAP", False):
self.snapcraft_command = "/snap/bin/snapcraft"
elif os.getenv("SNAPCRAFT_FROM_DEB", False):
self.snapcraft_command = "/usr/bin/snapcraft"
self.snapcraft_parser_command = "/usr/bin/snapcraft-parser"
elif os.getenv("VIRTUAL_ENV") and sys.platform == "win32":
self.snapcraft_command = ["python", "-m", "snapcraft.cli.__main__"]
self.snapcraft_parser_command = os.path.join(
os.getenv("VIRTUAL_ENV"), "bin", "snapcraft-parser"
)
elif os.getenv("VIRTUAL_ENV"):
self.snapcraft_command = os.path.join(
os.getenv("VIRTUAL_ENV"), "bin", "snapcraft"
)
self.snapcraft_parser_command = os.path.join(
os.getenv("VIRTUAL_ENV"), "bin", "snapcraft-parser"
)
elif os.getenv("SNAPCRAFT_FROM_BREW", False):
self.snapcraft_command = "/usr/local/bin/snapcraft"
else:
raise EnvironmentError(
"snapcraft is not setup correctly for testing. Either set "
"SNAPCRAFT_FROM_SNAP, SNAPCRAFT_FROM_DEB or "
"SNAPCRAFT_FROM_BREW to run from either the snap, deb or "
"brew, or make sure your venv is properly setup as described "
"in HACKING.md."
)
if os.getenv("SNAPCRAFT_FROM_SNAP", False):
self.patchelf_command = "/snap/snapcraft/current/usr/bin/patchelf"
self.execstack_command = "/snap/snapcraft/current/usr/sbin/execstack"
else:
self.patchelf_command = "patchelf"
self.execstack_command = "execstack"
self.snaps_dir = os.path.join(os.path.dirname(__file__), "snaps")
temp_cwd_fixture = fixture_setup.TempCWD()
self.useFixture(temp_cwd_fixture)
self.path = temp_cwd_fixture.path
# Use a separate path for XDG dirs, or changes there may be detected as
# source changes.
self.xdg_path = self.useFixture(fixtures.TempDir()).path
self.useFixture(fixture_setup.TempXDG(self.xdg_path))
# Use a dumb terminal for tests
self.useFixture(fixtures.EnvironmentVariable("TERM", "dumb"))
# Disable Sentry reporting for tests, otherwise they'll hang waiting
# for input
self.useFixture(
fixtures.EnvironmentVariable("SNAPCRAFT_ENABLE_ERROR_REPORTING", "false")
)
# Don't let the managed host variable leak into tests
self.useFixture(fixtures.EnvironmentVariable("SNAPCRAFT_MANAGED_HOST"))
# Note that these directories won't exist when the test starts,
# they might be created after calling the snapcraft command on the
# project dir.
self.parts_dir = "parts"
self.stage_dir = "stage"
self.prime_dir = "prime"
self.deb_arch = platform.get_deb_arch()
self.arch_triplet = platform.get_arch_triplet()
self.distro_series = os_release.get_version_codename()
def run_snapcraft(
self,
command: Union[str, List[str]] = None,
project_dir: str = None,
debug: bool = True,
pre_func: Callable[[], None] = lambda: None,
env=None,
) -> None:
if project_dir:
self.copy_project_to_cwd(project_dir)
if command is None:
command = []
if isinstance(command, str):
command = [command]
snapcraft_command = self.snapcraft_command
if isinstance(snapcraft_command, str):
snapcraft_command = [snapcraft_command]
if debug:
snapcraft_command.append("-d")
try:
pre_func()
snapcraft_output = subprocess.check_output(
snapcraft_command + command,
stderr=subprocess.STDOUT,
universal_newlines=True,
env=env,
)
except subprocess.CalledProcessError as e:
self.addDetail("command", content.text_content(str(self.snapcraft_command)))
self.addDetail("output", content.text_content(e.output))
raise
except FileNotFoundError:
self.addDetail("command", content.text_content(str(self.snapcraft_command)))
raise
if not os.getenv("SNAPCRAFT_IGNORE_APT_AUTOREMOVE", False):
self.addCleanup(self.run_apt_autoremove)
return snapcraft_output
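    # Typical call pattern in a test (illustrative; the project name is an
    # assumption):
    #   output = self.run_snapcraft(["pull"], project_dir="my-project")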
def spawn_snapcraft(self, command: Union[str, List[str]]):
snapcraft_command = self.snapcraft_command
if isinstance(snapcraft_command, str):
snapcraft_command = [snapcraft_command]
try:
return popen_spawn.PopenSpawn(" ".join(snapcraft_command + command))
except FileNotFoundError:
self.addDetail("command", content.text_content(str(snapcraft_command)))
def run_snapcraft_parser(self, arguments):
try:
snapcraft_output = subprocess.check_output(
[self.snapcraft_parser_command, "-d"] + arguments,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
except subprocess.CalledProcessError as e:
self.addDetail("output", content.text_content(e.output))
raise
return snapcraft_output
def run_apt_autoremove(self):
if sys.platform == "win32":
return
deb_env = os.environ.copy()
deb_env.update(
{"DEBIAN_FRONTEND": "noninteractive", "DEBCONF_NONINTERACTIVE_SEEN": "true"}
)
try:
autoremove_output = subprocess.check_output(
"sudo apt-get autoremove -y".split(),
stderr=subprocess.STDOUT,
env=deb_env,
)
self.addDetail(
"apt-get autoremove output",
content.text_content(autoremove_output.decode("utf-8")),
)
except FileNotFoundError as e:
self.addDetail("apt-get autoremove error", content.text_content(str(e)))
except subprocess.CalledProcessError as e:
self.addDetail("apt-get autoremove error", content.text_content(str(e)))
self.addDetail(
"apt-get autoremove output",
content.text_content(e.output.decode("utf-8")),
)
if os.getenv("SNAPCRAFT_APT_AUTOREMOVE_CHECK_FAIL", False):
raise
def copy_project_to_cwd(self, project_dir: str) -> None:
# Because cwd already exists, shutil.copytree would raise
# FileExistsError. Use the lesser known distutils.dir_util.copy_tree
dir_util.copy_tree(
os.path.join(self.snaps_dir, project_dir), self.path, preserve_symlinks=True
)
def construct_yaml(
self,
name="test",
version="0.1",
summary="Simple test snap",
description="Something something",
grade=None,
architectures=None,
parts=dedent(
"""\
my-part:
plugin: nil
"""
),
build_packages="[]",
adopt_info=None,
):
snapcraft_yaml = {
"name": name,
"summary": summary,
"description": description,
"parts": yaml_utils.load(parts),
"build-packages": yaml_utils.load(build_packages),
}
if version:
snapcraft_yaml["version"] = version
if adopt_info:
snapcraft_yaml["adopt-info"] = adopt_info
if grade:
snapcraft_yaml["grade"] = grade
if architectures:
snapcraft_yaml["architectures"] = architectures
with open("snapcraft.yaml", "w") as f:
yaml_utils.dump(snapcraft_yaml, stream=f)
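    # With the defaults above, the emitted snapcraft.yaml is roughly
    # (an illustrative sketch):
    #   name: test
    #   version: '0.1'
    #   summary: Simple test snap
    #   description: Something something
    #   parts:
    #     my-part:
    #       plugin: nil
    #   build-packages: []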
def get_output_ignoring_non_zero_exit(self, binary, cwd=None):
# Executing the binaries exists > 0 on trusty.
# TODO investigate more to understand the cause.
try:
output = subprocess.check_output(binary, universal_newlines=True, cwd=cwd)
except subprocess.CalledProcessError as exception:
output = exception.output
return output
def set_stage_package_version(
self, snapcraft_yaml_path, part, package, version=None
):
return self.set_package_version(
"stage-packages", snapcraft_yaml_path, part, package, version
)
def set_build_package_version(
self, snapcraft_yaml_path, part, package, version=None
):
return self.set_package_version(
"build-packages", snapcraft_yaml_path, part, package, version
)
def set_package_version(
self, type_, snapcraft_yaml_path, part, package, version=None
):
# This doesn't handle complex package syntax.
with open(snapcraft_yaml_path) as snapcraft_yaml_file:
snapcraft_yaml = yaml_utils.load(snapcraft_yaml_file)
if part:
packages = snapcraft_yaml["parts"][part].get(type_, [])
else:
packages = snapcraft_yaml.get(type_, [])
for index, package_in_yaml in enumerate(packages):
if package_in_yaml.split("=")[0] == package:
if version is None:
version = get_package_version(
package, self.distro_series, self.deb_arch
)
packages[index] = "{}={}".format(package, version)
break
else:
self.fail("The part {} doesn't have a package {}".format(part, package))
with open(snapcraft_yaml_path, "w") as snapcraft_yaml_file:
yaml_utils.dump(snapcraft_yaml, stream=snapcraft_yaml_file)
return version
def set_build_package_architecture(
self, snapcraft_yaml_path, part, package, architecture
):
# This doesn't handle complex package syntax.
with open(snapcraft_yaml_path) as snapcraft_yaml_file:
snapcraft_yaml = yaml_utils.load(snapcraft_yaml_file)
packages = snapcraft_yaml["parts"][part]["build-packages"]
for index, package_in_yaml in enumerate(packages):
if package_in_yaml == package:
packages[index] = "{}:{}".format(package, architecture)
break
else:
self.fail("The part {} doesn't have a package {}".format(part, package))
with open(snapcraft_yaml_path, "w") as snapcraft_yaml_file:
yaml_utils.dump(snapcraft_yaml, stream=snapcraft_yaml_file)
class BzrSourceBaseTestCase(TestCase):
def setUp(self):
super().setUp()
if shutil.which("bzr") is None:
self.skipTest("bzr is not installed")
def init_source_control(self):
subprocess.check_call(
["bzr", "init", "."], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
)
subprocess.check_call(
["bzr", "whoami", "--branch", '"Example Dev <[email protected]>"']
)
def commit(self, message, unchanged=False):
command = ["bzr", "commit", "-m", message]
if unchanged:
command.append("--unchanged")
subprocess.check_call(
command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
)
def get_revno(self, path=None):
command = ["bzr", "revno", "-r", "-1"]
if path:
command.append(path)
return subprocess.check_output(command, universal_newlines=True).strip()
class GitSourceBaseTestCase(TestCase):
def setUp(self):
super().setUp()
if shutil.which("git") is None:
self.skipTest("git is not installed")
def init_source_control(self):
subprocess.check_call(["git", "init", "."], stdout=subprocess.DEVNULL)
subprocess.check_call(
["git", "config", "--local", "user.name", '"Example Dev"']
)
subprocess.check_call(
["git", "config", "--local", "user.email", "[email protected]"]
)
def add_file(self, file_path):
subprocess.check_call(["git", "add", file_path], stdout=subprocess.DEVNULL)
def commit(self, message, allow_empty=False):
command = ["git", "commit", "-m", message]
if allow_empty:
command.append("--allow-empty")
subprocess.check_call(command, stdout=subprocess.DEVNULL)
def tag(self, tag_name):
subprocess.check_call(
["git", "tag", "-a", "-m", tag_name, tag_name], stdout=subprocess.DEVNULL
)
def get_revno(self):
return subprocess_utils.call_with_output(
["git", "rev-list", "HEAD", "--max-count=1"]
)
class HgSourceBaseTestCase(TestCase):
def setUp(self):
super().setUp()
if shutil.which("hg") is None:
self.skipTest("mercurial is not installed")
def init_source_control(self):
subprocess.check_call(["hg", "init", "."])
def commit(self, message, file_):
subprocess.check_call(
["hg", "commit", "-m", message, "--user", '"Example Dev"', "-A", file_]
)
def get_revno(self, path=None):
command = ["hg", "log", "--template", '"{desc}"', "-r", "-1"]
if path:
command.extend(["--cwd", path])
return subprocess.check_output(command, universal_newlines=True).strip()
def get_id(self):
return subprocess_utils.call_with_output(["hg", "id"]).split()[0]
class SubversionSourceBaseTestCase(TestCase):
def setUp(self):
super().setUp()
if shutil.which("svn") is None:
self.skipTest("svn is not installed")
def init_source_control(self):
subprocess.check_call(["svnadmin", "create", "repo"], stdout=subprocess.DEVNULL)
def checkout(self, source, destination):
subprocess.check_call(
["svn", "checkout", source, destination], stdout=subprocess.DEVNULL
)
def add(self, file_path, cwd=None):
subprocess.check_call(
["svn", "add", file_path], stdout=subprocess.DEVNULL, cwd=cwd
)
def commit(self, message, cwd=None):
subprocess.check_call(
["svn", "commit", "-m", message], stdout=subprocess.DEVNULL, cwd=cwd
)
def update(self, cwd=None):
subprocess.check_call(["svn", "update"], stdout=subprocess.DEVNULL, cwd=cwd)
class StoreTestCase(TestCase):
def setUp(self):
super().setUp()
self.test_store = fixture_setup.TestStore()
self.useFixture(self.test_store)
self.useFixture(fixtures.EnvironmentVariable("SNAPCRAFT_TEST_INPUT", "1"))
def is_store_fake(self):
return (os.getenv("TEST_STORE") or "fake") == "fake"
def is_store_staging(self):
return os.getenv("TEST_STORE") == "staging"
def _conduct_login(self, process, email, password, expect_success) -> None:
process.expect_exact(
"Enter your Ubuntu One e-mail address and password." + os.linesep
)
process.expect_exact(
"If you do not have an Ubuntu One account, you can create one at "
"https://dashboard.snapcraft.io/openid/login" + os.linesep
)
process.expect_exact("Email: ")
process.sendline(email)
process.expect_exact("Password: ")
process.sendline(password)
if expect_success:
process.expect_exact(
"We strongly recommend enabling multi-factor authentication:"
)
def export_login(
self,
export_path,
email: str = None,
password: str = None,
expect_success: bool = True,
) -> None:
email = email or self.test_store.user_email
password = password or self.test_store.user_password
process = self.spawn_snapcraft(["export-login", export_path])
self._conduct_login(process, email, password, expect_success)
if expect_success:
process.expect("This exported login is not encrypted")
else:
process.expect("Authentication error: Failed to get unbound discharge.")
def login(self, email=None, password=None, expect_success=True):
email = email or self.test_store.user_email
password = password or self.test_store.user_password
process = self.spawn_snapcraft(["login"])
self._conduct_login(process, email, password, expect_success)
if expect_success:
process.expect_exact("Login successful.")
else:
process.expect("Authentication error: Failed to get unbound discharge.")
def logout(self):
output = self.run_snapcraft("logout")
expected = r".*Credentials cleared.\n.*"
self.assertThat(output, MatchesRegex(expected, flags=re.DOTALL))
def register(self, snap_name, private=False, wait=True):
command = ["register", snap_name]
if private:
command.append("--private")
process = self.spawn_snapcraft(command)
process.expect(r".*\[y/N\]: ")
process.sendline("y")
try:
process.expect_exact(
"Congrats! You are now the publisher of {!r}.".format(snap_name)
)
except pexpect.exceptions.EOF:
wait_error_regex = (
".*You must wait (\d+) seconds before trying to register your "
"next snap.*"
)
output = process.before.decode(sys.getfilesystemencoding())
match = re.search(wait_error_regex, output)
if wait and match:
time.sleep(int(match.group(1)))
# This could get stuck for ever if the user is registering
# other snaps in parallel.
self.register(snap_name, private, wait)
else:
raise RegisterError(output)
def register_key(self, key_name, email=None, password=None, expect_success=True):
email = email or self.test_store.user_email
password = password or self.test_store.user_password
process = self.spawn_snapcraft(["register-key", key_name])
process.expect_exact(
"Enter your Ubuntu One e-mail address and password." + os.linesep
)
process.expect_exact(
"If you do not have an Ubuntu One account, you can create one at "
"https://dashboard.snapcraft.io/openid/login" + os.linesep
)
process.expect_exact("Email: ")
process.sendline(email)
process.expect_exact("Password: ")
process.sendline(password)
if expect_success:
process.expect_exact(
"We strongly recommend enabling multi-factor authentication:"
)
process.expect(
r'Done\. The key "{}" .* may be used to sign your '
r"assertions\.".format(key_name)
)
else:
process.expect_exact(
"Cannot continue without logging in successfully: "
"Authentication error: Failed to get unbound discharge"
)
process.expect(pexpect.EOF)
return process.wait()
def list_keys(self, expected_keys):
process = self.spawn_snapcraft(["list-keys"])
for enabled, key_name, key_id in expected_keys:
process.expect(
"{} *{} *{}".format("\*" if enabled else "-", key_name, key_id)
)
process.expect(pexpect.EOF)
return process.wait()
def list_registered(self, expected_snaps):
process = self.spawn_snapcraft(["list-registered"])
for name, visibility, price, notes in expected_snaps:
# Ignores 'since' to avoid confusion on fake and actual stores.
process.expect(
"{} *[T:\-\d]+Z *{} *{} *{}".format(name, visibility, price, notes)
)
process.expect(pexpect.EOF)
return process.wait()
def get_unique_name(self, prefix=""):
"""Return a unique snap name.
It uses a UUIDv4 to create unique names and limits its full size
to 40 chars (as defined in the snap specification).
"""
unique_id = uuid.uuid4().int
# Do not change the test-snapcraft- prefix. Ensure that you
# notify the store team if you need to use a different value when
# working with the production store.
return "test-snapcraft-{}{}".format(prefix, unique_id)[:40]
def get_unique_version(self):
"""Return a unique snap version.
It uses a UUIDv4 to create unique version and limits its full size
to 32 chars (as defined in the snap specification).
"""
unique_id = uuid.uuid4().int
return "{}".format(unique_id)[:32]
def update_name_arch_and_version(self, name=None, arch=None, version=None):
if name is None:
name = self.get_unique_name()
if version is None:
version = self.get_unique_version()
if arch is None:
arch = "amd64"
for line in fileinput.input(
os.path.join("snap", "snapcraft.yaml"), inplace=True
):
if "name: " in line:
print("name: {}".format(name))
elif "version: " in line:
print("version: {}".format(version))
elif "architectures: " in line:
print("architectures: [{}]".format(arch))
else:
print(line)
def update_name_and_version(self, name=None, version=None):
if name is None:
name = self.get_unique_name()
if version is None:
version = self.get_unique_version()
for line in fileinput.input(
os.path.join("snap", "snapcraft.yaml"), inplace=True
):
if "name: " in line:
print("name: {}".format(name))
elif "version: " in line:
print("version: {}".format(version))
else:
print(line)
def gated(self, snap_name, expected_validations=[], expected_output=None):
process = self.spawn_snapcraft(["gated", snap_name])
if expected_output:
process.expect(expected_output)
else:
for name, revision in expected_validations:
process.expect("{} *{}".format(name, revision))
process.expect(pexpect.EOF)
return process.wait()
def validate(self, snap_name, validations, expected_error=None):
process = self.spawn_snapcraft(["validate", snap_name] + validations)
if expected_error:
process.expect(expected_error)
else:
for v in validations:
process.expect("Signing validations assertion for {}".format(v))
process.expect(pexpect.EOF)
return process.wait()
def sign_build(
self, snap_filename, key_name="default", local=False, expect_success=True
):
cmd = ["sign-build", snap_filename, "--key-name", key_name]
if local:
# only sign it, no pushing
cmd.append("--local")
process = self.spawn_snapcraft(cmd)
if expect_success:
if local:
process.expect(
"Build assertion .*{}-build saved to disk.".format(snap_filename)
)
else:
process.expect(
"Build assertion .*{}-build pushed.".format(snap_filename)
)
process.expect(pexpect.EOF)
return process.wait()
def close(self, *args, **kwargs):
process = self.spawn_snapcraft(["close"] + list(args))
expected = kwargs.get("expected")
if expected is not None:
process.expect(expected)
process.expect(pexpect.EOF)
return process.wait()
def push(self, snap, release=None, expected=None):
actions = ["push", snap]
if release is not None:
actions += ["--release", release]
process = self.spawn_snapcraft(actions)
if expected is not None:
process.expect(expected)
process.expect(pexpect.EOF)
return process.wait()
class SnapdIntegrationTestCase(TestCase):
slow_test = False
def setUp(self) -> None:
super().setUp()
run_slow_tests = os.environ.get("SNAPCRAFT_SLOW_TESTS", False)
if run_slow_tests:
run_slow_tests = distutils.util.strtobool(str(run_slow_tests))
if self.slow_test and not run_slow_tests:
self.skipTest("Not running slow tests")
if os.environ.get("ADT_TEST") and self.deb_arch == "armhf":
self.skipTest("The autopkgtest armhf runners can't install snaps")
def install_snap(self) -> None:
try:
subprocess.check_output(
["sudo", "snap", "install", glob.glob("*.snap")[0], "--dangerous"],
stderr=subprocess.STDOUT,
universal_newlines=True,
)
except subprocess.CalledProcessError as e:
self.addDetail("output", content.text_content(e.output))
raise
def get_package_version(package_name, series, deb_arch):
# http://people.canonical.com/~ubuntu-archive/madison.cgi?package=hello&a=amd64&c=&s=zesty&text=on
params = {
"package": package_name,
"s": "{0},{0}-updates,{0}-security".format(series),
"a": deb_arch,
"text": "on",
}
query = requests.get(
"http://people.canonical.com/~ubuntu-archive/madison.cgi", params
)
query.raise_for_status()
package = query.text.strip().split("\n")[-1]
package_status = [i.strip() for i in package.strip().split("|")]
return package_status[1]
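# Illustrative call (the package/series/arch values and the returned version
# string are assumptions):
#   get_package_version("hello", "focal", "amd64")  # -> e.g. "2.10-2ubuntu2"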
def add_stage_packages(
*, part_name: str, stage_packages: List[str], snapcraft_yaml_file=None
):
if snapcraft_yaml_file is None:
snapcraft_yaml_file = os.path.join("snap", "snapcraft.yaml")
with open(snapcraft_yaml_file) as file_read:
y = yaml_utils.load(file_read)
if "stage-packages" in y["parts"][part_name]:
y["parts"][part_name]["stage-packages"].extend(stage_packages)
else:
y["parts"][part_name]["stage-packages"] = stage_packages
with open(snapcraft_yaml_file, "w") as file_write:
yaml_utils.dump(y, stream=file_write)
| gpl-3.0 | -1,028,729,214,117,175,600 | 35.582345 | 102 | 0.587733 | false |
LawrenceK/console-server | consoleserver/ssh.py | 1 | 4049 | #
# (C) Copyright L.P.Klyne 2013
#
"""This is based on the basic ssh server example, the protocol handler has been pulled out as a separate
source as this is where the logic for the console server sits.
"""
import logging
_log = logging.getLogger(__name__)
import os
import grp
from zope.interface import implements
from twisted.cred import portal
from twisted.conch import avatar
from twisted.conch.ssh import factory, userauth, connection, keys, session
from twisted.conch.checkers import SSHPublicKeyDatabase, UNIXPasswordDatabase
from twisted.python import components
from twisted.python import randbytes
from ssh_protocol import TSProtocol
import config
class TSAvatar(avatar.ConchUser):
def __init__(self, username):
avatar.ConchUser.__init__(self)
self.username = username
self.channelLookup.update({'session': session.SSHSession})
def check_priviledged(self):
"""Test for membership of root or sudo groups, hence has admin ability"""
def is_user_in_group(groupname):
return self.username in grp.getgrnam(groupname)[3]
print "TSAvatar.check_priviledged %s" % self.username
_log.debug("TSAvatar.check_priviledged %s", self.username)
return is_user_in_group("root") or is_user_in_group("sudo")
class TSRealm:
implements(portal.IRealm)
def requestAvatar(self, avatarId, mind, *interfaces):
return interfaces[0], TSAvatar(avatarId), lambda: None
class TSSession:
implements(session.ISession)
def __init__(self, avatar):
self.avatar = avatar
@property
def factory(self):
return self.conn.transport.factory
def getPty(self, term, windowSize, attrs):
pass
def execCommand(self, proto, cmd):
raise Exception("no executing commands")
def openShell(self, protocol):
_log.debug("openShell %s", protocol.getHost().address.port)
# protocol is an SSHSessionProcessProtocol object
# protocol.getHost().address.port
# protocol.factory
# protocol.transport
# TODO if port is global sshport create CLI
ts_protocol = TSProtocol(self.avatar)
ts_protocol.makeConnection(protocol)
protocol.makeConnection(session.wrapProtocol(ts_protocol))
    def windowChanged(self, newWindowSize):
        pass
def eofReceived(self):
pass
def closed(self):
pass
TS_portal = portal.Portal(TSRealm())
TS_portal.registerChecker(UNIXPasswordDatabase())
TS_portal.registerChecker(SSHPublicKeyDatabase())
components.registerAdapter(TSSession, TSAvatar, session.ISession)
class TSFactory(factory.SSHFactory):
portal = TS_portal
services = {
'ssh-userauth': userauth.SSHUserAuthServer,
'ssh-connection': connection.SSHConnection
}
publickey_file = 'public.key'
privatekey_file = 'private.key'
publicKeys = {}
privateKeys = {}
def getRSAKeys(self):
TSFactory.publickey_file = config.find_file( TSFactory.publickey_file, default = True )
TSFactory.privatekey_file = config.find_file( TSFactory.privatekey_file, default = True )
if not (os.path.exists(self.publickey_file) and os.path.exists(self.privatekey_file)):
# generate a RSA keypair
_log.info("Generating RSA keypair")
from Crypto.PublicKey import RSA
KEY_LENGTH = 1024
rsaKey = RSA.generate(KEY_LENGTH, randbytes.secureRandom)
# save keys for next time
file(self.publickey_file, 'w+b').write(keys.Key(rsaKey).public().toString('OPENSSH'))
file(self.privatekey_file, 'w+b').write(keys.Key(rsaKey).toString('OPENSSH'))
TSFactory.publicKeys['ssh-rsa'] = keys.Key.fromString(data=file(self.publickey_file).read())
TSFactory.privateKeys['ssh-rsa'] = keys.Key.fromString(data=file(self.privatekey_file).read())
def __init__(self, consolecollection):
self.consolecollection = consolecollection
self.getRSAKeys()
# we then start the listener using TSFactory
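# Illustrative wiring (a sketch with assumed names, not part of this module):
#   from twisted.internet import reactor
#   reactor.listenTCP(2222, TSFactory(my_console_collection))
#   reactor.run()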
| gpl-3.0 | 4,608,994,057,645,392,400 | 32.188525 | 104 | 0.687824 | false |