| column | dtype | lengths / values |
|---|---|---|
| repo_name | string | lengths 5-100 |
| path | string | lengths 4-299 |
| copies | string | 990 classes |
| size | string | lengths 4-7 |
| content | string | lengths 666-1.03M |
| license | string | 15 classes |
| hash | int64 | -9,223,351,895,964,839,000 to 9,223,297,778B |
| line_mean | float64 | 3.17-100 |
| line_max | int64 | 7-1k |
| alpha_frac | float64 | 0.25-0.98 |
| autogenerated | bool | 1 class |

| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
Alwnikrotikz/visvis.dev | core/orientation.py | 5 | 6451 | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein
#
# Visvis is distributed under the terms of the (new) BSD License.
# The full license can be found in 'license.txt'.
""" Module baseWobjects
Defines the mix class for orientable wobjects.
"""
import numpy as np
from visvis.core import misc
from visvis.pypoints import Point, is_Point
# todo: is this the best way to allow users to orient their objects,
# or might there be other ways?
class OrientationForWobjects_mixClass(object):
""" OrientationForWobjects_mixClass()
This class can be mixed with a wobject class to enable easy
orientation of the objects in space. It makes use of the
transformation list that each wobject has.
The functionality provided by this class is not made part of the
Wobject class because it does not make sense for all kinds of wobjects
(for example lines and images). The OrientableMesh is a class that
inherits from this class.
"""
def __init__(self):
# Set current and reference direction (default up)
self._refDirection = Point(0,0,1)
self._direction = Point(0,0,1)
# Create transformations
self._scaleTransform = misc.Transform_Scale()
self._translateTransform = misc.Transform_Translate()
self._rotateTransform = misc.Transform_Rotate()
self._directionTransform = misc.Transform_Rotate()
# Append transformations to THE list
self.transformations.append(self._translateTransform)
self.transformations.append(self._directionTransform)
self.transformations.append(self._rotateTransform)
self.transformations.append(self._scaleTransform)
@misc.PropWithDraw
def scaling():
""" Get/Set the scaling of the object. Can be set using
a 3-element tuple, a 3D point, or a scalar. The getter always
returns a Point.
"""
def fget(self):
s = self._scaleTransform
return Point(s.sx, s.sy, s.sz)
def fset(self, value):
if isinstance(value, (float, int)):
self._scaleTransform.sx = float(value)
self._scaleTransform.sy = float(value)
self._scaleTransform.sz = float(value)
elif isinstance(value, (list, tuple)) and len(value) == 3:
self._scaleTransform.sx = float(value[0])
self._scaleTransform.sy = float(value[1])
self._scaleTransform.sz = float(value[2])
elif is_Point(value) and value.ndim == 3:
self._scaleTransform.sx = value.x
self._scaleTransform.sy = value.y
self._scaleTransform.sz = value.z
else:
raise ValueError('Scaling should be a scalar, 3D Point, or 3-element tuple.')
return locals()
@misc.PropWithDraw
def translation():
""" Get/Set the transaltion of the object. Can be set using
a 3-element tuple or a 3D point. The getter always returns
a Point.
"""
def fget(self):
d = self._translateTransform
return Point(d.dx, d.dy, d.dz)
def fset(self, value):
if isinstance(value, (list, tuple)) and len(value) == 3:
self._translateTransform.dx = value[0]
self._translateTransform.dy = value[1]
self._translateTransform.dz = value[2]
elif is_Point(value) and value.ndim == 3:
self._translateTransform.dx = value.x
self._translateTransform.dy = value.y
self._translateTransform.dz = value.z
else:
raise ValueError('Translation should be a 3D Point or 3-element tuple.')
return locals()
@misc.PropWithDraw
def direction():
""" Get/Set the direction (i.e. orientation) of the object. Can
be set using a 3-element tuple or a 3D point. The getter always
returns a Point.
"""
def fget(self):
return self._direction.copy()
def fset(self, value):
# Store direction
if isinstance(value, (list, tuple)) and len(value) == 3:
self._direction = Point(*tuple(value))
elif is_Point(value) and value.ndim == 3:
self._direction = value
else:
raise ValueError('Direction should be a 3D Point or 3-element tuple.')
# Normalize
if self._direction.norm()==0:
raise ValueError('Direction vector must have a non-zero length.')
self._direction = self._direction.normalize()
# Create ref point
refPoint = self._refDirection
# Convert to rotation. The cross product of two vectors results
# in a vector normal to both vectors. This is the axis of rotation
# over which the minimal rotation is achieved.
axis = self._direction.cross(refPoint)
if axis.norm() < 0.1:
if self._direction.z > 0:
# No rotation
self._directionTransform.ax = 0.0
self._directionTransform.ay = 0.0
self._directionTransform.az = 1.0
self._directionTransform.angle = 0.0
else:
# Flipped
self._directionTransform.ax = 1.0
self._directionTransform.ay = 0.0
self._directionTransform.az = 0.0
self._directionTransform.angle = np.pi
else:
axis = axis.normalize()
angle = -refPoint.angle(self._direction)
self._directionTransform.ax = axis.x
self._directionTransform.ay = axis.y
self._directionTransform.az = axis.z
self._directionTransform.angle = angle * 180 / np.pi
return locals()
@misc.PropWithDraw
def rotation():
""" Get/Set the rotation of the object (in degrees, around its
direction vector).
"""
def fget(self):
return self._rotateTransform.angle
def fset(self, value):
self._rotateTransform.angle = float(value)
return locals()
| bsd-3-clause | -4,064,351,584,112,723,500 | 38.335366 | 93 | 0.569679 | false |
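The `scaling`/`translation`/`direction` definitions above use the "return locals()" property-factory idiom, with `misc.PropWithDraw` consuming the fget/fset pair. A minimal plain-Python sketch of the same idiom follows; the redraw behaviour of `PropWithDraw` is assumed, not shown:

```python
# Sketch only: property() accepts the fget/fset pair that the inner
# function collects via `return locals()`; PropWithDraw is assumed to
# do the same while also scheduling a redraw.
class Example(object):
    def __init__(self):
        self._value = 0.0

    def value():
        def fget(self):
            return self._value
        def fset(self, value):
            self._value = float(value)
        return locals()
    value = property(**value())

e = Example()
e.value = 3.5
assert e.value == 3.5
```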
juanc27/myfavteam | news/nba_news.py | 2 | 9360 | from newspaper import Article, Config
from bs4 import BeautifulSoup
from urllib2 import urlopen
import datetime
import re
from collections import OrderedDict
import pytz
from pytz import timezone
from dateutil.parser import parse
nba_url = "http://www.nba.com"
espn_url = "http://espn.go.com"
#lxml didn't work for espn
my_config = Config()
my_config.parser_class = "soup"
def normalize_url(url, link):
if link.startswith("http://"):
return link
elif link.startswith("/"):
return url + link
else:
return url + "/" + link
def get_soup_from_url(url):
response = urlopen(url)
try:
data = response.read()
except:
print "**error visiting {}".format(url)
raise
return BeautifulSoup(data)
def print_article_dict(dict):
print ""
for field, value in dict.items():
if field == "keywords":
print "keywords: {}".format(value)
continue
if value != None:
print "{}: {} ". format(field, value.encode('utf-8', 'replace'))
else:
print "{}: {} ". format(field, value)
""" search string for timezone keywords lind PT and return a the date formated with timezone
"""
def format_date (date_string, date):
if date_string.find("PT") > -1:
tzone = "US/Pacific"
elif date_string.find("CT") > -1:
tzone = "US/Central"
elif date_string.find("ET") > -1:
tzone = "US/Eastern"
else:
print "timezone from date_string not found"
return date  # no recognized zone keyword; return the naive date unchanged
return (pytz.timezone(tzone).localize(date, is_dst=True))
def getESPN_date_from_article(soup):
mday = soup.find("div", class_ = "monthday")
if mday != None:
date = mday.find_previous("span").text
time = mday.find_next("div", class_ = "time").text
timeofday = mday.find_next("div", class_ = "timeofday").text
date = parse(date + " " + time + " " + timeofday)
date = format_date (timeofday, date)
resp_date = date.isoformat()
else:
dates = soup.find_all("div", class_ = "date")
resp_date = None
for date in dates:
txt = date.text
dt = None
if txt.startswith("Publish Date: Today, "):
dt = datetime.date.today()
#print "{} {}".format(dt.isoformat(), txt[21:32])
dt = parse("{} {}".format(dt.isoformat(), txt[21:32]))
elif txt.startswith("Publish Date: Yesterday, "):
dt = datetime.date.today()
dt = dt - datetime.timedelta(days=1)
#print "{} {}".format(dt.isoformat(), txt[25:36])
dt = parse("{} {}".format(dt.isoformat(), txt[25:36]))
elif txt.startswith("Publish Date: "):
#print "{}".format(txt[14:38])
dt = parse(txt[14:38])
if dt != None:
date = format_date(txt, dt)
resp_date = date.isoformat()
break
else:
continue
if resp_date == None:
gtl = soup.find("div", class_ = "game-time-location")
if gtl != None:
dt = gtl.find_next("p")
if dt != None:
date = parse(dt.text)
date = format_date (dt.text, date)
resp_date = date.isoformat()
return resp_date
def getESPN_dot_com_team_news(team_short_name = None, visited_links = []):
news = list()
if team_short_name == None:
return
team_short_name = team_short_name.lower()
try:
espn_team_blog = short_name_to_espn_blog[team_short_name]
except:
return None
url = espn_url + "/" + espn_team_blog + "/"
soup = get_soup_from_url(url)
if espn_team_blog.find('blog') > -1:
headers = soup.find_all("div", class_ = "mod-header")
else:
headers = soup.find_all("li", class_ = "result")
for header in headers:
resp = OrderedDict()
h3 = header.find("h3")
if h3 == None:
continue
link = h3.find("a").get("href")
if link == None:
continue
url = normalize_url(espn_url, link)
if url in visited_links:
continue
#avoid blog post from other teams
#if url.find('blog') > -1:
# if url.find(team_short_name) == -1:
# continue
article = Article(url, my_config)
article.download()
article.parse()
resp['title'] = article.title
resp['link'] = url
resp['description'] = article.meta_description
resp['text'] = article.text
resp['image'] = article.top_image
resp['keywords'] = article.meta_keywords
#extra fields not provided by newspaper (author and date)
article_soup = get_soup_from_url(url)
if len(article.authors) < 1 :
author = article_soup.find("cite", class_ = "byline")
if author != None:
author_txt = author.find("a")
if author_txt != None:
author = author_txt.text
else:
#print "found this as an author {}".format(author.text
author = author.text
else:
author = article_soup.find("cite", class_ = "source")
if author != None:
author = author.text
resp['author'] = author
else:
resp['author'] = article.authors[0]
resp['date'] = getESPN_date_from_article(article_soup)
print_article_dict(resp)
news.append(resp)
return news
"""
Converts nba team short names to espn style names used in blogs.
example: warriors -> "golden-state-warriors"
"""
short_name_to_espn_blog = {
'celtics' : 'blog/boston/celtics',
'nets' : 'blog/brooklyn-nets',
'knick' : 'blog/new-york-knicks',
'76ers' : 'nba/team/_/name/phi/philadelphia-76ers',
'raptors' : 'nba/team/_/name/tor/toronto-raptors',
'bulls' : 'blog/chicago-bulls',
'cavaliers' : 'blog/cleveland-cavaliers',
'pistons' : 'nba/team/_/name/det/detroit-pistons',
'pacers' : 'nba/team/_/name/ind/indiana-pacers',
'bucks' : 'nba/team/_/name/mil/milwaukee-bucks',
'hawks' : 'nba/team/_/name/atl/atlanta-hawks',
'hornets' : 'nba/team/_/name/cha/charlotte-hornets',
'heat' : 'blog/truehoopmiamiheat',
'magic' : 'nba/team/_/name/orl/orlando-magic',
'wizards' : 'nba/team/_/name/wsh/washington-wizards',
'warriors' : 'blog/golden-state-warriors',
'clippers' : 'blog/los-angeles-clippers',
'lakers' : 'blog/los-angeles-lakers',
'suns' : 'nba/team/_/name/phx/phoenix-suns',
'kings' : 'nba/team/_/name/sac/sacramento-kings',
'mavericks' : 'blog/dallas-mavericks',
'rockets' : 'nba/team/_/name/hou/houston-rockets',
'grizzlies' : 'nba/team/_/name/mem/memphis-grizzlies',
'pelicans' : 'nba/team/_/name/no/new-orleans-pelicans',
'spurs' : 'nba/team/_/name/sa/san-antonio-spurs',
'nuggets' : 'nba/team/_/name/den/denver-nuggets',
'timberwolves' : 'nba/team/_/name/min/minnesota-timberwolves',
'thunder' : 'nba/team/_/name/okc/oklahoma-city-thunder',
'blazers' : 'nba/team/_/name/por/portland-trail-blazers',
'jazz' : 'nba/team/_/name/utah/utah-jazz',
}
def getNBA_dot_com_team_news(team_short_name = None, visited_links = []):
news = list()
if team_short_name == None:
return
team_short_name = team_short_name.lower()
url = nba_url + "/" + team_short_name + "/news"
soup = get_soup_from_url(url)
headers = soup.find_all("div", class_ = re.compile("post__information"))
for header in headers:
resp = OrderedDict()
title = header.find("div", class_ = "post__title")
if title == None:
continue
link = title.find("a").get("href")
if link == None:
continue
url = normalize_url(nba_url, link)
if url in visited_links:
continue
#avoiding articles in other languages for nba.com/china/
if url.find("/china/") > -1:
continue
article = Article(url, my_config)
article.download()
article.parse()
resp['title'] = article.title
resp['link'] = url
resp['description'] = article.meta_description
resp['text'] = article.text
resp['image'] = article.top_image
resp['keywords'] = article.meta_keywords
#nba.com doesn't show a clear author for articles
resp['author'] = "nba.com"
article_soup = get_soup_from_url(url)
date = article_soup.find("div", class_ = "author-block__post-date")
resp['date'] = None
if date != None:
txt = date.text
if txt.startswith("Posted: "):
#print "{}".format(txt[8:])
dt = parse(txt[8:])
#nba.com doesn't provide the time, we will use now
now = datetime.datetime.now(pytz.timezone("US/Eastern"))
dt = dt.replace(hour=now.hour, minute=now.minute,
second=now.second, tzinfo=now.tzinfo)
resp['date'] = dt.isoformat()
print_article_dict(resp)
news.append(resp)
return news
if __name__ == "__main__":
# getESPN_dot_com_team_news("warriors")
getESPN_dot_com_team_news("heat")
getNBA_dot_com_team_news("heat")
| mit | 6,313,839,723,252,596,000 | 31.61324 | 92 | 0.559722 | false |
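`format_date` above combines `dateutil.parser.parse` with pytz's `localize`. In isolation the pattern looks like this; the timestamp string is a placeholder:

```python
import pytz
from dateutil.parser import parse

naive = parse("2015-03-01 7:30 PM")                               # naive datetime
aware = pytz.timezone("US/Pacific").localize(naive, is_dst=True)  # attach the zone
print(aware.isoformat())                                          # 2015-03-01T19:30:00-08:00
```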
rollokb/django-mailgun | django_mailgun.py | 1 | 5894 | from __future__ import unicode_literals
import six
import requests
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.message import sanitize_address
from requests.packages.urllib3.filepost import encode_multipart_formdata
__version__ = '0.7.1'
version = '0.7.1'
# A mapping of smtp headers to API key names, along
# with a callable to transform them somehow (if nec.)
#
# https://documentation.mailgun.com/user_manual.html#sending-via-smtp
# https://documentation.mailgun.com/api-sending.html#sending
#
# structure is SMTP_HEADER: (api_name, data_transform_function)
HEADERS_MAP = {
'X-Mailgun-Tag': ('o:tag', lambda x: x),
'X-Mailgun-Campaign-Id': ('o:campaign', lambda x: x),
'X-Mailgun-Dkim': ('o:dkim', lambda x: x),
'X-Mailgun-Deliver-By': ('o:deliverytime', lambda x: x),
'X-Mailgun-Drop-Message': ('o:testmode', lambda x: x),
'X-Mailgun-Track': ('o:tracking', lambda x: x),
'X-Mailgun-Track-Clicks': ('o:tracking-clicks', lambda x: x),
'X-Mailgun-Track-Opens': ('o:tracking-opens', lambda x: x),
'X-Mailgun-Variables': ('v:my-var', lambda x: x),
}
class MailgunAPIError(Exception):
pass
class MailgunBackend(BaseEmailBackend):
"""A Django Email backend that uses mailgun.
"""
def __init__(self, fail_silently=False, *args, **kwargs):
access_key, server_name = (kwargs.pop('access_key', None),
kwargs.pop('server_name', None))
super(MailgunBackend, self).__init__(
fail_silently=fail_silently,
*args, **kwargs)
try:
self._access_key = access_key or getattr(settings, 'MAILGUN_ACCESS_KEY')
self._server_name = server_name or getattr(settings, 'MAILGUN_SERVER_NAME')
except AttributeError:
if fail_silently:
self._access_key = self._server_name = None  # tuple-unpacking None would raise; chain-assign instead
else:
raise
self._api_url = "https://api.mailgun.net/v3/%s/" % self._server_name
self._headers_map = HEADERS_MAP
def open(self):
"""Stub for open connection, all sends are done over HTTP POSTs
"""
pass
def close(self):
"""Close any open HTTP connections to the API server.
"""
pass
def _map_smtp_headers_to_api_parameters(self, email_message):
"""
Map the values passed in SMTP headers to API-ready
2-item tuples present in HEADERS_MAP
header values must be a single string or list or tuple of strings
:return: 2-item tuples of the form (api_name, api_values)
"""
api_data = []
for smtp_key, api_transformer in six.iteritems(self._headers_map):
data_to_transform = email_message.extra_headers.pop(smtp_key, None)
if data_to_transform is not None:
if type(data_to_transform) in (list, tuple):
# map each value in the tuple/list
for data in data_to_transform:
api_data.append((api_transformer[0], api_transformer[1](data)))
else:
# we only have one value
api_data.append((api_transformer[0], api_transformer[1](data_to_transform)))
return api_data
def _send(self, email_message):
"""A helper method that does the actual sending."""
if not email_message.recipients():
return False
from_email = sanitize_address(email_message.from_email, email_message.encoding)
recipients = [sanitize_address(addr, email_message.encoding)
for addr in email_message.recipients()]
try:
post_data = []
post_data.append(('to', (",".join(recipients)),))
post_data.append(('text', email_message.body,))
post_data.append(('subject', email_message.subject,))
post_data.append(('from', from_email,))
# get our recipient variables if they were passed in
recipient_variables = email_message.extra_headers.pop('recipient_variables', None)
if recipient_variables is not None:
post_data.append(('recipient-variables', recipient_variables, ))
for name, value in self._map_smtp_headers_to_api_parameters(email_message):
post_data.append((name, value, ))
if hasattr(email_message, 'alternatives') and email_message.alternatives:
for alt in email_message.alternatives:
if alt[1] == 'text/html':
post_data.append(('html', alt[0],))
break
if email_message.attachments:
for attachment in email_message.attachments:
post_data.append(('attachment', (attachment[0], attachment[1],)))
content, header = encode_multipart_formdata(post_data)
headers = {'Content-Type': header}
else:
content = post_data
headers = None
response = requests.post(self._api_url + "messages",
auth=("api", self._access_key),
data=content, headers=headers)
except:
if not self.fail_silently:
raise
return False
if response.status_code != 200:
if not self.fail_silently:
raise MailgunAPIError(response)
return False
return True
def send_messages(self, email_messages):
"""Sends one or more EmailMessage objects and returns the number of
email messages sent.
"""
if not email_messages:
return
num_sent = 0
for message in email_messages:
if self._send(message):
num_sent += 1
return num_sent
| mit | 1,728,921,739,636,360,000 | 35.8375 | 96 | 0.583305 | false |
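Hooking the backend above into a Django project takes two settings plus normal `django.core.mail` usage. Everything below except the two setting names (which `MailgunBackend.__init__` reads) is illustrative:

```python
# settings.py -- key and domain values are placeholders
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = 'key-xxxxxxxxxxxxxxxx'
MAILGUN_SERVER_NAME = 'samples.mailgun.org'

# application code
from django.core.mail import EmailMessage

msg = EmailMessage('Hello', 'Body text', '[email protected]', ['[email protected]'])
msg.extra_headers['X-Mailgun-Tag'] = 'newsletter'  # translated via HEADERS_MAP
msg.send()
```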
opendatateam/udata | udata/frontend/__init__.py | 1 | 1614 | import inspect
import logging
from importlib import import_module
from jinja2 import Markup
from udata import assets, entrypoints
from .markdown import UdataCleaner, init_app as init_markdown
log = logging.getLogger(__name__)
class SafeMarkup(Markup):
'''Markup object bypasses Jinja's escaping. This override sanitizes the resulting HTML first.'''
def __new__(cls, base, *args, **kwargs):
cleaner = UdataCleaner()
return super().__new__(cls, cleaner.clean(base), *args, **kwargs)
def _load_views(app, module):
views = module if inspect.ismodule(module) else import_module(module)
blueprint = getattr(views, 'blueprint', None)
if blueprint:
app.register_blueprint(blueprint)
VIEWS = ['core.storages', 'core.tags', 'admin']
def init_app(app, views=None):
views = views or VIEWS
init_markdown(app)
for view in views:
_load_views(app, 'udata.{}.views'.format(view))
# Load all plugins views and blueprints
for module in entrypoints.get_enabled('udata.views', app).values():
_load_views(app, module)
# Load all plugins views and blueprints
for module in entrypoints.get_enabled('udata.front', app).values():
front_module = module if inspect.ismodule(module) else import_module(module)
front_module.init_app(app)
# Load core manifest
with app.app_context():
assets.register_manifest('udata')
for dist in entrypoints.get_plugins_dists(app, 'udata.views'):
if assets.has_manifest(dist.project_name):
assets.register_manifest(dist.project_name)
| agpl-3.0 | 7,320,030,350,934,159,000 | 28.888889 | 103 | 0.679058 | false |
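The point of `SafeMarkup` above is that, unlike a plain `jinja2.Markup`, the input is run through `UdataCleaner` before being trusted. A quick illustration, assuming the surrounding app is configured and the class is imported; the exact output depends on the cleaner's allowed-tags configuration:

```python
html = '<p>hello</p><script>alert(1)</script>'
print(SafeMarkup(html))  # the <p> survives; the script tag is sanitized away
```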
CoDEmanX/ArangoDB | 3rdParty/V8-4.3.61/third_party/python_26/Lib/encodings/iso2022_jp_2.py | 816 | 1061 | #
# iso2022_jp_2.py: Python Unicode Codec for ISO2022_JP_2
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp_2')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='iso2022_jp_2',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
| apache-2.0 | -4,348,393,940,529,492,500 | 26.205128 | 74 | 0.702168 | false |
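A round-trip check of the codec registered above, in the Python 2 syntax this stdlib module targets:

```python
text = u'\u3053\u3093\u306b\u3061\u306f'   # "konnichiwa"
data = text.encode('iso2022_jp_2')
assert data.decode('iso2022_jp_2') == text
```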
Zen-CODE/kivybits | Examples/DynamicUI/main.py | 1 | 3087 | """
Small app demonstrating a dynamic UI
Author: ZenCODE
Date: 22/10/2013
"""
from kivy.lang import Builder
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.factory import Factory
class DataSource(object):
"""
This class would be an abstraction of your data source:
a MySQL database, in this case.
"""
def get_data(self):
"""Return the data. We use a list of dicts representing a list of rows
"""
return [{"image": "kivy1.png",
"row_id": 1,
"header": "Question 1",
"type": "Label",
"value_name": "text",
"value": "My Text"},
{"image": "kivy2.png",
"row_id": 2,
"header": "Question 2",
"type": "Button",
"value_name": "text",
"value": "Button"},
{"image": "kivy1.png",
"row_id": 3,
"header": "Question 3",
"type": "CheckBox",
"value_name": "active",
"value": "True"}]
Builder.load_string('''
<QuestionWidget>:
ques_image: ques_image # These map the kv ids onto the widget's ObjectProperty attributes
header_label: header_label
box_container: box_container
orientation: "vertical"
Image:
id: ques_image
Label:
id: header_label
BoxLayout:
id: box_container
''')
class QuestionWidget(BoxLayout):
"""
This widget would represent each Question
"""
def build(self, data_dict):
"""Build the widget based on the dictionary from the data source"""
# The widgets are part of every instance
self.ques_image.source = data_dict["image"]
self.header_label.text = data_dict["header"]
# But this content is generated dynamically
self.box_container.add_widget(self.get_content(data_dict))
@staticmethod
def get_content(data_dict):
"""Returns the instance specific widgets for the box_layout"""
# We get class based on it's name as registered in the factory and instantiate
content = Factory.get(data_dict["type"])()
# We noe set any of it's properties and return it
setattr(content, data_dict["value_name"], data_dict["value"])
return content
class TestApp(App):
def __init__(self, **kwargs):
"""
On initialization, register the classes we want to create dynamically
via the Factory object
"""
super(TestApp, self).__init__(**kwargs)
Factory.register('Label', module='kivy.uix.label')
Factory.register('Button', module='kivy.uix.button')
Factory.register('CheckBox', module='kivy.uix.checkbox')
def build(self):
container = BoxLayout() # or screen, carousel etc.
for item in DataSource().get_data():
ques_widget = QuestionWidget()
ques_widget.build(item)
container.add_widget(ques_widget)
return container
if __name__ == "__main__":
TestApp().run()
| mit | 7,099,345,708,829,937,000 | 29.564356 | 86 | 0.568837 | false |
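`get_content` above hinges on `kivy.factory.Factory`: register a class by name once, then instantiate it from a string. In isolation (the values are illustrative):

```python
from kivy.factory import Factory

Factory.register('CheckBox', module='kivy.uix.checkbox')
widget = Factory.get('CheckBox')()   # look up the class by name, then instantiate
widget.active = True                 # the value_name/value pair from the data dict
```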
a-parhom/edx-platform | openedx/core/djangoapps/credit/signals.py | 18 | 3705 | """
This file contains receivers of course publication signals.
"""
import logging
from django.dispatch import receiver
from django.utils import timezone
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
log = logging.getLogger(__name__)
def on_course_publish(course_key):
"""
Will receive a delegated 'course_published' signal from cms/djangoapps/contentstore/signals.py
and kick off a celery task to update the credit course requirements.
IMPORTANT: It is assumed that the edx-proctoring subsystem has been appropriate refreshed
with any on_publish event workflow *BEFORE* this method is called.
"""
# Import here, because signal is registered at startup, but items in tasks
# are not yet able to be loaded
from openedx.core.djangoapps.credit import api, tasks
if api.is_credit_course(course_key):
tasks.update_credit_course_requirements.delay(unicode(course_key))
log.info(u'Added task to update credit requirements for course "%s" to the task queue', course_key)
@receiver(COURSE_GRADE_CHANGED)
def listen_for_grade_calculation(sender, user, course_grade, course_key, deadline, **kwargs): # pylint: disable=unused-argument
"""Receive 'MIN_GRADE_REQUIREMENT_STATUS' signal and update minimum grade requirement status.
Args:
sender: None
user(User): User Model object
course_grade(CourseGrade): CourseGrade object
course_key(CourseKey): The key for the course
deadline(datetime): Course end date or None
Kwargs:
kwargs : None
"""
# This needs to be imported here to avoid a circular dependency
# that can cause migrations to fail.
from openedx.core.djangoapps.credit import api
course_id = CourseKey.from_string(unicode(course_key))
is_credit = api.is_credit_course(course_id)
if is_credit:
requirements = api.get_credit_requirements(course_id, namespace='grade')
if requirements:
criteria = requirements[0].get('criteria')
if criteria:
min_grade = criteria.get('min_grade')
passing_grade = course_grade.percent >= min_grade
now = timezone.now()
status = None
reason = None
if (deadline and now < deadline) or not deadline:
# Student completed coursework on-time
if passing_grade:
# Student received a passing grade
status = 'satisfied'
reason = {'final_grade': course_grade.percent}
else:
# Submission after deadline
if passing_grade:
# Grade was good, but submission arrived too late
status = 'failed'
reason = {
'current_date': now,
'deadline': deadline
}
else:
# Student failed to receive minimum grade
status = 'failed'
reason = {
'final_grade': course_grade.percent,
'minimum_grade': min_grade
}
# We do not record a status if the user has not yet earned the minimum grade, but still has
# time to do so.
if status and reason:
api.set_credit_requirement_status(
user, course_id, 'grade', 'grade', status=status, reason=reason
)
| agpl-3.0 | -740,572,477,190,660,100 | 38 | 128 | 0.586235 | false |
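For context, the receiver above is driven by the grading subsystem firing `COURSE_GRADE_CHANGED` with matching kwargs. A hedged sketch of the sending side; all argument values are placeholders bound elsewhere:

```python
COURSE_GRADE_CHANGED.send(
    sender=None,
    user=user,                  # auth User instance
    course_grade=course_grade,  # object exposing .percent
    course_key=course_key,      # opaque_keys CourseKey
    deadline=course_end,        # datetime or None
)
```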
mitchellcash/ion | qa/rpc-tests/txn_doublespend.py | 1 | 5059 | #!/usr/bin/env python2
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Copyright (c) 2015-2018 The PIVX developers
# Copyright (c) 2018 The Ion developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test proper accounting with malleable transactions
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from decimal import Decimal
from util import *
import os
import shutil
class TxnMallTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
return super(TxnMallTest, self).setup_network(True)
def run_test(self):
# All nodes should start with 1,250 BTC:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
self.nodes[0].move("", "foo", 1220)
self.nodes[0].move("", "bar", 30)
assert_equal(self.nodes[0].getbalance(""), 0)
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# First: use raw transaction API to send 1210 BTC to node1_address,
# but don't broadcast:
(total_in, inputs) = gather_inputs(self.nodes[0], 1210)
change_address = self.nodes[0].getnewaddress("foo")
outputs = {}
outputs[change_address] = 40
outputs[node1_address] = 1210
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
doublespend = self.nodes[0].signrawtransaction(rawtx)
assert_equal(doublespend["complete"], True)
# Create two transaction from node[0] to node[1]; the
# second must spend change from the first because the first
# spends all mature inputs:
txid1 = self.nodes[0].sendfrom("foo", node1_address, 1210, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].setgenerate(True, 1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50BTC for another
# matured block, minus 1210, minus 20, and minus transaction fees:
expected = starting_balance
if self.options.mine_block: expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo"), 1220+tx1["amount"]+tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar"), 30+tx2["amount"]+tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"]+tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Now give doublespend to miner:
mutated_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].setgenerate(True, 1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].setgenerate(True, 1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Both transactions should be conflicted
assert_equal(tx1["confirmations"], -1)
assert_equal(tx2["confirmations"], -1)
# Node0's total balance should be starting balance, plus 100BTC for
# two more matured blocks, minus 1210 for the double-spend:
expected = starting_balance + 100 - 1210
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*"), expected)
# foo account should be debited, but bar account should not:
assert_equal(self.nodes[0].getbalance("foo"), 1220-1210)
assert_equal(self.nodes[0].getbalance("bar"), 30)
# Node1's "from" account balance should be just the mutated send:
assert_equal(self.nodes[1].getbalance("from0"), 1210)
if __name__ == '__main__':
TxnMallTest().main()
| mit | 9,000,425,572,157,098,000 | 40.467213 | 111 | 0.635896 | false |
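The double-spend setup in the test above boils down to the raw-transaction recipe below. Node and address names are placeholders; `gather_inputs` comes from the framework's `util.py`:

```python
total_in, inputs = gather_inputs(node0, 1210)       # collect mature coins
outputs = {change_addr: 40, dest_addr: 1210}
raw = node0.createrawtransaction(inputs, outputs)
signed = node0.signrawtransaction(raw)              # {'hex': ..., 'complete': True}
node2.sendrawtransaction(signed['hex'])             # broadcast on the other half of the split
```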
louisLouL/pair_trading | capstone_env/lib/python3.6/site-packages/pandas/tests/tools/test_numeric.py | 6 | 14437 | import pytest
import decimal
import numpy as np
import pandas as pd
from pandas import to_numeric, _np_version_under1p9
from pandas.util import testing as tm
from numpy import iinfo
class TestToNumeric(object):
def test_empty(self):
# see gh-16302
s = pd.Series([], dtype=object)
res = to_numeric(s)
expected = pd.Series([], dtype=np.int64)
tm.assert_series_equal(res, expected)
# Original issue example
res = to_numeric(s, errors='coerce', downcast='integer')
expected = pd.Series([], dtype=np.int8)
tm.assert_series_equal(res, expected)
def test_series(self):
s = pd.Series(['1', '-3.14', '7'])
res = to_numeric(s)
expected = pd.Series([1, -3.14, 7])
tm.assert_series_equal(res, expected)
s = pd.Series(['1', '-3.14', 7])
res = to_numeric(s)
tm.assert_series_equal(res, expected)
def test_series_numeric(self):
s = pd.Series([1, 3, 4, 5], index=list('ABCD'), name='XXX')
res = to_numeric(s)
tm.assert_series_equal(res, s)
s = pd.Series([1., 3., 4., 5.], index=list('ABCD'), name='XXX')
res = to_numeric(s)
tm.assert_series_equal(res, s)
# bool is regarded as numeric
s = pd.Series([True, False, True, True],
index=list('ABCD'), name='XXX')
res = to_numeric(s)
tm.assert_series_equal(res, s)
def test_error(self):
s = pd.Series([1, -3.14, 'apple'])
msg = 'Unable to parse string "apple" at position 2'
with tm.assert_raises_regex(ValueError, msg):
to_numeric(s, errors='raise')
res = to_numeric(s, errors='ignore')
expected = pd.Series([1, -3.14, 'apple'])
tm.assert_series_equal(res, expected)
res = to_numeric(s, errors='coerce')
expected = pd.Series([1, -3.14, np.nan])
tm.assert_series_equal(res, expected)
s = pd.Series(['orange', 1, -3.14, 'apple'])
msg = 'Unable to parse string "orange" at position 0'
with tm.assert_raises_regex(ValueError, msg):
to_numeric(s, errors='raise')
def test_error_seen_bool(self):
s = pd.Series([True, False, 'apple'])
msg = 'Unable to parse string "apple" at position 2'
with tm.assert_raises_regex(ValueError, msg):
to_numeric(s, errors='raise')
res = to_numeric(s, errors='ignore')
expected = pd.Series([True, False, 'apple'])
tm.assert_series_equal(res, expected)
# coerces to float
res = to_numeric(s, errors='coerce')
expected = pd.Series([1., 0., np.nan])
tm.assert_series_equal(res, expected)
def test_list(self):
s = ['1', '-3.14', '7']
res = to_numeric(s)
expected = np.array([1, -3.14, 7])
tm.assert_numpy_array_equal(res, expected)
def test_list_numeric(self):
s = [1, 3, 4, 5]
res = to_numeric(s)
tm.assert_numpy_array_equal(res, np.array(s, dtype=np.int64))
s = [1., 3., 4., 5.]
res = to_numeric(s)
tm.assert_numpy_array_equal(res, np.array(s))
# bool is regarded as numeric
s = [True, False, True, True]
res = to_numeric(s)
tm.assert_numpy_array_equal(res, np.array(s))
def test_numeric(self):
s = pd.Series([1, -3.14, 7], dtype='O')
res = to_numeric(s)
expected = pd.Series([1, -3.14, 7])
tm.assert_series_equal(res, expected)
s = pd.Series([1, -3.14, 7])
res = to_numeric(s)
tm.assert_series_equal(res, expected)
# GH 14827
df = pd.DataFrame(dict(
a=[1.2, decimal.Decimal(3.14), decimal.Decimal("infinity"), '0.1'],
b=[1.0, 2.0, 3.0, 4.0],
))
expected = pd.DataFrame(dict(
a=[1.2, 3.14, np.inf, 0.1],
b=[1.0, 2.0, 3.0, 4.0],
))
# Test to_numeric over one column
df_copy = df.copy()
df_copy['a'] = df_copy['a'].apply(to_numeric)
tm.assert_frame_equal(df_copy, expected)
# Test to_numeric over multiple columns
df_copy = df.copy()
df_copy[['a', 'b']] = df_copy[['a', 'b']].apply(to_numeric)
tm.assert_frame_equal(df_copy, expected)
def test_numeric_lists_and_arrays(self):
# Test to_numeric with embedded lists and arrays
df = pd.DataFrame(dict(
a=[[decimal.Decimal(3.14), 1.0], decimal.Decimal(1.6), 0.1]
))
df['a'] = df['a'].apply(to_numeric)
expected = pd.DataFrame(dict(
a=[[3.14, 1.0], 1.6, 0.1],
))
tm.assert_frame_equal(df, expected)
df = pd.DataFrame(dict(
a=[np.array([decimal.Decimal(3.14), 1.0]), 0.1]
))
df['a'] = df['a'].apply(to_numeric)
expected = pd.DataFrame(dict(
a=[[3.14, 1.0], 0.1],
))
tm.assert_frame_equal(df, expected)
def test_all_nan(self):
s = pd.Series(['a', 'b', 'c'])
res = to_numeric(s, errors='coerce')
expected = pd.Series([np.nan, np.nan, np.nan])
tm.assert_series_equal(res, expected)
def test_type_check(self):
# GH 11776
df = pd.DataFrame({'a': [1, -3.14, 7], 'b': ['4', '5', '6']})
with tm.assert_raises_regex(TypeError, "1-d array"):
to_numeric(df)
for errors in ['ignore', 'raise', 'coerce']:
with tm.assert_raises_regex(TypeError, "1-d array"):
to_numeric(df, errors=errors)
def test_scalar(self):
assert pd.to_numeric(1) == 1
assert pd.to_numeric(1.1) == 1.1
assert pd.to_numeric('1') == 1
assert pd.to_numeric('1.1') == 1.1
with pytest.raises(ValueError):
to_numeric('XX', errors='raise')
assert to_numeric('XX', errors='ignore') == 'XX'
assert np.isnan(to_numeric('XX', errors='coerce'))
def test_numeric_dtypes(self):
idx = pd.Index([1, 2, 3], name='xxx')
res = pd.to_numeric(idx)
tm.assert_index_equal(res, idx)
res = pd.to_numeric(pd.Series(idx, name='xxx'))
tm.assert_series_equal(res, pd.Series(idx, name='xxx'))
res = pd.to_numeric(idx.values)
tm.assert_numpy_array_equal(res, idx.values)
idx = pd.Index([1., np.nan, 3., np.nan], name='xxx')
res = pd.to_numeric(idx)
tm.assert_index_equal(res, idx)
res = pd.to_numeric(pd.Series(idx, name='xxx'))
tm.assert_series_equal(res, pd.Series(idx, name='xxx'))
res = pd.to_numeric(idx.values)
tm.assert_numpy_array_equal(res, idx.values)
def test_str(self):
idx = pd.Index(['1', '2', '3'], name='xxx')
exp = np.array([1, 2, 3], dtype='int64')
res = pd.to_numeric(idx)
tm.assert_index_equal(res, pd.Index(exp, name='xxx'))
res = pd.to_numeric(pd.Series(idx, name='xxx'))
tm.assert_series_equal(res, pd.Series(exp, name='xxx'))
res = pd.to_numeric(idx.values)
tm.assert_numpy_array_equal(res, exp)
idx = pd.Index(['1.5', '2.7', '3.4'], name='xxx')
exp = np.array([1.5, 2.7, 3.4])
res = pd.to_numeric(idx)
tm.assert_index_equal(res, pd.Index(exp, name='xxx'))
res = pd.to_numeric(pd.Series(idx, name='xxx'))
tm.assert_series_equal(res, pd.Series(exp, name='xxx'))
res = pd.to_numeric(idx.values)
tm.assert_numpy_array_equal(res, exp)
def test_datetimelike(self):
for tz in [None, 'US/Eastern', 'Asia/Tokyo']:
idx = pd.date_range('20130101', periods=3, tz=tz, name='xxx')
res = pd.to_numeric(idx)
tm.assert_index_equal(res, pd.Index(idx.asi8, name='xxx'))
res = pd.to_numeric(pd.Series(idx, name='xxx'))
tm.assert_series_equal(res, pd.Series(idx.asi8, name='xxx'))
res = pd.to_numeric(idx.values)
tm.assert_numpy_array_equal(res, idx.asi8)
def test_timedelta(self):
idx = pd.timedelta_range('1 days', periods=3, freq='D', name='xxx')
res = pd.to_numeric(idx)
tm.assert_index_equal(res, pd.Index(idx.asi8, name='xxx'))
res = pd.to_numeric(pd.Series(idx, name='xxx'))
tm.assert_series_equal(res, pd.Series(idx.asi8, name='xxx'))
res = pd.to_numeric(idx.values)
tm.assert_numpy_array_equal(res, idx.asi8)
def test_period(self):
idx = pd.period_range('2011-01', periods=3, freq='M', name='xxx')
res = pd.to_numeric(idx)
tm.assert_index_equal(res, pd.Index(idx.asi8, name='xxx'))
# ToDo: enable when we can support native PeriodDtype
# res = pd.to_numeric(pd.Series(idx, name='xxx'))
# tm.assert_series_equal(res, pd.Series(idx.asi8, name='xxx'))
def test_non_hashable(self):
# Test for Bug #13324
s = pd.Series([[10.0, 2], 1.0, 'apple'])
res = pd.to_numeric(s, errors='coerce')
tm.assert_series_equal(res, pd.Series([np.nan, 1.0, np.nan]))
res = pd.to_numeric(s, errors='ignore')
tm.assert_series_equal(res, pd.Series([[10.0, 2], 1.0, 'apple']))
with tm.assert_raises_regex(TypeError, "Invalid object type"):
pd.to_numeric(s)
def test_downcast(self):
# see gh-13352
mixed_data = ['1', 2, 3]
int_data = [1, 2, 3]
date_data = np.array(['1970-01-02', '1970-01-03',
'1970-01-04'], dtype='datetime64[D]')
invalid_downcast = 'unsigned-integer'
msg = 'invalid downcasting method provided'
smallest_int_dtype = np.dtype(np.typecodes['Integer'][0])
smallest_uint_dtype = np.dtype(np.typecodes['UnsignedInteger'][0])
# support below np.float32 is rare and far between
float_32_char = np.dtype(np.float32).char
smallest_float_dtype = float_32_char
for data in (mixed_data, int_data, date_data):
with tm.assert_raises_regex(ValueError, msg):
pd.to_numeric(data, downcast=invalid_downcast)
expected = np.array([1, 2, 3], dtype=np.int64)
res = pd.to_numeric(data)
tm.assert_numpy_array_equal(res, expected)
res = pd.to_numeric(data, downcast=None)
tm.assert_numpy_array_equal(res, expected)
expected = np.array([1, 2, 3], dtype=smallest_int_dtype)
for signed_downcast in ('integer', 'signed'):
res = pd.to_numeric(data, downcast=signed_downcast)
tm.assert_numpy_array_equal(res, expected)
expected = np.array([1, 2, 3], dtype=smallest_uint_dtype)
res = pd.to_numeric(data, downcast='unsigned')
tm.assert_numpy_array_equal(res, expected)
expected = np.array([1, 2, 3], dtype=smallest_float_dtype)
res = pd.to_numeric(data, downcast='float')
tm.assert_numpy_array_equal(res, expected)
# if we can't successfully cast the given
# data to a numeric dtype, do not bother
# with the downcast parameter
data = ['foo', 2, 3]
expected = np.array(data, dtype=object)
res = pd.to_numeric(data, errors='ignore',
downcast='unsigned')
tm.assert_numpy_array_equal(res, expected)
# cannot cast to an unsigned integer because
# we have a negative number
data = ['-1', 2, 3]
expected = np.array([-1, 2, 3], dtype=np.int64)
res = pd.to_numeric(data, downcast='unsigned')
tm.assert_numpy_array_equal(res, expected)
# cannot cast to an integer (signed or unsigned)
# because we have a float number
data = (['1.1', 2, 3],
[10000.0, 20000, 3000, 40000.36, 50000, 50000.00])
expected = (np.array([1.1, 2, 3], dtype=np.float64),
np.array([10000.0, 20000, 3000,
40000.36, 50000, 50000.00], dtype=np.float64))
for _data, _expected in zip(data, expected):
for downcast in ('integer', 'signed', 'unsigned'):
res = pd.to_numeric(_data, downcast=downcast)
tm.assert_numpy_array_equal(res, _expected)
# the smallest integer dtype need not be np.(u)int8
data = ['256', 257, 258]
for downcast, expected_dtype in zip(
['integer', 'signed', 'unsigned'],
[np.int16, np.int16, np.uint16]):
expected = np.array([256, 257, 258], dtype=expected_dtype)
res = pd.to_numeric(data, downcast=downcast)
tm.assert_numpy_array_equal(res, expected)
def test_downcast_limits(self):
# Test the limits of each downcast. Bug: #14401.
# Check to make sure numpy is new enough to run this test.
if _np_version_under1p9:
pytest.skip("Numpy version is under 1.9")
i = 'integer'
u = 'unsigned'
dtype_downcast_min_max = [
('int8', i, [iinfo(np.int8).min, iinfo(np.int8).max]),
('int16', i, [iinfo(np.int16).min, iinfo(np.int16).max]),
('int32', i, [iinfo(np.int32).min, iinfo(np.int32).max]),
('int64', i, [iinfo(np.int64).min, iinfo(np.int64).max]),
('uint8', u, [iinfo(np.uint8).min, iinfo(np.uint8).max]),
('uint16', u, [iinfo(np.uint16).min, iinfo(np.uint16).max]),
('uint32', u, [iinfo(np.uint32).min, iinfo(np.uint32).max]),
('uint64', u, [iinfo(np.uint64).min, iinfo(np.uint64).max]),
('int16', i, [iinfo(np.int8).min, iinfo(np.int8).max + 1]),
('int32', i, [iinfo(np.int16).min, iinfo(np.int16).max + 1]),
('int64', i, [iinfo(np.int32).min, iinfo(np.int32).max + 1]),
('int16', i, [iinfo(np.int8).min - 1, iinfo(np.int16).max]),
('int32', i, [iinfo(np.int16).min - 1, iinfo(np.int32).max]),
('int64', i, [iinfo(np.int32).min - 1, iinfo(np.int64).max]),
('uint16', u, [iinfo(np.uint8).min, iinfo(np.uint8).max + 1]),
('uint32', u, [iinfo(np.uint16).min, iinfo(np.uint16).max + 1]),
('uint64', u, [iinfo(np.uint32).min, iinfo(np.uint32).max + 1])
]
for dtype, downcast, min_max in dtype_downcast_min_max:
series = pd.to_numeric(pd.Series(min_max), downcast=downcast)
assert series.dtype == dtype
| mit | -1,611,026,365,908,463,400 | 36.401554 | 79 | 0.549837 | false |
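Three one-liners capture the core behaviours the test class above pins down:

```python
import numpy as np
import pandas as pd

assert pd.to_numeric(['1', '2', '3']).dtype == np.int64                      # default integer dtype
assert pd.to_numeric(['1', '2', '3'], downcast='integer').dtype == np.int8  # smallest fitting dtype
assert np.isnan(pd.to_numeric('XX', errors='coerce'))                        # unparseable -> NaN
```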
Hassan93/openTima | node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py | 2485 | 5536 | # This file comes from
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.
"""Python module for generating .ninja files.
Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""
import textwrap
import re
def escape_path(word):
return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
class Writer(object):
def __init__(self, output, width=78):
self.output = output
self.width = width
def newline(self):
self.output.write('\n')
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2):
self.output.write('# ' + line + '\n')
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
def pool(self, name, depth):
self._line('pool %s' % name)
self.variable('depth', depth, indent=1)
def rule(self, name, command, description=None, depfile=None,
generator=False, pool=None, restat=False, rspfile=None,
rspfile_content=None, deps=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
if description:
self.variable('description', description, indent=1)
if depfile:
self.variable('depfile', depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
if pool:
self.variable('pool', pool, indent=1)
if restat:
self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
if deps:
self.variable('deps', deps, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None):
outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_path, all_inputs))
if implicit:
implicit = map(escape_path, self._as_list(implicit))
all_inputs.append('|')
all_inputs.extend(implicit)
if order_only:
order_only = map(escape_path, self._as_list(order_only))
all_inputs.append('||')
all_inputs.extend(order_only)
self._line('build %s: %s' % (' '.join(out_outputs),
' '.join([rule] + all_inputs)))
if variables:
if isinstance(variables, dict):
iterator = iter(variables.items())
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs
def include(self, path):
self._line('include %s' % path)
def subninja(self, path):
self._line('subninja %s' % path)
def default(self, paths):
self._line('default %s' % ' '.join(self._as_list(paths)))
def _count_dollars_before_index(self, s, i):
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == '$':
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(' $')
space = available_space
while True:
space = text.rfind(' ', 0, space)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(' ', space + 1)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + ' $\n')
text = text[space+1:]
# Subsequent lines are continuations, so indent them.
leading_space = ' ' * (indent+2)
self.output.write(leading_space + text + '\n')
def _as_list(self, input):
if input is None:
return []
if isinstance(input, list):
return input
return [input]
def escape(string):
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert '\n' not in string, 'Ninja syntax does not allow newlines'
# We only have one special metacharacter: '$'.
return string.replace('$', '$$')
| gpl-2.0 | -7,489,120,442,481,716,000 | 33.6 | 80 | 0.551662 | false |
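Typical use of the `Writer` above against an in-memory buffer (Python 3 string semantics assumed):

```python
import io

buf = io.StringIO()
n = Writer(buf)
n.comment('generated file -- do not edit')
n.rule('cc', command='gcc -c $in -o $out', description='CC $out')
n.build(['foo.o'], 'cc', inputs=['foo.c'])
print(buf.getvalue())
```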
alxgu/ansible | lib/ansible/modules/network/netvisor/_pn_trunk.py | 47 | 14298 | #!/usr/bin/python
""" PN CLI trunk-create/trunk-delete/trunk-modify """
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: pn_trunk
author: "Pluribus Networks (@amitsi)"
version_added: "2.2"
short_description: CLI command to create/delete/modify a trunk.
deprecated:
removed_in: '2.12'
why: Doesn't support latest Pluribus Networks netvisor
alternative: Latest modules will be pushed in Ansible future versions.
description:
- Execute trunk-create or trunk-delete command.
- Trunks can be used to aggregate network links at Layer 2 on the local
switch. Use this command to create a new trunk.
options:
pn_cliusername:
description:
- Provide login username if user is not root.
required: False
pn_clipassword:
description:
- Provide login password if user is not root.
required: False
pn_cliswitch:
description:
- Target switch(es) to run the cli on.
required: False
default: 'local'
state:
description:
- State the action to perform. Use 'present' to create trunk,
'absent' to delete trunk and 'update' to modify trunk.
required: True
choices: ['present', 'absent', 'update']
pn_name:
description:
- Specify the name for the trunk configuration.
required: true
pn_ports:
description:
- Specify the port number(s) for the link(s) to aggregate into the trunk.
- Required for trunk-create.
pn_speed:
description:
- Specify the port speed or disable the port.
choices: ['disable', '10m', '100m', '1g', '2.5g', '10g', '40g']
pn_egress_rate_limit:
description:
- Specify an egress port data rate limit for the configuration.
pn_jumbo:
description:
- Specify if the port can receive jumbo frames.
type: bool
pn_lacp_mode:
description:
- Specify the LACP mode for the configuration.
choices: ['off', 'passive', 'active']
pn_lacp_priority:
description:
- Specify the LACP priority. This is a number between 1 and 65535 with a
default value of 32768.
pn_lacp_timeout:
description:
- Specify the LACP time out as slow (30 seconds) or fast (4 seconds).
The default value is slow.
choices: ['slow', 'fast']
pn_lacp_fallback:
description:
- Specify the LACP fallback mode as bundles or individual.
choices: ['bundle', 'individual']
pn_lacp_fallback_timeout:
description:
- Specify the LACP fallback timeout in seconds. The range is between 30
and 60 seconds with a default value of 50 seconds.
pn_edge_switch:
description:
- Specify if the switch is an edge switch.
type: bool
pn_pause:
description:
- Specify if pause frames are sent.
type: bool
pn_description:
description:
- Specify a description for the trunk configuration.
pn_loopback:
description:
- Specify loopback if you want to use loopback.
type: bool
pn_mirror_receive:
description:
- Specify if the configuration receives mirrored traffic.
type: bool
pn_unknown_ucast_level:
description:
- Specify an unknown unicast level in percent. The default value is 100%.
pn_unknown_mcast_level:
description:
- Specify an unknown multicast level in percent. The default value is 100%.
pn_broadcast_level:
description:
- Specify a broadcast level in percent. The default value is 100%.
pn_port_macaddr:
description:
- Specify the MAC address of the port.
pn_loopvlans:
description:
- Specify a list of looping vlans.
pn_routing:
description:
- Specify if the port participates in routing on the network.
type: bool
pn_host:
description:
- Host facing port control setting.
type: bool
"""
EXAMPLES = """
- name: create trunk
pn_trunk:
state: 'present'
pn_name: 'spine-to-leaf'
pn_ports: '11,12,13,14'
- name: delete trunk
pn_trunk:
state: 'absent'
pn_name: 'spine-to-leaf'
"""
RETURN = """
command:
description: The CLI command run on the target node(s).
returned: always
type: str
stdout:
description: The set of responses from the trunk command.
returned: always
type: list
stderr:
description: The set of error responses from the trunk command.
returned: on error
type: list
changed:
description: Indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
import shlex
# Ansible boiler-plate
from ansible.module_utils.basic import AnsibleModule
TRUNK_EXISTS = None
def pn_cli(module):
"""
This method is to generate the cli portion to launch the Netvisor cli.
It parses the username, password, switch parameters from module.
:param module: The Ansible module to fetch username, password and switch
:return: returns the cli string for further processing
"""
username = module.params['pn_cliusername']
password = module.params['pn_clipassword']
cliswitch = module.params['pn_cliswitch']
if username and password:
cli = '/usr/bin/cli --quiet --user %s:%s ' % (username, password)
else:
cli = '/usr/bin/cli --quiet '
if cliswitch == 'local':
cli += ' switch-local '
else:
cli += ' switch ' + cliswitch
return cli
def check_cli(module, cli):
"""
This method checks for idempotency using the trunk-show command.
If a trunk with given name exists, return TRUNK_EXISTS as True else False.
:param module: The Ansible module to fetch input parameters
:param cli: The CLI string
:return Global Booleans: TRUNK_EXISTS
"""
name = module.params['pn_name']
show = cli + ' trunk-show format switch,name no-show-headers'
show = shlex.split(show)
out = module.run_command(show)[1]
out = out.split()
# Global flags
global TRUNK_EXISTS
if name in out:
TRUNK_EXISTS = True
else:
TRUNK_EXISTS = False
def run_cli(module, cli):
"""
This method executes the cli command on the target node(s) and returns the
output. The module then exits based on the output.
:param cli: the complete cli string to be executed on the target node(s).
:param module: The Ansible module to fetch command
"""
cliswitch = module.params['pn_cliswitch']
state = module.params['state']
command = get_command_from_state(state)
cmd = shlex.split(cli)
# 'out' contains the output
# 'err' contains the error messages
result, out, err = module.run_command(cmd)
print_cli = cli.split(cliswitch)[1]
# Response in JSON format
if result != 0:
module.exit_json(
command=print_cli,
stderr=err.strip(),
msg="%s operation failed" % command,
changed=False
)
if out:
module.exit_json(
command=print_cli,
stdout=out.strip(),
msg="%s operation completed" % command,
changed=True
)
else:
module.exit_json(
command=print_cli,
msg="%s operation completed" % command,
changed=True
)
def get_command_from_state(state):
"""
This method gets appropriate command name for the state specified. It
returns the command name for the specified state.
:param state: The state for which the respective command name is required.
"""
command = None
if state == 'present':
command = 'trunk-create'
if state == 'absent':
command = 'trunk-delete'
if state == 'update':
command = 'trunk-modify'
return command
def main():
""" This portion is for arguments parsing """
module = AnsibleModule(
argument_spec=dict(
pn_cliusername=dict(required=False, type='str'),
pn_clipassword=dict(required=False, type='str', no_log=True),
pn_cliswitch=dict(required=False, type='str', default='local'),
state=dict(required=True, type='str',
choices=['present', 'absent', 'update']),
pn_name=dict(required=True, type='str'),
pn_ports=dict(type='str'),
pn_speed=dict(type='str',
choices=['disable', '10m', '100m', '1g', '2.5g',
'10g', '40g']),
pn_egress_rate_limit=dict(type='str'),
pn_jumbo=dict(type='bool'),
pn_lacp_mode=dict(type='str', choices=[
'off', 'passive', 'active']),
pn_lacp_priority=dict(type='int'),
pn_lacp_timeout=dict(type='str', choices=['slow', 'fast']),
pn_lacp_fallback=dict(type='str', choices=[
'bundle', 'individual']),
pn_lacp_fallback_timeout=dict(type='str'),
pn_edge_switch=dict(type='bool'),
pn_pause=dict(type='bool'),
pn_description=dict(type='str'),
pn_loopback=dict(type='bool'),
pn_mirror_receive=dict(type='bool'),
pn_unknown_ucast_level=dict(type='str'),
pn_unknown_mcast_level=dict(type='str'),
pn_broadcast_level=dict(type='str'),
pn_port_macaddr=dict(type='str'),
pn_loopvlans=dict(type='str'),
pn_routing=dict(type='bool'),
pn_host=dict(type='bool')
),
required_if=(
["state", "present", ["pn_name", "pn_ports"]],
["state", "absent", ["pn_name"]],
["state", "update", ["pn_name"]]
)
)
# Accessing the arguments
state = module.params['state']
name = module.params['pn_name']
ports = module.params['pn_ports']
speed = module.params['pn_speed']
egress_rate_limit = module.params['pn_egress_rate_limit']
jumbo = module.params['pn_jumbo']
lacp_mode = module.params['pn_lacp_mode']
lacp_priority = module.params['pn_lacp_priority']
lacp_timeout = module.params['pn_lacp_timeout']
lacp_fallback = module.params['pn_lacp_fallback']
lacp_fallback_timeout = module.params['pn_lacp_fallback_timeout']
edge_switch = module.params['pn_edge_switch']
pause = module.params['pn_pause']
description = module.params['pn_description']
loopback = module.params['pn_loopback']
mirror_receive = module.params['pn_mirror_receive']
unknown_ucast_level = module.params['pn_unknown_ucast_level']
unknown_mcast_level = module.params['pn_unknown_mcast_level']
broadcast_level = module.params['pn_broadcast_level']
port_macaddr = module.params['pn_port_macaddr']
loopvlans = module.params['pn_loopvlans']
routing = module.params['pn_routing']
host = module.params['pn_host']
command = get_command_from_state(state)
# Building the CLI command string
cli = pn_cli(module)
if command == 'trunk-delete':
check_cli(module, cli)
if TRUNK_EXISTS is False:
module.exit_json(
skipped=True,
msg='Trunk with name %s does not exist' % name
)
cli += ' %s name %s ' % (command, name)
else:
if command == 'trunk-create':
check_cli(module, cli)
if TRUNK_EXISTS is True:
module.exit_json(
skipped=True,
msg='Trunk with name %s already exists' % name
)
cli += ' %s name %s ' % (command, name)
# Appending options
if ports:
cli += ' ports ' + ports
if speed:
cli += ' speed ' + speed
if egress_rate_limit:
cli += ' egress-rate-limit ' + egress_rate_limit
if jumbo is True:
cli += ' jumbo '
if jumbo is False:
cli += ' no-jumbo '
if lacp_mode:
cli += ' lacp-mode ' + lacp_mode
if lacp_priority:
            cli += ' lacp-priority ' + str(lacp_priority)
if lacp_timeout:
cli += ' lacp-timeout ' + lacp_timeout
if lacp_fallback:
cli += ' lacp-fallback ' + lacp_fallback
if lacp_fallback_timeout:
cli += ' lacp-fallback-timeout ' + lacp_fallback_timeout
if edge_switch is True:
cli += ' edge-switch '
if edge_switch is False:
cli += ' no-edge-switch '
if pause is True:
cli += ' pause '
if pause is False:
cli += ' no-pause '
if description:
cli += ' description ' + description
if loopback is True:
cli += ' loopback '
if loopback is False:
cli += ' no-loopback '
if mirror_receive is True:
cli += ' mirror-receive-only '
if mirror_receive is False:
cli += ' no-mirror-receive-only '
if unknown_ucast_level:
cli += ' unknown-ucast-level ' + unknown_ucast_level
if unknown_mcast_level:
cli += ' unknown-mcast-level ' + unknown_mcast_level
if broadcast_level:
cli += ' broadcast-level ' + broadcast_level
if port_macaddr:
cli += ' port-mac-address ' + port_macaddr
if loopvlans:
cli += ' loopvlans ' + loopvlans
if routing is True:
cli += ' routing '
if routing is False:
cli += ' no-routing '
if host is True:
cli += ' host-enable '
if host is False:
cli += ' host-disable '
run_cli(module, cli)
if __name__ == '__main__':
main()
| gpl-3.0 | -1,846,130,167,162,037,000 | 29.748387 | 81 | 0.602812 | false |
DoubleNegativeVisualEffects/gaffer | python/Gaffer/InfoPathFilter.py | 5 | 2873 | ##########################################################################
#
# Copyright (c) 2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from PathFilter import PathFilter
## A PathFilter which filters based on an item from Path.info() and an
# arbitrary match function.
class InfoPathFilter( PathFilter ) :
def __init__( self, infoKey, matcher, leafOnly=True, userData={} ) :
PathFilter.__init__( self, userData )
self.__infoKey = infoKey
self.__matcher = matcher
self.__leafOnly = leafOnly
	## The matcher and infoKey are set together in one call, because setting
	# them separately could leave an intermediate state where a new matcher
	# is paired with the old key.
def setMatcher( self, infoKey, matcher ) :
self.__infoKey = infoKey
self.__matcher = matcher
self.changedSignal()( self )
def getMatcher( self ) :
return self.__infoKey, self.__matcher
def _filter( self, paths ) :
if self.__matcher is None :
return paths
result = []
for p in paths :
if self.__leafOnly and not p.isLeaf() :
result.append( p )
else :
i = p.info()
if self.__infoKey in i :
if self.__matcher( i[self.__infoKey] ) :
result.append( p )
return result
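## Illustrative sketch, not part of Gaffer itself: filter a FileSystemPath
# by its "fileSystem:size" info item. The key name and size threshold are
# assumptions made for this example.
#
# import Gaffer
# path = Gaffer.FileSystemPath( "/tmp" )
# sizeFilter = InfoPathFilter( "fileSystem:size", lambda s : s > 1024 )
# path.setFilter( sizeFilter )
# # path.children() now yields only leaves larger than 1KiB; non-leaf
# # paths still pass through, because leafOnly defaults to True.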
| bsd-3-clause | -9,201,933,831,618,949,000 | 35.367089 | 77 | 0.666551 | false |
kefo/moto | moto/dynamodb2/comparisons.py | 4 | 1528 | from __future__ import unicode_literals
# TODO add tests for all of these
EQ_FUNCTION = lambda item_value, test_value: item_value == test_value # flake8: noqa
NE_FUNCTION = lambda item_value, test_value: item_value != test_value # flake8: noqa
LE_FUNCTION = lambda item_value, test_value: item_value <= test_value # flake8: noqa
LT_FUNCTION = lambda item_value, test_value: item_value < test_value # flake8: noqa
GE_FUNCTION = lambda item_value, test_value: item_value >= test_value # flake8: noqa
GT_FUNCTION = lambda item_value, test_value: item_value > test_value # flake8: noqa
COMPARISON_FUNCS = {
'EQ': EQ_FUNCTION,
'=': EQ_FUNCTION,
'NE': NE_FUNCTION,
'!=': NE_FUNCTION,
'LE': LE_FUNCTION,
'<=': LE_FUNCTION,
'LT': LT_FUNCTION,
'<': LT_FUNCTION,
'GE': GE_FUNCTION,
'>=': GE_FUNCTION,
'GT': GT_FUNCTION,
'>': GT_FUNCTION,
'NULL': lambda item_value: item_value is None,
'NOT_NULL': lambda item_value: item_value is not None,
'CONTAINS': lambda item_value, test_value: test_value in item_value,
'NOT_CONTAINS': lambda item_value, test_value: test_value not in item_value,
'BEGINS_WITH': lambda item_value, test_value: item_value.startswith(test_value),
'IN': lambda item_value, *test_values: item_value in test_values,
'BETWEEN': lambda item_value, lower_test_value, upper_test_value: lower_test_value <= item_value <= upper_test_value,
}
def get_comparison_func(range_comparison):
return COMPARISON_FUNCS.get(range_comparison)
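# Illustrative sketch (not part of moto): fetch a comparison function by its
# DynamoDB operator name and apply it. The values are made up.
def _example_comparisons():  # defined for illustration only, never called
    assert get_comparison_func('BETWEEN')(5, 1, 10)
    assert get_comparison_func('BEGINS_WITH')('moto', 'mo')
    assert get_comparison_func('IN')('b', 'a', 'b', 'c')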
| apache-2.0 | -8,555,215,095,376,118,000 | 36.268293 | 121 | 0.674084 | false |
zstackorg/zstack-woodpecker | integrationtest/vm/multihosts/bs/test_iso_vm_del_ops_all_expg_hot_migrate.py | 1 | 4687 | '''
Test deleting a VM's ISO: run all VM operations after the ISO is deleted,
expunge the ISO, then verify VM hot migration.
@author: SyZhao
'''
import os
import apibinding.inventory as inventory
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.zstack_test.zstack_test_image as test_image
import zstackwoodpecker.zstack_test.zstack_test_image as zstack_image_header
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.operations.vm_operations as vm_ops
import zstackwoodpecker.operations.image_operations as img_ops
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
image = None
def test():
global image
global test_obj_dict
allow_bs_list = [inventory.IMAGE_STORE_BACKUP_STORAGE_TYPE, inventory.CEPH_BACKUP_STORAGE_TYPE, inventory.SFTP_BACKUP_STORAGE_TYPE]
test_lib.skip_test_when_bs_type_not_in_list(allow_bs_list)
allow_ps_list = [inventory.CEPH_PRIMARY_STORAGE_TYPE, inventory.NFS_PRIMARY_STORAGE_TYPE, 'SharedMountPoint']
test_lib.skip_test_when_ps_type_not_in_list(allow_ps_list)
    # Precondition: this test requires more than one host.
hosts = res_ops.query_resource(res_ops.HOST)
if len(hosts) <= 1:
        test_util.test_skip("skip: this test requires more than one host")
bs_cond = res_ops.gen_query_conditions("status", '=', "Connected")
bss = res_ops.query_resource_fields(res_ops.BACKUP_STORAGE, bs_cond, None, fields=['uuid'])
#create disk offering
data_volume_size = 10737418240
disk_offering_option = test_util.DiskOfferingOption()
disk_offering_option.set_name('root-disk-iso')
disk_offering_option.set_diskSize(data_volume_size)
data_volume_offering = vol_ops.create_volume_offering(disk_offering_option)
test_obj_dict.add_disk_offering(data_volume_offering)
#create instance offering
cpuNum = 2
memorySize = 1024*1024*1024
name = 'iso-vm-offering'
new_offering_option = test_util.InstanceOfferingOption()
new_offering_option.set_cpuNum(cpuNum)
new_offering_option.set_memorySize(memorySize)
new_offering_option.set_name(name)
new_offering = vm_ops.create_instance_offering(new_offering_option)
test_obj_dict.add_instance_offering(new_offering)
#add iso
img_option = test_util.ImageOption()
img_option.set_name('iso1')
bs_uuid = res_ops.query_resource_fields(res_ops.BACKUP_STORAGE, [], None)[0].uuid
img_option.set_backup_storage_uuid_list([bs_uuid])
img_option.set_url(os.environ.get('isoForVmUrl'))
image_inv = img_ops.add_iso_template(img_option)
image_uuid = image_inv.uuid
image = test_image.ZstackTestImage()
image.set_image(image_inv)
image.set_creation_option(img_option)
test_obj_dict.add_image(image)
#create vm by iso
l3_name = os.environ.get('l3VlanNetworkName1')
l3_net_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid
root_disk_uuid = data_volume_offering.uuid
vm = test_stub.create_vm_with_iso([l3_net_uuid], image_uuid, 'iso-vm', root_disk_uuid, new_offering.uuid)
host_ip = test_lib.lib_find_host_by_vm(vm.get_vm()).managementIp
test_obj_dict.add_vm(vm)
#check vm
vm_inv = vm.get_vm()
vm_ip = vm_inv.vmNics[0].ip
#cmd ='[ -e /root ]'
#ssh_timeout = test_lib.SSH_TIMEOUT
#test_lib.SSH_TIMEOUT = 3600
test_lib.lib_set_vm_host_l2_ip(vm_inv)
test_lib.lib_wait_target_up(vm.get_vm().vmNics[0].ip, 22, 1800)
#if not test_lib.lib_ssh_vm_cmd_by_agent_with_retry(host_ip, vm_ip, 'root', 'password', cmd):
# test_lib.SSH_TIMEOUT = ssh_timeout
# test_util.test_fail("iso has been failed to installed.")
#test_lib.SSH_TIMEOUT = ssh_timeout
#delete iso
image.delete()
test_obj_dict.rm_image(image)
#vm ops test
test_stub.vm_ops_test(vm, "VM_TEST_ALL")
#expunge iso
image.expunge()
#detach iso
img_ops.detach_iso(vm.vm.uuid)
#vm ops test
test_stub.vm_ops_test(vm, "VM_TEST_MIGRATE")
vm.destroy()
vol_ops.delete_disk_offering(root_disk_uuid)
vm_ops.delete_instance_offering(new_offering.uuid)
test_obj_dict.rm_vm(vm)
test_obj_dict.rm_disk_offering(data_volume_offering)
test_obj_dict.rm_instance_offering(new_offering)
test_lib.lib_robot_cleanup(test_obj_dict)
    test_util.test_pass('ISO VM delete, all operations, expunge and hot migrate test Success')
# Will be called only if an exception happens in test().
def error_cleanup():
global image
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
try:
image.delete()
except:
pass
| apache-2.0 | -4,563,834,138,471,196,000 | 35.333333 | 135 | 0.706635 | false |
kakunbsc/enigma2.1 | lib/python/Screens/VirtualKeyBoard.py | 2 | 10905 | # -*- coding: iso-8859-1 -*-
from Components.Language import language
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.MenuList import MenuList
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from enigma import eListboxPythonMultiContent, gFont, RT_HALIGN_CENTER, RT_VALIGN_CENTER
from Screen import Screen
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN
from Tools.LoadPixmap import LoadPixmap
class VirtualKeyBoardList(MenuList):
def __init__(self, list, enableWrapAround=False):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", 28))
self.l.setItemHeight(45)
def VirtualKeyBoardEntryComponent(keys, selectedKey, shiftMode=False):
key_backspace = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_backspace.png"))
key_bg = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_bg.png"))
key_clr = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_clr.png"))
key_esc = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_esc.png"))
key_ok = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_ok.png"))
key_sel = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_sel.png"))
key_shift = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_shift.png"))
key_shift_sel = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_shift_sel.png"))
key_space = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/vkey_space.png"))
res = [ (keys) ]
x = 0
count = 0
if shiftMode:
shiftkey_png = key_shift_sel
else:
shiftkey_png = key_shift
for key in keys:
width = None
if key == "EXIT":
width = key_esc.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_esc))
elif key == "BACKSPACE":
width = key_backspace.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_backspace))
elif key == "CLEAR":
width = key_clr.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_clr))
elif key == "SHIFT":
width = shiftkey_png.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=shiftkey_png))
elif key == "SPACE":
width = key_space.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_space))
elif key == "OK":
width = key_ok.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_ok))
#elif key == "<-":
# res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(45, 45), png=key_left))
#elif key == "->":
# res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(45, 45), png=key_right))
else:
width = key_bg.size().width()
res.extend((
MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_bg),
MultiContentEntryText(pos=(x, 0), size=(width, 45), font=0, text=key.encode("utf-8"), flags=RT_HALIGN_CENTER | RT_VALIGN_CENTER)
))
if selectedKey == count:
width = key_sel.size().width()
res.append(MultiContentEntryPixmapAlphaTest(pos=(x, 0), size=(width, 45), png=key_sel))
if width is not None:
x += width
else:
x += 45
count += 1
return res
class VirtualKeyBoard(Screen):
def __init__(self, session, title="", text=""):
Screen.__init__(self, session)
self.keys_list = []
self.shiftkeys_list = []
self.lang = language.getLanguage()
if self.lang == 'de_DE':
self.keys_list = [
[u"EXIT", u"1", u"2", u"3", u"4", u"5", u"6", u"7", u"8", u"9", u"0", u"BACKSPACE"],
[u"q", u"w", u"e", u"r", u"t", u"z", u"u", u"i", u"o", u"p", u"ü", u"+"],
[u"a", u"s", u"d", u"f", u"g", u"h", u"j", u"k", u"l", u"ö", u"ä", u"#"],
[u"<", u"y", u"x", u"c", u"v", u"b", u"n", u"m", u",", ".", u"-", u"CLEAR"],
[u"SHIFT", u"SPACE", u"@", u"ß", u"OK"]]
self.shiftkeys_list = [
[u"EXIT", u"!", u'"', u"§", u"$", u"%", u"&", u"/", u"(", u")", u"=", u"BACKSPACE"],
[u"Q", u"W", u"E", u"R", u"T", u"Z", u"U", u"I", u"O", u"P", u"Ü", u"*"],
[u"A", u"S", u"D", u"F", u"G", u"H", u"J", u"K", u"L", u"Ö", u"Ä", u"'"],
[u">", u"Y", u"X", u"C", u"V", u"B", u"N", u"M", u";", u":", u"_", u"CLEAR"],
[u"SHIFT", u"SPACE", u"?", u"\\", u"OK"]]
elif self.lang == 'es_ES':
#still missing keys (u"ùÙ")
self.keys_list = [
[u"EXIT", u"1", u"2", u"3", u"4", u"5", u"6", u"7", u"8", u"9", u"0", u"BACKSPACE"],
[u"q", u"w", u"e", u"r", u"t", u"z", u"u", u"i", u"o", u"p", u"ú", u"+"],
[u"a", u"s", u"d", u"f", u"g", u"h", u"j", u"k", u"l", u"ó", u"á", u"#"],
[u"<", u"y", u"x", u"c", u"v", u"b", u"n", u"m", u",", ".", u"-", u"CLEAR"],
[u"SHIFT", u"SPACE", u"@", u"£", u"à", u"é", u"è", u"í", u"ì", u"ñ", u"ò", u"OK"]]
self.shiftkeys_list = [
[u"EXIT", u"!", u'"', u"§", u"$", u"%", u"&", u"/", u"(", u")", u"=", u"BACKSPACE"],
[u"Q", u"W", u"E", u"R", u"T", u"Z", u"U", u"I", u"O", u"P", u"Ú", u"*"],
[u"A", u"S", u"D", u"F", u"G", u"H", u"J", u"K", u"L", u"Ó", u"Á", u"'"],
[u">", u"Y", u"X", u"C", u"V", u"B", u"N", u"M", u";", u":", u"_", u"CLEAR"],
[u"SHIFT", u"SPACE", u"?", u"\\", u"À", u"É", u"È", u"Í", u"Ì", u"Ñ", u"Ò", u"OK"]]
elif self.lang in ('sv_SE', 'fi_FI'):
self.keys_list = [
[u"EXIT", u"1", u"2", u"3", u"4", u"5", u"6", u"7", u"8", u"9", u"0", u"BACKSPACE"],
[u"q", u"w", u"e", u"r", u"t", u"z", u"u", u"i", u"o", u"p", u"é", u"+"],
[u"a", u"s", u"d", u"f", u"g", u"h", u"j", u"k", u"l", u"ö", u"ä", u"#"],
[u"<", u"y", u"x", u"c", u"v", u"b", u"n", u"m", u",", ".", u"-", u"CLEAR"],
[u"SHIFT", u"SPACE", u"@", u"ß", u"å", u"OK"]]
self.shiftkeys_list = [
[u"EXIT", u"!", u'"', u"§", u"$", u"%", u"&", u"/", u"(", u")", u"=", u"BACKSPACE"],
[u"Q", u"W", u"E", u"R", u"T", u"Z", u"U", u"I", u"O", u"P", u"É", u"*"],
[u"A", u"S", u"D", u"F", u"G", u"H", u"J", u"K", u"L", u"Ö", u"Ä", u"'"],
[u">", u"Y", u"X", u"C", u"V", u"B", u"N", u"M", u";", u":", u"_", u"CLEAR"],
[u"SHIFT", u"SPACE", u"?", u"\\", u"Å", u"OK"]]
else:
self.keys_list = [
[u"EXIT", u"1", u"2", u"3", u"4", u"5", u"6", u"7", u"8", u"9", u"0", u"BACKSPACE"],
[u"q", u"w", u"e", u"r", u"t", u"z", u"u", u"i", u"o", u"p", u"+", u"@"],
[u"a", u"s", u"d", u"f", u"g", u"h", u"j", u"k", u"l", u"#", u"\\"],
[u"<", u"y", u"x", u"c", u"v", u"b", u"n", u"m", u",", ".", u"-", u"CLEAR"],
[u"SHIFT", u"SPACE", u"OK"]]
self.shiftkeys_list = [
[u"EXIT", u"!", u'"', u"§", u"$", u"%", u"&", u"/", u"(", u")", u"=", u"BACKSPACE"],
[u"Q", u"W", u"E", u"R", u"T", u"Z", u"U", u"I", u"O", u"P", u"*"],
[u"A", u"S", u"D", u"F", u"G", u"H", u"J", u"K", u"L", u"'", u"?"],
[u">", u"Y", u"X", u"C", u"V", u"B", u"N", u"M", u";", u":", u"_", u"CLEAR"],
[u"SHIFT", u"SPACE", u"OK"]]
self.shiftMode = False
self.text = text
self.selectedKey = 0
self["header"] = Label(title)
self["text"] = Label(self.text)
self["list"] = VirtualKeyBoardList([])
self["actions"] = ActionMap(["OkCancelActions", "WizardActions", "ColorActions"],
{
"ok": self.okClicked,
"cancel": self.exit,
"left": self.left,
"right": self.right,
"up": self.up,
"down": self.down,
"red": self.backClicked,
"green": self.ok
}, -2)
self.onLayoutFinish.append(self.buildVirtualKeyBoard)
		self.max_key = 47 + len(self.keys_list[4])
def buildVirtualKeyBoard(self, selectedKey=0):
list = []
if self.shiftMode:
self.k_list = self.shiftkeys_list
for keys in self.k_list:
if selectedKey < 12 and selectedKey > -1:
list.append(VirtualKeyBoardEntryComponent(keys, selectedKey,True))
else:
list.append(VirtualKeyBoardEntryComponent(keys, -1,True))
selectedKey -= 12
else:
self.k_list = self.keys_list
for keys in self.k_list:
if selectedKey < 12 and selectedKey > -1:
list.append(VirtualKeyBoardEntryComponent(keys, selectedKey))
else:
list.append(VirtualKeyBoardEntryComponent(keys, -1))
selectedKey -= 12
self["list"].setList(list)
def backClicked(self):
self.text = self["text"].getText()[:-1]
self["text"].setText(self.text)
def okClicked(self):
if self.shiftMode:
list = self.shiftkeys_list
else:
list = self.keys_list
selectedKey = self.selectedKey
text = None
for x in list:
if selectedKey < 12:
if selectedKey < len(x):
text = x[selectedKey]
break
else:
selectedKey -= 12
if text is None:
return
text = text.encode("utf-8")
if text == "EXIT":
self.close(None)
elif text == "BACKSPACE":
self.text = self["text"].getText()[:-1]
self["text"].setText(self.text)
elif text == "CLEAR":
self.text = ""
self["text"].setText(self.text)
elif text == "SHIFT":
			self.shiftMode = not self.shiftMode
self.buildVirtualKeyBoard(self.selectedKey)
elif text == "SPACE":
self.text += " "
self["text"].setText(self.text)
elif text == "OK":
self.close(self["text"].getText())
else:
self.text = self["text"].getText()
self.text += text
self["text"].setText(self.text)
def ok(self):
self.close(self["text"].getText())
def exit(self):
self.close(None)
def left(self):
self.selectedKey -= 1
if self.selectedKey == -1:
self.selectedKey = 11
elif self.selectedKey == 11:
self.selectedKey = 23
elif self.selectedKey == 23:
self.selectedKey = 35
elif self.selectedKey == 35:
self.selectedKey = 47
elif self.selectedKey == 47:
self.selectedKey = self.max_key
self.showActiveKey()
def right(self):
self.selectedKey += 1
if self.selectedKey == 12:
self.selectedKey = 0
elif self.selectedKey == 24:
self.selectedKey = 12
elif self.selectedKey == 36:
self.selectedKey = 24
elif self.selectedKey == 48:
self.selectedKey = 36
elif self.selectedKey > self.max_key:
self.selectedKey = 48
self.showActiveKey()
def up(self):
self.selectedKey -= 12
if (self.selectedKey < 0) and (self.selectedKey > (self.max_key-60)):
self.selectedKey += 48
elif self.selectedKey < 0:
self.selectedKey += 60
self.showActiveKey()
def down(self):
self.selectedKey += 12
if (self.selectedKey > self.max_key) and (self.selectedKey > 59):
self.selectedKey -= 60
elif self.selectedKey > self.max_key:
self.selectedKey -= 48
self.showActiveKey()
def showActiveKey(self):
self.buildVirtualKeyBoard(self.selectedKey)
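# Illustrative usage sketch (not part of this file): a VirtualKeyBoard is
# normally opened from another Screen via the session, with a callback that
# receives the entered text (or None on cancel). The method names below
# follow the usual enigma2 pattern and are assumptions for the example.
#
#	def openKeyboard(self):
#		self.session.openWithCallback(self.keyboardDone, VirtualKeyBoard,
#			title=_("Enter text"), text="")
#
#	def keyboardDone(self, text):
#		if text is not None:
#			print "entered:", text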
| gpl-2.0 | -6,924,088,789,754,726,000 | 33.729299 | 132 | 0.576341 | false |
rhjdjong/Slip | tests/integration/test_slip_file_unbuffered.py | 2 | 1139 | # Copyright (c) 2020. Ruud de Jong
# This file is part of the SlipLib project which is released under the MIT license.
# See https://github.com/rhjdjong/SlipLib for details.
# pylint: disable=relative-beyond-top-level
"""Test using SlipStream with an unbuffered file"""
from sliplib import encode, SlipStream
from .test_data import data, BaseFileTest
class TestUnbufferedFileAccess(BaseFileTest):
"""Test unbuffered SLIP file access."""
def test_reading_slip_file(self):
"""Test reading SLIP-encoded message"""
self.filepath.write_bytes(b''.join(encode(msg) for msg in data))
with self.filepath.open(mode='rb', buffering=0) as f:
slipstream = SlipStream(f)
for exp, act in zip(data, slipstream):
assert exp == act
def test_writing_slip_file(self):
"""Test writing SLIP-encoded messages"""
with self.filepath.open(mode='wb', buffering=0) as f:
slipstream = SlipStream(f)
for msg in data:
slipstream.send_msg(msg)
assert self.filepath.read_bytes() == b''.join(encode(msg) for msg in data)
| mit | 8,260,674,658,288,135,000 | 34.59375 | 84 | 0.65496 | false |
ff94315/hiwifi-openwrt-HC5661-HC5761 | staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/abc.py | 488 | 7145 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) according to PEP 3119."""
import types
from _weakrefset import WeakSet
# Instance of old-style class
class _C: pass
_InstanceType = type(_C())
def abstractmethod(funcobj):
"""A decorator indicating abstract methods.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract methods are overridden.
The abstract methods can be called using any of the normal
'super' call mechanisms.
Usage:
class C:
__metaclass__ = ABCMeta
@abstractmethod
def my_abstract_method(self, ...):
...
"""
funcobj.__isabstractmethod__ = True
return funcobj
class abstractproperty(property):
"""A decorator indicating abstract properties.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract properties are overridden.
The abstract properties can be called using any of the normal
'super' call mechanisms.
Usage:
class C:
__metaclass__ = ABCMeta
@abstractproperty
def my_abstract_property(self):
...
This defines a read-only property; you can also define a read-write
abstract property using the 'long' form of property declaration:
class C:
__metaclass__ = ABCMeta
def getx(self): ...
def setx(self, value): ...
x = abstractproperty(getx, setx)
"""
__isabstractmethod__ = True
class ABCMeta(type):
"""Metaclass for defining Abstract Base Classes (ABCs).
Use this metaclass to create an ABC. An ABC can be subclassed
directly, and then acts as a mix-in class. You can also register
unrelated concrete classes (even built-in classes) and unrelated
ABCs as 'virtual subclasses' -- these and their descendants will
be considered subclasses of the registering ABC by the built-in
issubclass() function, but the registering ABC won't show up in
their MRO (Method Resolution Order) nor will method
implementations defined by the registering ABC be callable (not
even via super()).
"""
# A global counter that is incremented each time a class is
# registered as a virtual subclass of anything. It forces the
# negative cache to be cleared before its next use.
_abc_invalidation_counter = 0
def __new__(mcls, name, bases, namespace):
cls = super(ABCMeta, mcls).__new__(mcls, name, bases, namespace)
# Compute set of abstract method names
abstracts = set(name
for name, value in namespace.items()
if getattr(value, "__isabstractmethod__", False))
for base in bases:
for name in getattr(base, "__abstractmethods__", set()):
value = getattr(cls, name, None)
if getattr(value, "__isabstractmethod__", False):
abstracts.add(name)
cls.__abstractmethods__ = frozenset(abstracts)
# Set up inheritance registry
cls._abc_registry = WeakSet()
cls._abc_cache = WeakSet()
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
return cls
def register(cls, subclass):
"""Register a virtual subclass of an ABC."""
if not isinstance(subclass, (type, types.ClassType)):
raise TypeError("Can only register classes")
if issubclass(subclass, cls):
return # Already a subclass
# Subtle: test for cycles *after* testing for "already a subclass";
# this means we allow X.register(X) and interpret it as a no-op.
if issubclass(cls, subclass):
# This would create a cycle, which is bad for the algorithm below
raise RuntimeError("Refusing to create an inheritance cycle")
cls._abc_registry.add(subclass)
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
def _dump_registry(cls, file=None):
"""Debug helper to print the ABC registry."""
print >> file, "Class: %s.%s" % (cls.__module__, cls.__name__)
print >> file, "Inv.counter: %s" % ABCMeta._abc_invalidation_counter
for name in sorted(cls.__dict__.keys()):
if name.startswith("_abc_"):
value = getattr(cls, name)
print >> file, "%s: %r" % (name, value)
def __instancecheck__(cls, instance):
"""Override for isinstance(instance, cls)."""
# Inline the cache checking when it's simple.
subclass = getattr(instance, '__class__', None)
if subclass is not None and subclass in cls._abc_cache:
return True
subtype = type(instance)
# Old-style instances
if subtype is _InstanceType:
subtype = subclass
if subtype is subclass or subclass is None:
if (cls._abc_negative_cache_version ==
ABCMeta._abc_invalidation_counter and
subtype in cls._abc_negative_cache):
return False
# Fall back to the subclass check.
return cls.__subclasscheck__(subtype)
return (cls.__subclasscheck__(subclass) or
cls.__subclasscheck__(subtype))
def __subclasscheck__(cls, subclass):
"""Override for issubclass(subclass, cls)."""
# Check cache
if subclass in cls._abc_cache:
return True
# Check negative cache; may have to invalidate
if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
# Invalidate the negative cache
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
elif subclass in cls._abc_negative_cache:
return False
# Check the subclass hook
ok = cls.__subclasshook__(subclass)
if ok is not NotImplemented:
assert isinstance(ok, bool)
if ok:
cls._abc_cache.add(subclass)
else:
cls._abc_negative_cache.add(subclass)
return ok
# Check if it's a direct subclass
if cls in getattr(subclass, '__mro__', ()):
cls._abc_cache.add(subclass)
return True
# Check if it's a subclass of a registered class (recursive)
for rcls in cls._abc_registry:
if issubclass(subclass, rcls):
cls._abc_cache.add(subclass)
return True
# Check if it's a subclass of a subclass (recursive)
for scls in cls.__subclasses__():
if issubclass(subclass, scls):
cls._abc_cache.add(subclass)
return True
# No dice; update negative cache
cls._abc_negative_cache.add(subclass)
return False
| gpl-2.0 | -430,393,113,692,658,500 | 37.621622 | 79 | 0.611337 | false |
baylee/django | tests/staticfiles_tests/cases.py | 38 | 4503 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import os
import shutil
import tempfile
from django.conf import settings
from django.core.management import call_command
from django.template import Context, Template
from django.test import SimpleTestCase, override_settings
from django.utils import six
from django.utils.encoding import force_text
from .settings import TEST_SETTINGS
class BaseStaticFilesMixin(object):
"""
Test case with a couple utility assertions.
"""
def assertFileContains(self, filepath, text):
self.assertIn(
text,
self._get_file(force_text(filepath)),
"'%s' not in '%s'" % (text, filepath),
)
def assertFileNotFound(self, filepath):
with self.assertRaises(IOError):
self._get_file(filepath)
def render_template(self, template, **kwargs):
if isinstance(template, six.string_types):
template = Template(template)
return template.render(Context(kwargs)).strip()
def static_template_snippet(self, path, asvar=False):
if asvar:
return "{%% load static from staticfiles %%}{%% static '%s' as var %%}{{ var }}" % path
return "{%% load static from staticfiles %%}{%% static '%s' %%}" % path
def assertStaticRenders(self, path, result, asvar=False, **kwargs):
template = self.static_template_snippet(path, asvar)
self.assertEqual(self.render_template(template, **kwargs), result)
def assertStaticRaises(self, exc, path, result, asvar=False, **kwargs):
with self.assertRaises(exc):
self.assertStaticRenders(path, result, **kwargs)
@override_settings(**TEST_SETTINGS)
class StaticFilesTestCase(BaseStaticFilesMixin, SimpleTestCase):
pass
@override_settings(**TEST_SETTINGS)
class CollectionTestCase(BaseStaticFilesMixin, SimpleTestCase):
"""
Tests shared by all file finding features (collectstatic,
findstatic, and static serve view).
    This relies on the asserts defined in BaseStaticFilesMixin, but
is separated because some test cases need those asserts without
all these tests.
"""
def setUp(self):
super(CollectionTestCase, self).setUp()
temp_dir = tempfile.mkdtemp()
# Override the STATIC_ROOT for all tests from setUp to tearDown
# rather than as a context manager
self.patched_settings = self.settings(STATIC_ROOT=temp_dir)
self.patched_settings.enable()
self.run_collectstatic()
# Same comment as in runtests.teardown.
self.addCleanup(shutil.rmtree, six.text_type(temp_dir))
def tearDown(self):
self.patched_settings.disable()
super(CollectionTestCase, self).tearDown()
def run_collectstatic(self, **kwargs):
call_command('collectstatic', interactive=False, verbosity=0,
ignore_patterns=['*.ignoreme'], **kwargs)
def _get_file(self, filepath):
assert filepath, 'filepath is empty.'
filepath = os.path.join(settings.STATIC_ROOT, filepath)
with codecs.open(filepath, "r", "utf-8") as f:
return f.read()
class TestDefaults(object):
"""
A few standard test cases.
"""
def test_staticfiles_dirs(self):
"""
Can find a file in a STATICFILES_DIRS directory.
"""
self.assertFileContains('test.txt', 'Can we find')
self.assertFileContains(os.path.join('prefix', 'test.txt'), 'Prefix')
def test_staticfiles_dirs_subdir(self):
"""
Can find a file in a subdirectory of a STATICFILES_DIRS
directory.
"""
self.assertFileContains('subdir/test.txt', 'Can we find')
def test_staticfiles_dirs_priority(self):
"""
File in STATICFILES_DIRS has priority over file in app.
"""
self.assertFileContains('test/file.txt', 'STATICFILES_DIRS')
def test_app_files(self):
"""
Can find a file in an app static/ directory.
"""
self.assertFileContains('test/file1.txt', 'file1 in the app dir')
def test_nonascii_filenames(self):
"""
Can find a file with non-ASCII character in an app static/ directory.
"""
self.assertFileContains('test/⊗.txt', '⊗ in the app dir')
def test_camelcase_filenames(self):
"""
Can find a file with capital letters.
"""
self.assertFileContains('test/camelCase.txt', 'camelCase')
| bsd-3-clause | 1,951,230,834,800,181,000 | 32.325926 | 99 | 0.647477 | false |
calroc/Tkinter3D | neat_demo.py | 1 | 1755 | #!/usr/bin/env python
#
# Copyright 2011, 2012, 2013, 2014 Simon Forman
#
# This file is part of Tkinter3D.
#
# Tkinter3D is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tkinter3D is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tkinter3D. If not, see <http://www.gnu.org/licenses/>.
#
from tkinter import Tk
from canvas3d import Canvas3D, Frame3D, roty
from scene import Thing3D as dot
root = Tk()
root.title("Rotating Cube Demo")
c = Canvas3D(root)
c.pack(expand=1, fill='both')
# Zoom out a little.
c.frame.T.z += -300.0
# Create a dot at world origin (0, 0, 0).
origin = dot(c, width=4)
c.frame.things.append(origin)
# Make a frame for some objects to be in.
cube_frame = Frame3D()
c.frame.subframes.append(cube_frame)
# Add a Z component to the cube frame's translation to offset the cube
# from the world frame.
cube_frame.T.z += 300.0
# Make a cube.
F = -100.0, 100.0
for x, y, z in ((x, y, z) for x in F for y in F for z in F):
cube_frame.things.append(dot(c, x, y, z, width=6))
# Apply a rotation repeatedly to our cube.
rotation = roty(360/30/3) # 4 degrees.
def delta():
cube_frame.RM *= rotation
c.after(60, delta) # Retrigger every sixty milliseconds.
# Start everything running.
delta()
c.start_updating()
root.mainloop()
| gpl-3.0 | 5,097,531,412,627,403,000 | 25.590909 | 73 | 0.698006 | false |
nanchenchen/emoticon-analysis | emoticonvis/apps/enhance/management/commands/build_tweet_dictionary.py | 1 | 1427 | from django.core.management.base import BaseCommand, make_option, CommandError
from time import time
import path
from django.db import transaction
class Command(BaseCommand):
help = "From Tweet Parser results, extract words and connect with messages for a dataset."
args = '<dataset_id> <parsed_filename> [...]'
def handle(self, dataset_id, *filenames, **options):
if not dataset_id:
raise CommandError("Dataset id is required.")
try:
dataset_id = int(dataset_id)
except ValueError:
raise CommandError("Dataset id must be a number.")
if len(filenames) == 0:
raise CommandError('At least one filename must be provided.')
for f in filenames:
if not path.path(f).exists():
raise CommandError("Filename %s does not exist" % f)
from emoticonvis.apps.enhance.tasks import import_from_tweet_parser_results
start = time()
for i, parsed_tweet_filename in enumerate(filenames):
if len(filenames) > 1:
print "Reading file %d of %d %s" % (i + 1, len(filenames), parsed_tweet_filename)
else:
print "Reading file %s" % parsed_tweet_filename
with transaction.atomic(savepoint=False):
import_from_tweet_parser_results(dataset_id, parsed_tweet_filename)
print "Time: %.2fs" % (time() - start) | mit | -8,923,610,039,147,219,000 | 37.594595 | 97 | 0.622285 | false |
hamiltont/CouchPotatoServer | libs/oauthlib/oauth1/rfc5849/signature.py | 112 | 19020 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
oauthlib.oauth1.rfc5849.signature
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module represents a direct implementation of `section 3.4`_ of the spec.
Terminology:
* Client: software interfacing with an OAuth API
* Server: the API provider
* Resource Owner: the user who is granting authorization to the client
Steps for signing a request:
1. Collect parameters from the uri query, auth header, & body
2. Normalize those parameters
3. Normalize the uri
4. Pass the normalized uri, normalized parameters, and http method to
construct the base string
5. Pass the base string and any keys needed to a signing function
.. _`section 3.4`: http://tools.ietf.org/html/rfc5849#section-3.4
"""
import binascii
import hashlib
import hmac
import urlparse
from . import utils
from oauthlib.common import extract_params
def construct_base_string(http_method, base_string_uri,
normalized_encoded_request_parameters):
"""**String Construction**
Per `section 3.4.1.1`_ of the spec.
For example, the HTTP request::
POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Authorization: OAuth realm="Example",
oauth_consumer_key="9djdj82h48djs9d2",
oauth_token="kkk9d7dh3k39sjv7",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131201",
oauth_nonce="7d8f3e4a",
oauth_signature="bYT5CMsGcbgUdFHObYMEfcx6bsw%3D"

        c2&a3=2+q
is represented by the following signature base string (line breaks
are for display purposes only)::
POST&http%3A%2F%2Fexample.com%2Frequest&a2%3Dr%2520b%26a3%3D2%2520q
%26a3%3Da%26b5%3D%253D%25253D%26c%2540%3D%26c2%3D%26oauth_consumer_
key%3D9djdj82h48djs9d2%26oauth_nonce%3D7d8f3e4a%26oauth_signature_m
ethod%3DHMAC-SHA1%26oauth_timestamp%3D137131201%26oauth_token%3Dkkk
9d7dh3k39sjv7
.. _`section 3.4.1.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.1
"""
# The signature base string is constructed by concatenating together,
# in order, the following HTTP request elements:
# 1. The HTTP request method in uppercase. For example: "HEAD",
# "GET", "POST", etc. If the request uses a custom HTTP method, it
# MUST be encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
base_string = utils.escape(http_method.upper())
# 2. An "&" character (ASCII code 38).
base_string += u'&'
# 3. The base string URI from `Section 3.4.1.2`_, after being encoded
# (`Section 3.6`_).
#
# .. _`Section 3.4.1.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.2
# .. _`Section 3.4.6`: http://tools.ietf.org/html/rfc5849#section-3.4.6
base_string += utils.escape(base_string_uri)
# 4. An "&" character (ASCII code 38).
base_string += u'&'
# 5. The request parameters as normalized in `Section 3.4.1.3.2`_, after
# being encoded (`Section 3.6`).
#
# .. _`Section 3.4.1.3.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
# .. _`Section 3.4.6`: http://tools.ietf.org/html/rfc5849#section-3.4.6
base_string += utils.escape(normalized_encoded_request_parameters)
return base_string
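# Illustrative sketch (not part of the upstream module): building a base
# string from inputs that are already normalized. The values are invented.
def _example_construct_base_string():
    # Result starts with u'POST&http%3A%2F%2Fexample.com%2Frequest&'
    return construct_base_string(
        u'POST',
        u'http://example.com/request',
        u'a2=r%20b&a3=a')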
def normalize_base_string_uri(uri):
"""**Base String URI**
Per `section 3.4.1.2`_ of the spec.
For example, the HTTP request::
GET /r%20v/X?id=123 HTTP/1.1
Host: EXAMPLE.COM:80
is represented by the base string URI: "http://example.com/r%20v/X".
In another example, the HTTPS request::
GET /?q=1 HTTP/1.1
Host: www.example.net:8080
is represented by the base string URI: "https://www.example.net:8080/".
.. _`section 3.4.1.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.2
"""
if not isinstance(uri, unicode):
raise ValueError('uri must be a unicode object.')
# FIXME: urlparse does not support unicode
scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
# The scheme, authority, and path of the request resource URI `RFC3986`
# are included by constructing an "http" or "https" URI representing
# the request resource (without the query or fragment) as follows:
#
# .. _`RFC2616`: http://tools.ietf.org/html/rfc3986
# 1. The scheme and host MUST be in lowercase.
scheme = scheme.lower()
netloc = netloc.lower()
# 2. The host and port values MUST match the content of the HTTP
# request "Host" header field.
# TODO: enforce this constraint
# 3. The port MUST be included if it is not the default port for the
# scheme, and MUST be excluded if it is the default. Specifically,
# the port MUST be excluded when making an HTTP request `RFC2616`_
# to port 80 or when making an HTTPS request `RFC2818`_ to port 443.
# All other non-default port numbers MUST be included.
#
# .. _`RFC2616`: http://tools.ietf.org/html/rfc2616
# .. _`RFC2818`: http://tools.ietf.org/html/rfc2818
default_ports = (
(u'http', u'80'),
(u'https', u'443'),
)
if u':' in netloc:
host, port = netloc.split(u':', 1)
if (scheme, port) in default_ports:
netloc = host
return urlparse.urlunparse((scheme, netloc, path, u'', u'', u''))
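# Illustrative sketch (not part of the upstream module): the default port is
# dropped and the scheme/host are lowercased, per section 3.4.1.2. The URI
# is invented for the example.
def _example_normalize_base_string_uri():
    # Expected result: u'http://example.com/r%20v/X'
    return normalize_base_string_uri(u'http://EXAMPLE.COM:80/r%20v/X?id=123')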
# ** Request Parameters **
#
# Per `section 3.4.1.3`_ of the spec.
#
# In order to guarantee a consistent and reproducible representation of
# the request parameters, the parameters are collected and decoded to
# their original decoded form. They are then sorted and encoded in a
# particular manner that is often different from their original
# encoding scheme, and concatenated into a single string.
#
# .. _`section 3.4.1.3`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3
def collect_parameters(uri_query='', body=[], headers=None,
exclude_oauth_signature=True):
"""**Parameter Sources**
Parameters starting with `oauth_` will be unescaped.
Body parameters must be supplied as a dict, a list of 2-tuples, or a
formencoded query string.
Headers must be supplied as a dict.
Per `section 3.4.1.3.1`_ of the spec.
For example, the HTTP request::
POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Authorization: OAuth realm="Example",
oauth_consumer_key="9djdj82h48djs9d2",
oauth_token="kkk9d7dh3k39sjv7",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131201",
oauth_nonce="7d8f3e4a",
oauth_signature="djosJKDKJSD8743243%2Fjdk33klY%3D"

        c2&a3=2+q
contains the following (fully decoded) parameters used in the
    signature base string::
+------------------------+------------------+
| Name | Value |
+------------------------+------------------+
| b5 | =%3D |
| a3 | a |
| c@ | |
| a2 | r b |
| oauth_consumer_key | 9djdj82h48djs9d2 |
| oauth_token | kkk9d7dh3k39sjv7 |
| oauth_signature_method | HMAC-SHA1 |
| oauth_timestamp | 137131201 |
| oauth_nonce | 7d8f3e4a |
| c2 | |
| a3 | 2 q |
+------------------------+------------------+
Note that the value of "b5" is "=%3D" and not "==". Both "c@" and
"c2" have empty values. While the encoding rules specified in this
specification for the purpose of constructing the signature base
string exclude the use of a "+" character (ASCII code 43) to
represent an encoded space character (ASCII code 32), this practice
is widely used in "application/x-www-form-urlencoded" encoded values,
and MUST be properly decoded, as demonstrated by one of the "a3"
parameter instances (the "a3" parameter is used twice in this
request).
.. _`section 3.4.1.3.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
"""
headers = headers or {}
params = []
# The parameters from the following sources are collected into a single
# list of name/value pairs:
# * The query component of the HTTP request URI as defined by
# `RFC3986, Section 3.4`_. The query component is parsed into a list
# of name/value pairs by treating it as an
# "application/x-www-form-urlencoded" string, separating the names
# and values and decoding them as defined by
# `W3C.REC-html40-19980424`_, Section 17.13.4.
#
# .. _`RFC3986, Section 3.4`: http://tools.ietf.org/html/rfc3986#section-3.4
# .. _`W3C.REC-html40-19980424`: http://tools.ietf.org/html/rfc5849#ref-W3C.REC-html40-19980424
if uri_query:
params.extend(urlparse.parse_qsl(uri_query, keep_blank_values=True))
# * The OAuth HTTP "Authorization" header field (`Section 3.5.1`_) if
# present. The header's content is parsed into a list of name/value
# pairs excluding the "realm" parameter if present. The parameter
# values are decoded as defined by `Section 3.5.1`_.
#
# .. _`Section 3.5.1`: http://tools.ietf.org/html/rfc5849#section-3.5.1
if headers:
headers_lower = dict((k.lower(), v) for k, v in headers.items())
authorization_header = headers_lower.get(u'authorization')
if authorization_header is not None:
params.extend([i for i in utils.parse_authorization_header(
authorization_header) if i[0] != u'realm'])
# * The HTTP request entity-body, but only if all of the following
# conditions are met:
# * The entity-body is single-part.
#
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# `W3C.REC-html40-19980424`_.
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
#
# .._`W3C.REC-html40-19980424`: http://tools.ietf.org/html/rfc5849#ref-W3C.REC-html40-19980424
# TODO: enforce header param inclusion conditions
bodyparams = extract_params(body) or []
params.extend(bodyparams)
# ensure all oauth params are unescaped
unescaped_params = []
for k, v in params:
if k.startswith(u'oauth_'):
v = utils.unescape(v)
unescaped_params.append((k, v))
# The "oauth_signature" parameter MUST be excluded from the signature
# base string if present.
if exclude_oauth_signature:
unescaped_params = filter(lambda i: i[0] != u'oauth_signature',
unescaped_params)
return unescaped_params
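# Illustrative sketch (not part of the upstream module): collecting
# parameters from a query string and an Authorization header. All values
# are invented for the example.
def _example_collect_parameters():
    return collect_parameters(
        uri_query=u'b5=%3D%253D&a3=a&c%40=&a2=r%20b',
        headers={u'Authorization': u'OAuth realm="Example", '
                                   u'oauth_consumer_key="9djdj82h48djs9d2", '
                                   u'oauth_nonce="7d8f3e4a"'})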
def normalize_parameters(params):
"""**Parameters Normalization**
Per `section 3.4.1.3.2`_ of the spec.
For example, the list of parameters from the previous section would
be normalized as follows:
Encoded::
+------------------------+------------------+
| Name | Value |
+------------------------+------------------+
| b5 | %3D%253D |
| a3 | a |
| c%40 | |
| a2 | r%20b |
| oauth_consumer_key | 9djdj82h48djs9d2 |
| oauth_token | kkk9d7dh3k39sjv7 |
| oauth_signature_method | HMAC-SHA1 |
| oauth_timestamp | 137131201 |
| oauth_nonce | 7d8f3e4a |
| c2 | |
| a3 | 2%20q |
+------------------------+------------------+
Sorted::
+------------------------+------------------+
| Name | Value |
+------------------------+------------------+
| a2 | r%20b |
| a3 | 2%20q |
| a3 | a |
| b5 | %3D%253D |
| c%40 | |
| c2 | |
| oauth_consumer_key | 9djdj82h48djs9d2 |
| oauth_nonce | 7d8f3e4a |
| oauth_signature_method | HMAC-SHA1 |
| oauth_timestamp | 137131201 |
| oauth_token | kkk9d7dh3k39sjv7 |
+------------------------+------------------+
Concatenated Pairs::
+-------------------------------------+
| Name=Value |
+-------------------------------------+
| a2=r%20b |
| a3=2%20q |
| a3=a |
| b5=%3D%253D |
| c%40= |
| c2= |
| oauth_consumer_key=9djdj82h48djs9d2 |
| oauth_nonce=7d8f3e4a |
| oauth_signature_method=HMAC-SHA1 |
| oauth_timestamp=137131201 |
| oauth_token=kkk9d7dh3k39sjv7 |
+-------------------------------------+
and concatenated together into a single string (line breaks are for
display purposes only)::
a2=r%20b&a3=2%20q&a3=a&b5=%3D%253D&c%40=&c2=&oauth_consumer_key=9dj
dj82h48djs9d2&oauth_nonce=7d8f3e4a&oauth_signature_method=HMAC-SHA1
&oauth_timestamp=137131201&oauth_token=kkk9d7dh3k39sjv7
.. _`section 3.4.1.3.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
"""
# The parameters collected in `Section 3.4.1.3`_ are normalized into a
# single string as follows:
#
# .. _`Section 3.4.1.3`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3
# 1. First, the name and value of each parameter are encoded
# (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
key_values = [(utils.escape(k), utils.escape(v)) for k, v in params]
# 2. The parameters are sorted by name, using ascending byte value
# ordering. If two or more parameters share the same name, they
# are sorted by their value.
key_values.sort()
# 3. The name of each parameter is concatenated to its corresponding
# value using an "=" character (ASCII code 61) as a separator, even
# if the value is empty.
parameter_parts = [u'{0}={1}'.format(k, v) for k, v in key_values]
# 4. The sorted name/value pairs are concatenated together into a
# single string by using an "&" character (ASCII code 38) as
# separator.
return u'&'.join(parameter_parts)
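# Illustrative sketch (not part of the upstream module): the pairs are
# escaped, sorted and joined exactly as described above. Values are invented.
def _example_normalize_parameters():
    # Expected result: u'a2=r%20b&a3=a&b5=%3D%253D'
    return normalize_parameters([(u'b5', u'=%3D'), (u'a3', u'a'), (u'a2', u'r b')])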
def sign_hmac_sha1(base_string, client_secret, resource_owner_secret):
"""**HMAC-SHA1**
The "HMAC-SHA1" signature method uses the HMAC-SHA1 signature
algorithm as defined in `RFC2104`_::
digest = HMAC-SHA1 (key, text)
Per `section 3.4.2`_ of the spec.
.. _`RFC2104`: http://tools.ietf.org/html/rfc2104
.. _`section 3.4.2`: http://tools.ietf.org/html/rfc5849#section-3.4.2
"""
# The HMAC-SHA1 function variables are used in following way:
# text is set to the value of the signature base string from
# `Section 3.4.1.1`_.
#
# .. _`Section 3.4.1.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.1
text = base_string
# key is set to the concatenated values of:
# 1. The client shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
key = utils.escape(client_secret or u'')
# 2. An "&" character (ASCII code 38), which MUST be included
# even when either secret is empty.
key += u'&'
# 3. The token shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
key += utils.escape(resource_owner_secret or u'')
# FIXME: HMAC does not support unicode!
key_utf8 = key.encode('utf-8')
text_utf8 = text.encode('utf-8')
signature = hmac.new(key_utf8, text_utf8, hashlib.sha1)
# digest is used to set the value of the "oauth_signature" protocol
# parameter, after the result octet string is base64-encoded
# per `RFC2045, Section 6.8`.
#
# .. _`RFC2045, Section 6.8`: http://tools.ietf.org/html/rfc2045#section-6.8
return binascii.b2a_base64(signature.digest())[:-1].decode('utf-8')
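# Illustrative end-to-end sketch (not part of the upstream module): collect,
# normalize, build the base string and sign it with HMAC-SHA1. The secrets
# and parameter values are invented for the example.
def _example_sign_request():
    uri = u'http://example.com/request'
    params = collect_parameters(uri_query=u'a3=a&b5=%3D%253D')
    base_string = construct_base_string(
        u'POST', normalize_base_string_uri(uri), normalize_parameters(params))
    return sign_hmac_sha1(base_string, u'client-secret', u'token-secret')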
def sign_rsa_sha1(base_string, rsa_private_key):
"""**RSA-SHA1**
Per `section 3.4.3`_ of the spec.
The "RSA-SHA1" signature method uses the RSASSA-PKCS1-v1_5 signature
algorithm as defined in `RFC3447, Section 8.2`_ (also known as
PKCS#1), using SHA-1 as the hash function for EMSA-PKCS1-v1_5. To
use this method, the client MUST have established client credentials
with the server that included its RSA public key (in a manner that is
beyond the scope of this specification).
NOTE: this method requires the python-rsa library.
.. _`section 3.4.3`: http://tools.ietf.org/html/rfc5849#section-3.4.3
.. _`RFC3447, Section 8.2`: http://tools.ietf.org/html/rfc3447#section-8.2
"""
# TODO: finish RSA documentation
import rsa
key = rsa.PrivateKey.load_pkcs1(rsa_private_key)
sig = rsa.sign(base_string, key, 'SHA-1')
return binascii.b2a_base64(sig)[:-1]
def sign_plaintext(client_secret, resource_owner_secret):
"""Sign a request using plaintext.
Per `section 3.4.4`_ of the spec.
The "PLAINTEXT" method does not employ a signature algorithm. It
MUST be used with a transport-layer mechanism such as TLS or SSL (or
sent over a secure channel with equivalent protections). It does not
utilize the signature base string or the "oauth_timestamp" and
"oauth_nonce" parameters.
.. _`section 3.4.4`: http://tools.ietf.org/html/rfc5849#section-3.4.4
"""
# The "oauth_signature" protocol parameter is set to the concatenated
# value of:
# 1. The client shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
signature = utils.escape(client_secret or u'')
# 2. An "&" character (ASCII code 38), which MUST be included even
# when either secret is empty.
signature += u'&'
# 3. The token shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
signature += utils.escape(resource_owner_secret or u'')
return signature
| gpl-3.0 | -4,068,094,883,390,585,000 | 36.964072 | 99 | 0.583649 | false |
indashnet/InDashNet.Open.UN2000 | android/external/antlr/antlr-3.4/runtime/Python/tests/t047treeparser.py | 20 | 4342 | import unittest
import textwrap
import antlr3
import antlr3.tree
import testbase
class T(testbase.ANTLRTest):
def walkerClass(self, base):
class TWalker(base):
def __init__(self, *args, **kwargs):
base.__init__(self, *args, **kwargs)
self.traces = []
def traceIn(self, ruleName, ruleIndex):
self.traces.append('>'+ruleName)
def traceOut(self, ruleName, ruleIndex):
self.traces.append('<'+ruleName)
def recover(self, input, re):
# no error recovery yet, just crash!
raise
return TWalker
def setUp(self):
self.compileGrammar()
self.compileGrammar('t047treeparserWalker.g', options='-trace')
def testWalker(self):
input = textwrap.dedent(
'''\
char c;
int x;
void bar(int x);
int foo(int y, char d) {
int i;
for (i=0; i<3; i=i+1) {
x=3;
y=5;
}
}
''')
cStream = antlr3.StringStream(input)
lexer = self.getLexer(cStream)
tStream = antlr3.CommonTokenStream(lexer)
parser = self.getParser(tStream)
r = parser.program()
self.failUnlessEqual(
r.tree.toStringTree(),
"(VAR_DEF char c) (VAR_DEF int x) (FUNC_DECL (FUNC_HDR void bar (ARG_DEF int x))) (FUNC_DEF (FUNC_HDR int foo (ARG_DEF int y) (ARG_DEF char d)) (BLOCK (VAR_DEF int i) (for (= i 0) (< i 3) (= i (+ i 1)) (BLOCK (= x 3) (= y 5)))))"
)
nodes = antlr3.tree.CommonTreeNodeStream(r.tree)
nodes.setTokenStream(tStream)
walker = self.getWalker(nodes)
walker.program()
# FIXME: need to crosscheck with Java target (compile walker with
# -trace option), if this is the real list. For now I'm happy that
# it does not crash ;)
self.failUnlessEqual(
walker.traces,
[ '>program', '>declaration', '>variable', '>type', '<type',
'>declarator', '<declarator', '<variable', '<declaration',
'>declaration', '>variable', '>type', '<type', '>declarator',
'<declarator', '<variable', '<declaration', '>declaration',
'>functionHeader', '>type', '<type', '>formalParameter',
'>type', '<type', '>declarator', '<declarator',
'<formalParameter', '<functionHeader', '<declaration',
'>declaration', '>functionHeader', '>type', '<type',
'>formalParameter', '>type', '<type', '>declarator',
'<declarator', '<formalParameter', '>formalParameter', '>type',
'<type', '>declarator', '<declarator', '<formalParameter',
'<functionHeader', '>block', '>variable', '>type', '<type',
'>declarator', '<declarator', '<variable', '>stat', '>forStat',
'>expr', '>expr', '>atom', '<atom', '<expr', '<expr', '>expr',
'>expr', '>atom', '<atom', '<expr', '>expr', '>atom', '<atom',
'<expr', '<expr', '>expr', '>expr', '>expr', '>atom', '<atom',
'<expr', '>expr', '>atom', '<atom', '<expr', '<expr', '<expr',
'>block', '>stat', '>expr', '>expr', '>atom', '<atom', '<expr',
'<expr', '<stat', '>stat', '>expr', '>expr', '>atom', '<atom',
'<expr', '<expr', '<stat', '<block', '<forStat', '<stat',
'<block', '<declaration', '<program'
]
)
def testRuleLabelPropertyRefText(self):
self.compileGrammar()
self.compileGrammar('t047treeparserWalker.g', options='-trace')
input = textwrap.dedent(
'''\
char c;
''')
cStream = antlr3.StringStream(input)
lexer = self.getLexer(cStream)
tStream = antlr3.CommonTokenStream(lexer)
parser = self.getParser(tStream)
r = parser.variable()
nodes = antlr3.tree.CommonTreeNodeStream(r.tree)
nodes.setTokenStream(tStream)
walker = self.getWalker(nodes)
r = walker.variable()
self.failUnlessEqual(r, 'c')
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 1,523,813,629,345,287,200 | 34.590164 | 241 | 0.500921 | false |
hillwoodroc/deepin-music-player | src/common.py | 1 | 3015 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 ~ 2012 Deepin, Inc.
# 2011 ~ 2012 Hou Shaohui
#
# Author: Hou Shaohui <[email protected]>
# Maintainer: Hou Shaohui <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gio
from mutagen import File as MutagenFile
from mutagen.asf import ASF
from mutagen.apev2 import APEv2File
from mutagen.flac import FLAC
from mutagen.id3 import ID3FileType
from mutagen.oggflac import OggFLAC
from mutagen.oggspeex import OggSpeex
from mutagen.oggtheora import OggTheora
from mutagen.oggvorbis import OggVorbis
from mutagen.trueaudio import TrueAudio
from mutagen.wavpack import WavPack
try: from mutagen.mp4 import MP4 #@UnusedImport
except: from mutagen.m4a import M4A as MP4 #@Reimport
from mutagen.musepack import Musepack
from mutagen.monkeysaudio import MonkeysAudio
from mutagen.optimfrog import OptimFROG
from easymp3 import EasyMP3
FORMATS = [EasyMP3, TrueAudio, OggTheora, OggSpeex, OggVorbis, OggFLAC,
FLAC, APEv2File, MP4, ID3FileType, WavPack, Musepack,
MonkeysAudio, OptimFROG, ASF]
UNTRUST_AUDIO_EXT = [
"669", "ac3", "aif", "aiff", "ape", "amf", "au",
"dsm", "far", "it", "med", "mka", "mpc", "mid",
"mod", "mtm", "midi", "oga", "ogx", "okt", "ra",
"ram", "s3m", "sid", "shn", "snd", "spc", "spx",
"stm", "tta", "ult", "wv", "xm"
]
TRUST_AUDIO_EXT = [
"wav", "wma", "mp2", "mp3", "mp4", "m4a", "flac", "ogg"
]
def file_is_supported(filename, strict=False):
    ''' Whether the file is supported. '''
results = gio.File(filename).get_basename().split(".")
if len(results) < 2:
return False
else:
extension = results[-1].lower()
if extension in TRUST_AUDIO_EXT:
return True
elif extension in UNTRUST_AUDIO_EXT:
try:
fileobj = file(filename, "rb")
except:
return False
try:
header = fileobj.read(128)
results = [Kind.score(filename, fileobj, header) for Kind in FORMATS]
except:
return False
finally:
fileobj.close()
results = zip(results, FORMATS)
results.sort()
score, Kind = results[-1]
if score > 0: return True
else: return False
else:
return False
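# Usage sketch (illustrative; the paths below are hypothetical): trusted
# extensions short-circuit to True, while untrusted ones are only accepted
# when one of mutagen's FORMATS classes scores the file header above zero.
#
#     file_is_supported("/music/song.mp3")   # -> True (trusted extension)
#     file_is_supported("/music/track.ape")  # -> True only if a scorer
#                                            #    recognizes the header
#     file_is_supported("/music/readme")     # -> False (no extension)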
| gpl-3.0 | 2,009,000,018,069,555,200 | 32.876404 | 85 | 0.635489 | false |
Stavitsky/neutron | neutron/common/topics.py | 45 | 1863 | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
NETWORK = 'network'
SUBNET = 'subnet'
PORT = 'port'
SECURITY_GROUP = 'security_group'
L2POPULATION = 'l2population'
DVR = 'dvr'
CREATE = 'create'
DELETE = 'delete'
UPDATE = 'update'
AGENT = 'q-agent-notifier'
PLUGIN = 'q-plugin'
L3PLUGIN = 'q-l3-plugin'
DHCP = 'q-dhcp-notifer'
FIREWALL_PLUGIN = 'q-firewall-plugin'
METERING_PLUGIN = 'q-metering-plugin'
LOADBALANCER_PLUGIN = 'n-lbaas-plugin'
L3_AGENT = 'l3_agent'
DHCP_AGENT = 'dhcp_agent'
METERING_AGENT = 'metering_agent'
LOADBALANCER_AGENT = 'n-lbaas_agent'
def get_topic_name(prefix, table, operation, host=None):
"""Create a topic name.
The topic name needs to be synced between the agent and the
plugin. The plugin will send a fanout message to all of the
listening agents so that the agents in turn can perform their
updates accordingly.
:param prefix: Common prefix for the plugin/agent message queues.
:param table: The table in question (NETWORK, SUBNET, PORT).
:param operation: The operation that invokes notification (CREATE,
DELETE, UPDATE)
:param host: Add host to the topic
:returns: The topic name.
"""
if host:
return '%s-%s-%s.%s' % (prefix, table, operation, host)
return '%s-%s-%s' % (prefix, table, operation)
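# Illustrative examples, using the constants defined above:
#
#     get_topic_name(AGENT, PORT, UPDATE)
#     # -> 'q-agent-notifier-port-update'
#     get_topic_name(AGENT, NETWORK, DELETE, host='compute1')
#     # -> 'q-agent-notifier-network-delete.compute1'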
| apache-2.0 | -5,900,867,314,987,998,000 | 31.12069 | 70 | 0.706388 | false |
hellozt/gnome15 | src/gamewrap/gw/__init__.py | 8 | 4907 | # Gnome15 - Suite of tools for the Logitech G series keyboards and headsets
# Copyright (C) 2011 Brett Smith <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gobject
import subprocess
import signal
import sys
import dbus.service
import threading
import re
# Logging
import logging
logger = logging.getLogger(__name__)
NAME = "GameWrap"
VERSION = "0.1"
BUS_NAME = "org.gnome15.GameWrap"
OBJECT_PATH = "/org/gnome15/GameWrap"
IF_NAME = "org.gnome15.GameWrap"
class RunThread(threading.Thread):
def __init__(self, controller):
threading.Thread.__init__(self, name = "ExecCommand")
self.controller = controller
def run(self):
logger.info("Running '%s'", str(self.controller.args))
self.process = subprocess.Popen(self.controller.args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
logger.info("Process started OK")
while True:
line = self.process.stdout.readline(1024)
if line:
logger.info(">%s<", line)
for pattern_id in self.controller.patterns:
pattern = self.controller.patterns[pattern_id]
match = re.search(pattern, line)
if match:
logger.info("Match! %s", str(match))
                        gobject.idle_add(self.controller.PatternMatch, pattern_id, line)
else:
break
logger.info("Waiting for process to complete")
self.controller.status = self.process.wait()
logger.info("Process complete with %s", self.controller.status)
self.controller.Stop()
class G15GameWrapperServiceController(dbus.service.Object):
def __init__(self, args, bus, no_trap=False):
bus_name = dbus.service.BusName(BUS_NAME, bus=bus, replace_existing=False, allow_replacement=False, do_not_queue=True)
dbus.service.Object.__init__(self, None, OBJECT_PATH, bus_name)
self._page_sequence_number = 1
self._bus = bus
self.args = args
self.status = 0
self.patterns = {}
logger.info("Exposing service for '%s'. Wait for signal to wait", str(args))
if not no_trap:
signal.signal(signal.SIGINT, self.sigint_handler)
signal.signal(signal.SIGTERM, self.sigterm_handler)
self._loop = gobject.MainLoop()
def start_loop(self):
logger.info("Starting GLib loop")
self._loop.run()
logger.debug("Exited GLib loop")
def sigint_handler(self, signum, frame):
logger.info("Got SIGINT signal, shutting down")
self.shutdown()
def sigterm_handler(self, signum, frame):
logger.info("Got SIGTERM signal, shutting down")
self.shutdown()
"""
DBUS API
"""
@dbus.service.method(IF_NAME)
def Start(self):
RunThread(self).start()
@dbus.service.method(IF_NAME)
def Stop(self):
        gobject.idle_add(self._shutdown)
@dbus.service.method(IF_NAME, in_signature='ss')
def AddPattern(self, pattern_id, pattern):
logger.info("Adding pattern '%s' with id '%s'", pattern, pattern_id)
if pattern_id in self.patterns:
raise Exception("Pattern with ID %s already registered." % pattern_id)
self.patterns[pattern_id] = pattern
@dbus.service.method(IF_NAME, in_signature='s')
def RemovePattern(self, pattern_id):
logger.info("Removing pattern with id '%s'", pattern_id)
if not pattern_id in self.patterns:
raise Exception("Pattern with ID %s not registered." % pattern_id)
        del self.patterns[pattern_id]
@dbus.service.method(IF_NAME, in_signature='', out_signature='ssssas')
def GetInformation(self):
return ("GameWrapper Service", "Gnome15 Project", VERSION, "1.0", self.args)
"""
Signals
"""
"""
DBUS Signals
"""
    @dbus.service.signal(IF_NAME, signature='ss')
def PatternMatch(self, pattern_id, line):
pass
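    # Usage sketch (illustrative): a client could register a regex over
    # DBus and react to PatternMatch signals. The bus/object names come
    # from the constants above; the session bus, pattern id and regex
    # below are assumptions.
    #
    #     import dbus
    #     bus = dbus.SessionBus()
    #     wrapper = bus.get_object(BUS_NAME, OBJECT_PATH)
    #     wrapper.AddPattern('frag', r'fragged (\w+)',
    #                        dbus_interface=IF_NAME)
    #     wrapper.Start(dbus_interface=IF_NAME)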
"""
Private
"""
def _shutdown(self):
logger.info("Shutting down")
self._loop.quit()
sys.exit(self.status) | gpl-3.0 | 7,160,617,263,435,965,000 | 33.321678 | 126 | 0.618504 | false |
axonchisel/ax_metrics | py/axonchisel/metrics/foundation/metricdef/mdefl.py | 1 | 9203 | """
Ax_Metrics - MDefL Metric Definition Language Parser
------------------------------------------------------------------------------
Author: Dan Kamins <dos at axonchisel dot net>
Copyright (c) 2014 Dan Kamins, AxonChisel.net
"""
# ----------------------------------------------------------------------------
import copy
import yaml
import collections
from axonchisel.metrics.foundation.ax.obj import AxObj
from axonchisel.metrics.foundation.ax.dictutil import dict_update_recursive
from .metricdef import MetricDef
from .filters import Filter
from .metset import MetSet
# ----------------------------------------------------------------------------
class MDefLParseError(Exception):
"""Error parsing MDefL"""
pass
# ----------------------------------------------------------------------------
class MetricDefParser(AxObj):
"""
Given raw MDefL YAML strings, parse into MetricDef objects.
Supports multiple parse passes on the same object, allowing defaults and
extensions of defaults.
Usage: Create, parse, parse, parse, ..., get, destroy (or reset).
Variables prefixed with "y" reference parsed YAML currently existing as
Python structures.
Variables prefixed with "ystr_" reference raw YAML strings.
Example partial YAML:
---
id: num_new_sales
type: NUMBER
func: COUNT
table: first_sales
time_field: timeCreated
"""
def __init__(self, base=None):
"""
Initialize parser and internal MetricDef.
If base MetricDef specified, copies as base to extend.
After parsing, either destroy or call reset to parse a new object.
"""
self.reset(base=base)
#
# Public Methods
#
def reset(self, base=None):
"""
Reset and prepare new MetricDef object for parsing.
If base MetricDef specified, copies as base to extend.
"""
if base is not None:
self._assert_type("base", base, MetricDef)
base = copy.deepcopy(base)
else:
base = MetricDef()
self._metricdef = base
def get_metricdef(self):
"""
Get the wrapped up MetricDef object after parsing.
"""
return self._metricdef
def parse_ystr_metric(self, ystr_metric):
"""
Given raw MDefL YAML str, parse into internal MetricDef.
Only set attributes that are specified, leaving others at default.
Can be called multiple times to build up the MetricDef.
Returns currently wrapped MetricDef.
Raises MDefLParseError on MDefL parse errors.
Raises yaml.YAMLError on underlying YAML parse errors.
"""
ymetric = yaml.load(ystr_metric)
return self.parse_ymetric(ymetric)
def parse_ymetric(self, ymetric):
"""
Given dict as parsed from YAML, parse into internal MetricDef.
Only set attributes that are specified, leaving others at default.
Can be called multiple times to build up the MetricDef.
Returns currently wrapped MetricDef.
Raises MDefLParseError on MDefL parse errors.
"""
self._parse_item(ymetric, 'id')
self._parse_item(ymetric, 'emfetch_id')
self._parse_item(ymetric, 'emfetch_opts', extend=True)
self._parse_item(ymetric, 'table')
self._parse_item(ymetric, 'func')
self._parse_item(ymetric, 'time_field')
self._parse_item(ymetric, 'time_type')
self._parse_item(ymetric, 'data_field')
self._parse_item(ymetric, 'data_type')
self._parse_filters(ymetric)
return self.get_metricdef()
#
# Internal Methods
#
def _parse_item(self, yobj, yname, attr=None, obj=None, extend=False):
"""
Set object attr to value of specific item in yobj,
but only if present.
If attr unspecified, uses same name as yname.
If obj unspecified, uses self._metricdef.
If extend==True, extends dict instead of replacing it.
"""
if obj is None:
obj = self._metricdef
if yname not in yobj:
return
if attr is None:
attr = yname
if extend:
d = getattr(obj, attr, {})
setattr(obj, attr, dict_update_recursive(d, yobj[yname]))
else:
setattr(obj, attr, yobj[yname])
def _parse_filters(self, ymetric):
"""
Helper - Parse filters in ymetric.
"""
yfilters = ymetric.get('filters')
if yfilters is None:
return
if not isinstance(yfilters, list):
raise MDefLParseError(
"MetricDef #{metricdef.id} filters list not a list: {t}"
.format(metricdef=self._metricdef, t=type(yfilters)))
for fnum in range(len(yfilters)):
if self._metricdef.filters.count_filters() <= fnum:
self._metricdef.filters.add_filter(Filter())
f = self._metricdef.filters.get_filters()[fnum]
self._parse_item(yfilters[fnum], 'field', obj=f)
self._parse_item(yfilters[fnum], 'op', obj=f)
self._parse_item(yfilters[fnum], 'value', obj=f)
def __unicode__(self):
return u"MetricDefParser({mdef})".format(mdef=self._metricdef)
# ----------------------------------------------------------------------------
class MetSetParser(AxObj):
"""
Given raw MDefL YAML strings, parse into MetSet object.
Handles table_defaults.
Usage: Create, parse, get, destroy (or reset).
Variables prefixed with "y" reference parsed YAML currently existing as
Python structures.
Variables prefixed with "ystr_" reference raw YAML strings.
Example partial YAML:
---
table_defaults:
- table: table1_name
emfetch_id: ...
time_field: ...
- table: table2_name
func: COUNT
metrics:
- id: metric1_id
table: table1_name
data_field: ...
data_type: ..
- id: metric2_id
table: ...
- id: metric3_id
table: ...
"""
def __init__(self):
"""
Initialize parser and internal MetSet.
After parsing, either destroy or call reset to parse a new object.
"""
self.reset()
#
# Public Methods
#
def reset(self):
"""
Reset and prepare new MetSet object for parsing.
"""
self._metset = MetSet()
self._table_defaults = dict()
def get_metset(self):
"""
Get the wrapped up MetSet object after parsing.
"""
return self._metset
def parse_ystr_metset(self, ystr_metset):
"""
Given raw YAML str, parse into internal MetSet.
Returns currently wrapped MetSet.
"""
ymetset = yaml.load(ystr_metset)
return self.parse_ymetset(ymetset)
def parse_ymetset(self, ymetset):
"""
Given dict as parsed from YAML, parse into internal MetSet.
Returns currently wrapped MetSet.
"""
self._parse_table_defaults(ymetset)
self._parse_metricdefs(ymetset)
return self.get_metset()
#
# Internal Methods
#
def _parse_table_defaults(self, ymetset):
"""
Parse helper - parse 'table_defaults' into self._table_defaults dict
keyed by table.
"""
ytabledefs = ymetset.get('table_defaults')
if ytabledefs is None:
return
if not isinstance(ytabledefs, list):
raise MDefLParseError("'table_defaults' list not a list: {t}"
.format(t=type(ytabledefs)))
for ytbldef in ytabledefs:
mdefparser = MetricDefParser()
mdef = mdefparser.parse_ymetric(ytbldef)
if not mdef.table:
raise MDefLParseError("Table default missing table: {t}"
.format(t=ytbldef))
self._table_defaults[mdef.table] = mdef
def _parse_metricdefs(self, ymetset):
"""
Parse helper - parse 'metrics' into self.metset._metrics dict
keyed by metric id.
"""
ymetrics = ymetset.get('metrics')
if ymetrics is None:
raise MDefLParseError("'metrics' list not found")
if not isinstance(ymetrics, list):
raise MDefLParseError("'metrics' list not a list: {t}"
.format(t=type(ymetrics)))
for ymetric in ymetrics:
base = None
table = ymetric.get('table')
if table:
tabledef = self._table_defaults.get(table)
if tabledef:
base = tabledef
mdefparser = MetricDefParser(base=base)
mdef1 = mdefparser.parse_ymetric(ymetric)
if not mdef1.id:
raise MDefLParseError("Metric missing id: {ym}"
.format(ym=ymetric))
self._metset.add_metric(mdef1)
def __unicode__(self):
return u"MetSetParser({set})".format(set=self._metset)
| mit | 4,013,234,294,652,391,000 | 28.031546 | 78 | 0.557644 | false |
jtyr/ansible-modules-extras | notification/sns.py | 31 | 6005 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Michael J. Schultz <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: sns
short_description: Send Amazon Simple Notification Service (SNS) messages
description:
- The M(sns) module sends notifications to a topic on your Amazon SNS account
version_added: 1.6
author: "Michael J. Schultz (@mjschultz)"
options:
msg:
description:
- Default message to send.
required: true
aliases: [ "default" ]
subject:
description:
- Subject line for email delivery.
required: false
topic:
description:
- The topic you want to publish to.
required: true
email:
description:
- Message to send to email-only subscription
required: false
sqs:
description:
- Message to send to SQS-only subscription
required: false
sms:
description:
- Message to send to SMS-only subscription
required: false
http:
description:
- Message to send to HTTP-only subscription
required: false
https:
description:
- Message to send to HTTPS-only subscription
required: false
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: None
aliases: ['ec2_secret_key', 'secret_key']
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: None
aliases: ['ec2_access_key', 'access_key']
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
requirements:
- "boto"
"""
EXAMPLES = """
- name: Send default notification message via SNS
local_action:
module: sns
msg: "{{ inventory_hostname }} has completed the play."
subject: "Deploy complete!"
topic: "deploy"
- name: Send notification messages via SNS with short message for SMS
local_action:
module: sns
msg: "{{ inventory_hostname }} has completed the play."
sms: "deployed!"
subject: "Deploy complete!"
topic: "deploy"
"""
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, connect_to_aws, get_aws_connection_info
from ansible.module_utils.pycompat24 import get_exception
try:
import boto
import boto.ec2
import boto.sns
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def arn_topic_lookup(connection, short_topic):
response = connection.get_all_topics()
result = response[u'ListTopicsResponse'][u'ListTopicsResult']
# topic names cannot have colons, so this captures the full topic name
lookup_topic = ':{}'.format(short_topic)
for topic in result[u'Topics']:
if topic[u'TopicArn'].endswith(lookup_topic):
return topic[u'TopicArn']
return None
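# Illustrative example (the ARN and connection are hypothetical): the
# lookup matches on the ':<name>' suffix of each TopicArn, so a short
# name resolves to its full ARN.
#
#     arn_topic_lookup(connection, 'deploy')
#     # -> e.g. 'arn:aws:sns:us-east-1:123456789012:deploy'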
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
msg=dict(type='str', required=True, aliases=['default']),
subject=dict(type='str', default=None),
topic=dict(type='str', required=True),
email=dict(type='str', default=None),
sqs=dict(type='str', default=None),
sms=dict(type='str', default=None),
http=dict(type='str', default=None),
https=dict(type='str', default=None),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
msg = module.params['msg']
subject = module.params['subject']
topic = module.params['topic']
email = module.params['email']
sqs = module.params['sqs']
sms = module.params['sms']
http = module.params['http']
https = module.params['https']
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="region must be specified")
try:
connection = connect_to_aws(boto.sns, region, **aws_connect_params)
except boto.exception.NoAuthHandlerFound:
e = get_exception()
module.fail_json(msg=str(e))
# .publish() takes full ARN topic id, but I'm lazy and type shortnames
    # so do a lookup (topics cannot contain ':', so that's the decider)
if ':' in topic:
arn_topic = topic
else:
arn_topic = arn_topic_lookup(connection, topic)
if not arn_topic:
module.fail_json(msg='Could not find topic: {}'.format(topic))
dict_msg = {'default': msg}
if email:
dict_msg.update(email=email)
if sqs:
dict_msg.update(sqs=sqs)
if sms:
dict_msg.update(sms=sms)
if http:
dict_msg.update(http=http)
if https:
dict_msg.update(https=https)
json_msg = json.dumps(dict_msg)
try:
connection.publish(topic=arn_topic, subject=subject,
message_structure='json', message=json_msg)
except boto.exception.BotoServerError:
e = get_exception()
module.fail_json(msg=str(e))
module.exit_json(msg="OK")
if __name__ == '__main__':
main()
| gpl-3.0 | -6,877,596,757,618,771,000 | 28.875622 | 119 | 0.657286 | false |
tysonclugg/django | django/db/models/fields/files.py | 19 | 17906 | import datetime
import posixpath
from django import forms
from django.core import checks
from django.core.files.base import File
from django.core.files.images import ImageFile
from django.core.files.storage import default_storage
from django.db.models import signals
from django.db.models.fields import Field
from django.utils.translation import gettext_lazy as _
class FieldFile(File):
def __init__(self, instance, field, name):
super().__init__(None, name)
self.instance = instance
self.field = field
self.storage = field.storage
self._committed = True
def __eq__(self, other):
# Older code may be expecting FileField values to be simple strings.
        # By overriding the == operator, it can remain backwards compatible.
if hasattr(other, 'name'):
return self.name == other.name
return self.name == other
def __hash__(self):
return hash(self.name)
# The standard File contains most of the necessary properties, but
# FieldFiles can be instantiated without a name, so that needs to
# be checked for here.
def _require_file(self):
if not self:
raise ValueError("The '%s' attribute has no file associated with it." % self.field.name)
def _get_file(self):
self._require_file()
if not hasattr(self, '_file') or self._file is None:
self._file = self.storage.open(self.name, 'rb')
return self._file
def _set_file(self, file):
self._file = file
def _del_file(self):
del self._file
file = property(_get_file, _set_file, _del_file)
@property
def path(self):
self._require_file()
return self.storage.path(self.name)
@property
def url(self):
self._require_file()
return self.storage.url(self.name)
@property
def size(self):
self._require_file()
if not self._committed:
return self.file.size
return self.storage.size(self.name)
def open(self, mode='rb'):
self._require_file()
if hasattr(self, '_file') and self._file is not None:
self.file.open(mode)
else:
self.file = self.storage.open(self.name, mode)
return self
# open() doesn't alter the file's contents, but it does reset the pointer
open.alters_data = True
# In addition to the standard File API, FieldFiles have extra methods
# to further manipulate the underlying file, as well as update the
# associated model instance.
def save(self, name, content, save=True):
name = self.field.generate_filename(self.instance, name)
self.name = self.storage.save(name, content, max_length=self.field.max_length)
setattr(self.instance, self.field.name, self.name)
self._committed = True
# Save the object because it has changed, unless save is False
if save:
self.instance.save()
save.alters_data = True
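    # Usage sketch (illustrative; the instance and file name are
    # hypothetical): saving content through the field file commits it to
    # storage and updates the model attribute.
    #
    #     from django.core.files.base import ContentFile
    #     instance.file.save('hello.txt', ContentFile(b'hello'), save=True)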
def delete(self, save=True):
if not self:
return
# Only close the file if it's already open, which we know by the
# presence of self._file
if hasattr(self, '_file'):
self.close()
del self.file
self.storage.delete(self.name)
self.name = None
setattr(self.instance, self.field.name, self.name)
self._committed = False
if save:
self.instance.save()
delete.alters_data = True
@property
def closed(self):
file = getattr(self, '_file', None)
return file is None or file.closed
def close(self):
file = getattr(self, '_file', None)
if file is not None:
file.close()
def __getstate__(self):
# FieldFile needs access to its associated model field and an instance
# it's attached to in order to work properly, but the only necessary
# data to be pickled is the file's name itself. Everything else will
# be restored later, by FileDescriptor below.
return {'name': self.name, 'closed': False, '_committed': True, '_file': None}
class FileDescriptor:
"""
The descriptor for the file attribute on the model instance. Return a
FieldFile when accessed so you can write code like::
>>> from myapp.models import MyModel
>>> instance = MyModel.objects.get(pk=1)
>>> instance.file.size
Assign a file object on assignment so you can do::
>>> with open('/path/to/hello.world', 'r') as f:
... instance.file = File(f)
"""
def __init__(self, field):
self.field = field
def __get__(self, instance, cls=None):
if instance is None:
return self
# This is slightly complicated, so worth an explanation.
        # `instance.file` needs to ultimately return some instance of `File`,
# probably a subclass. Additionally, this returned object needs to have
# the FieldFile API so that users can easily do things like
# instance.file.path and have that delegated to the file storage engine.
# Easy enough if we're strict about assignment in __set__, but if you
# peek below you can see that we're not. So depending on the current
# value of the field we have to dynamically construct some sort of
# "thing" to return.
# The instance dict contains whatever was originally assigned
# in __set__.
if self.field.name in instance.__dict__:
file = instance.__dict__[self.field.name]
else:
instance.refresh_from_db(fields=[self.field.name])
file = getattr(instance, self.field.name)
# If this value is a string (instance.file = "path/to/file") or None
# then we simply wrap it with the appropriate attribute class according
# to the file field. [This is FieldFile for FileFields and
# ImageFieldFile for ImageFields; it's also conceivable that user
# subclasses might also want to subclass the attribute class]. This
# object understands how to convert a path to a file, and also how to
# handle None.
if isinstance(file, str) or file is None:
attr = self.field.attr_class(instance, self.field, file)
instance.__dict__[self.field.name] = attr
# Other types of files may be assigned as well, but they need to have
# the FieldFile interface added to them. Thus, we wrap any other type of
# File inside a FieldFile (well, the field's attr_class, which is
# usually FieldFile).
elif isinstance(file, File) and not isinstance(file, FieldFile):
file_copy = self.field.attr_class(instance, self.field, file.name)
file_copy.file = file
file_copy._committed = False
instance.__dict__[self.field.name] = file_copy
# Finally, because of the (some would say boneheaded) way pickle works,
# the underlying FieldFile might not actually itself have an associated
# file. So we need to reset the details of the FieldFile in those cases.
elif isinstance(file, FieldFile) and not hasattr(file, 'field'):
file.instance = instance
file.field = self.field
file.storage = self.field.storage
# Make sure that the instance is correct.
elif isinstance(file, FieldFile) and instance is not file.instance:
file.instance = instance
# That was fun, wasn't it?
return instance.__dict__[self.field.name]
def __set__(self, instance, value):
instance.__dict__[self.field.name] = value
class FileField(Field):
# The class to wrap instance attributes in. Accessing the file object off
# the instance will always return an instance of attr_class.
attr_class = FieldFile
# The descriptor to use for accessing the attribute off of the class.
descriptor_class = FileDescriptor
description = _("File")
def __init__(self, verbose_name=None, name=None, upload_to='', storage=None, **kwargs):
self._primary_key_set_explicitly = 'primary_key' in kwargs
self.storage = storage or default_storage
self.upload_to = upload_to
kwargs['max_length'] = kwargs.get('max_length', 100)
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
errors.extend(self._check_primary_key())
errors.extend(self._check_upload_to())
return errors
def _check_primary_key(self):
if self._primary_key_set_explicitly:
return [
checks.Error(
"'primary_key' is not a valid argument for a %s." % self.__class__.__name__,
obj=self,
id='fields.E201',
)
]
else:
return []
def _check_upload_to(self):
if isinstance(self.upload_to, str) and self.upload_to.startswith('/'):
return [
checks.Error(
"%s's 'upload_to' argument must be a relative path, not an "
"absolute path." % self.__class__.__name__,
obj=self,
id='fields.E202',
hint='Remove the leading slash.',
)
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
kwargs['upload_to'] = self.upload_to
if self.storage is not default_storage:
kwargs['storage'] = self.storage
return name, path, args, kwargs
def get_internal_type(self):
return "FileField"
def get_prep_value(self, value):
value = super().get_prep_value(value)
# Need to convert File objects provided via a form to string for database insertion
if value is None:
return None
return str(value)
def pre_save(self, model_instance, add):
file = super().pre_save(model_instance, add)
if file and not file._committed:
# Commit the file to storage prior to saving the model
file.save(file.name, file.file, save=False)
return file
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
setattr(cls, self.name, self.descriptor_class(self))
def generate_filename(self, instance, filename):
"""
Apply (if callable) or prepend (if a string) upload_to to the filename,
then delegate further processing of the name to the storage backend.
Until the storage layer, all file paths are expected to be Unix style
(with forward slashes).
"""
if callable(self.upload_to):
filename = self.upload_to(instance, filename)
else:
dirname = datetime.datetime.now().strftime(self.upload_to)
filename = posixpath.join(dirname, filename)
return self.storage.generate_filename(filename)
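    # Example (illustrative; the names are hypothetical): a callable
    # upload_to produces the filename itself, while a string upload_to is
    # treated as a strftime directory pattern.
    #
    #     def user_directory_path(instance, filename):
    #         return 'user_{0}/{1}'.format(instance.user.id, filename)
    #
    #     file = models.FileField(upload_to=user_directory_path)
    #     # or: models.FileField(upload_to='uploads/%Y/%m/%d/')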
def save_form_data(self, instance, data):
# Important: None means "no change", other false value means "clear"
# This subtle distinction (rather than a more explicit marker) is
# needed because we need to consume values that are also sane for a
# regular (non Model-) Form to find in its cleaned_data dictionary.
if data is not None:
# This value will be converted to str and stored in the
# database, so leaving False as-is is not acceptable.
if not data:
data = ''
setattr(instance, self.name, data)
def formfield(self, **kwargs):
defaults = {'form_class': forms.FileField, 'max_length': self.max_length}
defaults.update(kwargs)
return super().formfield(**defaults)
class ImageFileDescriptor(FileDescriptor):
"""
Just like the FileDescriptor, but for ImageFields. The only difference is
assigning the width/height to the width_field/height_field, if appropriate.
"""
def __set__(self, instance, value):
previous_file = instance.__dict__.get(self.field.name)
super().__set__(instance, value)
# To prevent recalculating image dimensions when we are instantiating
# an object from the database (bug #11084), only update dimensions if
# the field had a value before this assignment. Since the default
# value for FileField subclasses is an instance of field.attr_class,
# previous_file will only be None when we are called from
# Model.__init__(). The ImageField.update_dimension_fields method
# hooked up to the post_init signal handles the Model.__init__() cases.
# Assignment happening outside of Model.__init__() will trigger the
# update right here.
if previous_file is not None:
self.field.update_dimension_fields(instance, force=True)
class ImageFieldFile(ImageFile, FieldFile):
def delete(self, save=True):
# Clear the image dimensions cache
if hasattr(self, '_dimensions_cache'):
del self._dimensions_cache
super().delete(save)
class ImageField(FileField):
attr_class = ImageFieldFile
descriptor_class = ImageFileDescriptor
description = _("Image")
def __init__(self, verbose_name=None, name=None, width_field=None, height_field=None, **kwargs):
self.width_field, self.height_field = width_field, height_field
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
errors.extend(self._check_image_library_installed())
return errors
def _check_image_library_installed(self):
try:
from PIL import Image # NOQA
except ImportError:
return [
checks.Error(
'Cannot use ImageField because Pillow is not installed.',
hint=('Get Pillow at https://pypi.python.org/pypi/Pillow '
'or run command "pip install Pillow".'),
obj=self,
id='fields.E210',
)
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.width_field:
kwargs['width_field'] = self.width_field
if self.height_field:
kwargs['height_field'] = self.height_field
return name, path, args, kwargs
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
# Attach update_dimension_fields so that dimension fields declared
# after their corresponding image field don't stay cleared by
# Model.__init__, see bug #11196.
# Only run post-initialization dimension update on non-abstract models
if not cls._meta.abstract:
signals.post_init.connect(self.update_dimension_fields, sender=cls)
def update_dimension_fields(self, instance, force=False, *args, **kwargs):
"""
Update field's width and height fields, if defined.
This method is hooked up to model's post_init signal to update
dimensions after instantiating a model instance. However, dimensions
won't be updated if the dimensions fields are already populated. This
avoids unnecessary recalculation when loading an object from the
database.
Dimensions can be forced to update with force=True, which is how
ImageFileDescriptor.__set__ calls this method.
"""
# Nothing to update if the field doesn't have dimension fields or if
# the field is deferred.
has_dimension_fields = self.width_field or self.height_field
if not has_dimension_fields or self.attname not in instance.__dict__:
return
# getattr will call the ImageFileDescriptor's __get__ method, which
# coerces the assigned value into an instance of self.attr_class
# (ImageFieldFile in this case).
file = getattr(instance, self.attname)
# Nothing to update if we have no file and not being forced to update.
if not file and not force:
return
dimension_fields_filled = not(
(self.width_field and not getattr(instance, self.width_field)) or
(self.height_field and not getattr(instance, self.height_field))
)
# When both dimension fields have values, we are most likely loading
# data from the database or updating an image field that already had
# an image stored. In the first case, we don't want to update the
# dimension fields because we are already getting their values from the
# database. In the second case, we do want to update the dimensions
# fields and will skip this return because force will be True since we
# were called from ImageFileDescriptor.__set__.
if dimension_fields_filled and not force:
return
# file should be an instance of ImageFieldFile or should be None.
if file:
width = file.width
height = file.height
else:
# No file, so clear dimensions fields.
width = None
height = None
# Update the width and height fields.
if self.width_field:
setattr(instance, self.width_field, width)
if self.height_field:
setattr(instance, self.height_field, height)
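    # Example (illustrative; the model is hypothetical): width_field and
    # height_field name sibling model fields that this method keeps in
    # sync with the stored image.
    #
    #     class Photo(models.Model):
    #         image = models.ImageField(upload_to='photos/',
    #                                   width_field='image_width',
    #                                   height_field='image_height')
    #         image_width = models.PositiveIntegerField(null=True)
    #         image_height = models.PositiveIntegerField(null=True)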
def formfield(self, **kwargs):
defaults = {'form_class': forms.ImageField}
defaults.update(kwargs)
return super().formfield(**defaults)
| bsd-3-clause | -7,213,573,523,506,420,000 | 37.673866 | 100 | 0.617726 | false |
isrohutamahopetechnik/ardupilot | mk/PX4/Tools/genmsg/src/genmsg/msgs.py | 161 | 12254 | # Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
"""
ROS msg library for Python
Implements: U{http://ros.org/wiki/msg}
"""
import os
import sys
from . base import InvalidMsgSpec, EXT_MSG, MSG_DIR, SEP, log
from . names import is_legal_resource_name, is_legal_resource_base_name, package_resource_name, resource_name
#TODOXXX: unit test
def bare_msg_type(msg_type):
"""
Compute the bare data type, e.g. for arrays, get the underlying array item type
:param msg_type: ROS msg type (e.g. 'std_msgs/String'), ``str``
:returns: base type, ``str``
"""
if msg_type is None:
return None
if '[' in msg_type:
return msg_type[:msg_type.find('[')]
return msg_type
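# Illustrative examples:
#
#     bare_msg_type('std_msgs/String[]')  # -> 'std_msgs/String'
#     bare_msg_type('int32[4]')           # -> 'int32'
#     bare_msg_type('uint16')             # -> 'uint16'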
def resolve_type(msg_type, package_context):
"""
Resolve type name based on current package context.
NOTE: in ROS Diamondback, 'Header' resolves to
'std_msgs/Header'. In previous releases, it resolves to
'roslib/Header' (REP 100).
e.g.::
resolve_type('String', 'std_msgs') -> 'std_msgs/String'
resolve_type('String[]', 'std_msgs') -> 'std_msgs/String[]'
resolve_type('std_msgs/String', 'foo') -> 'std_msgs/String'
resolve_type('uint16', 'std_msgs') -> 'uint16'
resolve_type('uint16[]', 'std_msgs') -> 'uint16[]'
"""
bt = bare_msg_type(msg_type)
if bt in BUILTIN_TYPES:
return msg_type
elif bt == HEADER:
return HEADER_FULL_NAME
elif SEP in msg_type:
return msg_type
else:
return "%s%s%s"%(package_context, SEP, msg_type)
#NOTE: this assumes that we aren't going to support multi-dimensional
def parse_type(msg_type):
"""
Parse ROS message field type
:param msg_type: ROS field type, ``str``
:returns: base_type, is_array, array_length, ``(str, bool, int)``
:raises: :exc:`ValueError` If *msg_type* cannot be parsed
"""
if not msg_type:
raise ValueError("Invalid empty type")
if '[' in msg_type:
var_length = msg_type.endswith('[]')
splits = msg_type.split('[')
if len(splits) > 2:
raise ValueError("Currently only support 1-dimensional array types: %s"%msg_type)
if var_length:
return msg_type[:-2], True, None
else:
try:
length = int(splits[1][:-1])
return splits[0], True, length
except ValueError:
raise ValueError("Invalid array dimension: [%s]"%splits[1][:-1])
else:
return msg_type, False, None
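# Illustrative examples:
#
#     parse_type('int32')              # -> ('int32', False, None)
#     parse_type('std_msgs/String[]')  # -> ('std_msgs/String', True, None)
#     parse_type('uint8[16]')          # -> ('uint8', True, 16)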
################################################################################
# name validation
def is_valid_msg_type(x):
"""
:returns: True if the name is a syntatically legal message type name, ``bool``
"""
if not x or len(x) != len(x.strip()):
return False
base = bare_msg_type(x)
if not is_legal_resource_name(base):
return False
    # parse array indices
x = x[len(base):]
state = 0
i = 0
for c in x:
if state == 0:
if c != '[':
return False
state = 1 #open
elif state == 1:
if c == ']':
state = 0 #closed
else:
try:
int(c)
except:
return False
return state == 0
def is_valid_constant_type(x):
"""
:returns: ``True`` if the name is a legal constant type. Only simple types are allowed, ``bool``
"""
return x in PRIMITIVE_TYPES
def is_valid_msg_field_name(x):
"""
:returns: ``True`` if the name is a syntatically legal message field name, ``bool``
"""
return is_legal_resource_base_name(x)
# msg spec representation ##########################################
class Constant(object):
"""
Container class for holding a Constant declaration
Attributes:
- ``type``
- ``name``
- ``val``
- ``val_text``
"""
__slots__ = ['type', 'name', 'val', 'val_text']
def __init__(self, type_, name, val, val_text):
"""
:param type_: constant type, ``str``
:param name: constant name, ``str``
:param val: constant value, ``str``
:param val_text: Original text definition of *val*, ``str``
"""
        if type_ is None or name is None or val is None or val_text is None:
raise ValueError('Constant must have non-None parameters')
self.type = type_
self.name = name.strip() #names are always stripped of whitespace
self.val = val
self.val_text = val_text
def __eq__(self, other):
if not isinstance(other, Constant):
return False
return self.type == other.type and self.name == other.name and self.val == other.val
def __repr__(self):
return "%s %s=%s"%(self.type, self.name, self.val)
def __str__(self):
return "%s %s=%s"%(self.type, self.name, self.val)
class Field(object):
"""
Container class for storing information about a single field in a MsgSpec
Attributes:
- ``name``
- ``type``
- ``base_type``
- ``is_array``
- ``array_len``
- ``is_builtin``
- ``is_header``
"""
def __init__(self, name, type):
self.name = name
self.type = type
(self.base_type, self.is_array, self.array_len) = parse_type(type)
        self.is_header = is_header_type(self.base_type)
self.is_builtin = is_builtin(self.base_type)
def __eq__(self, other):
if not isinstance(other, Field):
return False
else:
return self.name == other.name and \
self.type == other.type
def __repr__(self):
return "[%s, %s, %s, %s, %s]"%(self.name, self.type, self.base_type, self.is_array, self.array_len)
class MsgSpec(object):
"""
Container class for storing loaded msg description files. Field
types and names are stored in separate lists with 1-to-1
correspondence. MsgSpec can also return an md5 of the source text.
"""
def __init__(self, types, names, constants, text, full_name, package = '', short_name = ''):
"""
:param types: list of field types, in order of declaration, ``[str]]``
:param names: list of field names, in order of declaration, ``[str]]``
:param constants: List of :class:`Constant` declarations, ``[Constant]``
        :param text: text of declaration, ``str``
:raises: :exc:`InvalidMsgSpec` If spec is invalid (e.g. fields with the same name)
"""
alt_package, alt_short_name = package_resource_name(full_name)
if not package:
package = alt_package
if not short_name:
short_name = alt_short_name
self.types = types
if len(set(names)) != len(names):
raise InvalidMsgSpec("Duplicate field names in message: %s"%names)
self.names = names
self.constants = constants
assert len(self.types) == len(self.names), "len(%s) != len(%s)"%(self.types, self.names)
#Header.msg support
if (len(self.types)):
self.header_present = self.types[0] == HEADER_FULL_NAME and self.names[0] == 'header'
else:
self.header_present = False
self.text = text
self.full_name = full_name
self.short_name = short_name
self.package = package
try:
self._parsed_fields = [Field(name, type) for (name, type) in zip(self.names, self.types)]
except ValueError as e:
raise InvalidMsgSpec("invalid field: %s"%(e))
def fields(self):
"""
:returns: zip list of types and names (e.g. [('int32', 'x'), ('int32', 'y')], ``[(str,str),]``
"""
return list(zip(self.types, self.names)) #py3k
def parsed_fields(self):
"""
:returns: list of :class:`Field` classes, ``[Field,]``
"""
return self._parsed_fields
def has_header(self):
"""
:returns: ``True`` if msg decription contains a 'Header header'
declaration at the beginning, ``bool``
"""
return self.header_present
def __eq__(self, other):
if not other or not isinstance(other, MsgSpec):
return False
return self.types == other.types and self.names == other.names and \
self.constants == other.constants and self.text == other.text and \
self.full_name == other.full_name and self.short_name == other.short_name and \
self.package == other.package
def __ne__(self, other):
if not other or not isinstance(other, MsgSpec):
return True
return not self.__eq__(other)
def __repr__(self):
if self.constants:
return "MsgSpec[%s, %s, %s]"%(repr(self.constants), repr(self.types), repr(self.names))
else:
return "MsgSpec[%s, %s]"%(repr(self.types), repr(self.names))
def __str__(self):
return self.text
# .msg file routines ##############################################################
# adjustable constants, in case we change our minds
HEADER = 'Header'
TIME = 'time'
DURATION = 'duration'
HEADER_FULL_NAME = 'std_msgs/Header'
def is_header_type(msg_type):
"""
:param msg_type: message type name, ``str``
:returns: ``True`` if *msg_type* refers to the ROS Header type, ``bool``
"""
# for backwards compatibility, include roslib/Header. REP 100
return msg_type in [HEADER, HEADER_FULL_NAME, 'roslib/Header']
# time and duration types are represented as aggregate data structures
# for the purposes of serialization from the perspective of
# roslib.msgs. genmsg_py will do additional special handling is required
# to convert them into rospy.msg.Time/Duration instances.
## time as msg spec. time is unsigned
TIME_MSG = "uint32 secs\nuint32 nsecs"
## duration as msg spec. duration is just like time except signed
DURATION_MSG = "int32 secs\nint32 nsecs"
## primitive types are those for which we allow constants, i.e. have primitive representation
PRIMITIVE_TYPES = ['int8','uint8','int16','uint16','int32','uint32','int64','uint64','float32','float64',
'string',
'bool',
# deprecated:
'char','byte']
BUILTIN_TYPES = PRIMITIVE_TYPES + [TIME, DURATION]
def is_builtin(msg_type_name):
"""
:param msg_type_name: name of message type, ``str``
:returns: True if msg_type_name is a builtin/primitive type, ``bool``
"""
return msg_type_name in BUILTIN_TYPES
| gpl-3.0 | 258,773,398,167,689,250 | 34.111748 | 109 | 0.594173 | false |
berrange/nova | nova/virt/hyperv/vmutils.py | 8 | 29325 | # Copyright (c) 2010 Cloud.com, Inc
# Copyright 2012 Cloudbase Solutions Srl / Pedro Navarro Perez
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility class for VM related operations on Hyper-V.
"""
import sys
import time
import uuid
if sys.platform == 'win32':
import wmi
from oslo.config import cfg
from nova import exception
from nova.i18n import _, _LW
from nova.openstack.common import log as logging
from nova.virt.hyperv import constants
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# TODO(alexpilotti): Move the exceptions to a separate module
# TODO(alexpilotti): Add more domain exceptions
class HyperVException(exception.NovaException):
def __init__(self, message=None):
super(HyperVException, self).__init__(message)
# TODO(alexpilotti): Add a storage exception base class
class VHDResizeException(HyperVException):
def __init__(self, message=None):
super(HyperVException, self).__init__(message)
class HyperVAuthorizationException(HyperVException):
def __init__(self, message=None):
super(HyperVException, self).__init__(message)
class UnsupportedConfigDriveFormatException(HyperVException):
def __init__(self, message=None):
super(HyperVException, self).__init__(message)
class VMUtils(object):
# These constants can be overridden by inherited classes
_PHYS_DISK_RES_SUB_TYPE = 'Microsoft Physical Disk Drive'
_DISK_RES_SUB_TYPE = 'Microsoft Synthetic Disk Drive'
_DVD_RES_SUB_TYPE = 'Microsoft Synthetic DVD Drive'
_IDE_DISK_RES_SUB_TYPE = 'Microsoft Virtual Hard Disk'
_IDE_DVD_RES_SUB_TYPE = 'Microsoft Virtual CD/DVD Disk'
_IDE_CTRL_RES_SUB_TYPE = 'Microsoft Emulated IDE Controller'
_SCSI_CTRL_RES_SUB_TYPE = 'Microsoft Synthetic SCSI Controller'
_SERIAL_PORT_RES_SUB_TYPE = 'Microsoft Serial Port'
_SETTINGS_DEFINE_STATE_CLASS = 'Msvm_SettingsDefineState'
_VIRTUAL_SYSTEM_SETTING_DATA_CLASS = 'Msvm_VirtualSystemSettingData'
_RESOURCE_ALLOC_SETTING_DATA_CLASS = 'Msvm_ResourceAllocationSettingData'
_PROCESSOR_SETTING_DATA_CLASS = 'Msvm_ProcessorSettingData'
_MEMORY_SETTING_DATA_CLASS = 'Msvm_MemorySettingData'
_STORAGE_ALLOC_SETTING_DATA_CLASS = _RESOURCE_ALLOC_SETTING_DATA_CLASS
_SYNTHETIC_ETHERNET_PORT_SETTING_DATA_CLASS = \
'Msvm_SyntheticEthernetPortSettingData'
_AFFECTED_JOB_ELEMENT_CLASS = "Msvm_AffectedJobElement"
_SHUTDOWN_COMPONENT = "Msvm_ShutdownComponent"
_VIRTUAL_SYSTEM_CURRENT_SETTINGS = 3
_AUTOMATIC_STARTUP_ACTION_NONE = 0
_vm_power_states_map = {constants.HYPERV_VM_STATE_ENABLED: 2,
constants.HYPERV_VM_STATE_DISABLED: 3,
constants.HYPERV_VM_STATE_SHUTTING_DOWN: 4,
constants.HYPERV_VM_STATE_REBOOT: 10,
constants.HYPERV_VM_STATE_PAUSED: 32768,
constants.HYPERV_VM_STATE_SUSPENDED: 32769}
def __init__(self, host='.'):
self._enabled_states_map = dict((v, k) for k, v in
self._vm_power_states_map.iteritems())
if sys.platform == 'win32':
self._init_hyperv_wmi_conn(host)
self._conn_cimv2 = wmi.WMI(moniker='//%s/root/cimv2' % host)
def _init_hyperv_wmi_conn(self, host):
self._conn = wmi.WMI(moniker='//%s/root/virtualization' % host)
def list_instance_notes(self):
instance_notes = []
for vs in self._conn.Msvm_VirtualSystemSettingData(
['ElementName', 'Notes'],
SettingType=self._VIRTUAL_SYSTEM_CURRENT_SETTINGS):
instance_notes.append((vs.ElementName,
[v for v in vs.Notes.split('\n') if v]))
return instance_notes
def list_instances(self):
"""Return the names of all the instances known to Hyper-V."""
return [v.ElementName for v in
self._conn.Msvm_VirtualSystemSettingData(
['ElementName'],
SettingType=self._VIRTUAL_SYSTEM_CURRENT_SETTINGS)]
def get_vm_summary_info(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
vmsettings = vm.associators(
wmi_association_class=self._SETTINGS_DEFINE_STATE_CLASS,
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
settings_paths = [v.path_() for v in vmsettings]
# See http://msdn.microsoft.com/en-us/library/cc160706%28VS.85%29.aspx
(ret_val, summary_info) = vs_man_svc.GetSummaryInformation(
[constants.VM_SUMMARY_NUM_PROCS,
constants.VM_SUMMARY_ENABLED_STATE,
constants.VM_SUMMARY_MEMORY_USAGE,
constants.VM_SUMMARY_UPTIME],
settings_paths)
if ret_val:
raise HyperVException(_('Cannot get VM summary data for: %s')
% vm_name)
si = summary_info[0]
memory_usage = None
if si.MemoryUsage is not None:
memory_usage = long(si.MemoryUsage)
up_time = None
if si.UpTime is not None:
up_time = long(si.UpTime)
        # Nova requires a valid state to be returned. Hyper-V has more
        # states than Nova, typically intermediate ones, and since there
        # is no direct mapping for those, ENABLED is the only reasonable
        # option considering that in all the non-mappable states the
        # instance is running.
enabled_state = self._enabled_states_map.get(si.EnabledState,
constants.HYPERV_VM_STATE_ENABLED)
summary_info_dict = {'NumberOfProcessors': si.NumberOfProcessors,
'EnabledState': enabled_state,
'MemoryUsage': memory_usage,
'UpTime': up_time}
return summary_info_dict
def _lookup_vm_check(self, vm_name):
vm = self._lookup_vm(vm_name)
if not vm:
raise exception.NotFound(_('VM not found: %s') % vm_name)
return vm
def _lookup_vm(self, vm_name):
vms = self._conn.Msvm_ComputerSystem(ElementName=vm_name)
n = len(vms)
if n == 0:
return None
elif n > 1:
raise HyperVException(_('Duplicate VM name found: %s') % vm_name)
else:
return vms[0]
def vm_exists(self, vm_name):
return self._lookup_vm(vm_name) is not None
def get_vm_id(self, vm_name):
vm = self._lookup_vm_check(vm_name)
return vm.Name
def _get_vm_setting_data(self, vm):
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
# Avoid snapshots
return [s for s in vmsettings if s.SettingType == 3][0]
def _set_vm_memory(self, vm, vmsetting, memory_mb, dynamic_memory_ratio):
mem_settings = vmsetting.associators(
wmi_result_class=self._MEMORY_SETTING_DATA_CLASS)[0]
max_mem = long(memory_mb)
mem_settings.Limit = max_mem
if dynamic_memory_ratio > 1:
mem_settings.DynamicMemoryEnabled = True
# Must be a multiple of 2
reserved_mem = min(
long(max_mem / dynamic_memory_ratio) >> 1 << 1,
max_mem)
else:
mem_settings.DynamicMemoryEnabled = False
reserved_mem = max_mem
mem_settings.Reservation = reserved_mem
# Start with the minimum memory
mem_settings.VirtualQuantity = reserved_mem
self._modify_virt_resource(mem_settings, vm.path_())
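    # Worked example (illustrative): with memory_mb=2048 and
    # dynamic_memory_ratio=2.0, the settings become Limit=2048,
    # DynamicMemoryEnabled=True, and the reserved/startup quantity is
    # min(long(2048 / 2.0) >> 1 << 1, 2048) == 1024 MB, i.e. rounded
    # down to a multiple of 2 and capped at the maximum.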
def _set_vm_vcpus(self, vm, vmsetting, vcpus_num, limit_cpu_features):
procsetting = vmsetting.associators(
wmi_result_class=self._PROCESSOR_SETTING_DATA_CLASS)[0]
vcpus = long(vcpus_num)
procsetting.VirtualQuantity = vcpus
procsetting.Reservation = vcpus
procsetting.Limit = 100000 # static assignment to 100%
procsetting.LimitProcessorFeatures = limit_cpu_features
self._modify_virt_resource(procsetting, vm.path_())
def update_vm(self, vm_name, memory_mb, vcpus_num, limit_cpu_features,
dynamic_memory_ratio):
vm = self._lookup_vm_check(vm_name)
vmsetting = self._get_vm_setting_data(vm)
self._set_vm_memory(vm, vmsetting, memory_mb, dynamic_memory_ratio)
self._set_vm_vcpus(vm, vmsetting, vcpus_num, limit_cpu_features)
def check_admin_permissions(self):
if not self._conn.Msvm_VirtualSystemManagementService():
msg = _("The Windows account running nova-compute on this Hyper-V"
" host doesn't have the required permissions to create or"
" operate the virtual machine.")
raise HyperVAuthorizationException(msg)
def create_vm(self, vm_name, memory_mb, vcpus_num, limit_cpu_features,
dynamic_memory_ratio, notes=None):
"""Creates a VM."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
LOG.debug('Creating VM %s', vm_name)
vm = self._create_vm_obj(vs_man_svc, vm_name, notes)
vmsetting = self._get_vm_setting_data(vm)
LOG.debug('Setting memory for vm %s', vm_name)
self._set_vm_memory(vm, vmsetting, memory_mb, dynamic_memory_ratio)
LOG.debug('Set vCPUs for vm %s', vm_name)
self._set_vm_vcpus(vm, vmsetting, vcpus_num, limit_cpu_features)
def _create_vm_obj(self, vs_man_svc, vm_name, notes):
vs_gs_data = self._conn.Msvm_VirtualSystemGlobalSettingData.new()
vs_gs_data.ElementName = vm_name
# Don't start automatically on host boot
vs_gs_data.AutomaticStartupAction = self._AUTOMATIC_STARTUP_ACTION_NONE
(vm_path,
job_path,
ret_val) = vs_man_svc.DefineVirtualSystem([], None,
vs_gs_data.GetText_(1))
self.check_ret_val(ret_val, job_path)
vm = self._get_wmi_obj(vm_path)
if notes:
vmsetting = self._get_vm_setting_data(vm)
vmsetting.Notes = '\n'.join(notes)
self._modify_virtual_system(vs_man_svc, vm_path, vmsetting)
return self._get_wmi_obj(vm_path)
def _modify_virtual_system(self, vs_man_svc, vm_path, vmsetting):
(job_path, ret_val) = vs_man_svc.ModifyVirtualSystem(
ComputerSystem=vm_path,
SystemSettingData=vmsetting.GetText_(1))[1:]
self.check_ret_val(ret_val, job_path)
def get_vm_scsi_controller(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._RESOURCE_ALLOC_SETTING_DATA_CLASS)
res = [r for r in rasds
if r.ResourceSubType == self._SCSI_CTRL_RES_SUB_TYPE][0]
return res.path_()
def _get_vm_ide_controller(self, vm, ctrller_addr):
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._RESOURCE_ALLOC_SETTING_DATA_CLASS)
return [r for r in rasds
if r.ResourceSubType == self._IDE_CTRL_RES_SUB_TYPE
and r.Address == str(ctrller_addr)][0].path_()
def get_vm_ide_controller(self, vm_name, ctrller_addr):
vm = self._lookup_vm_check(vm_name)
return self._get_vm_ide_controller(vm, ctrller_addr)
def get_attached_disks(self, scsi_controller_path):
volumes = self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = "
"'%(res_sub_type)s' AND "
"Parent = '%(parent)s'" %
{"class_name":
self._RESOURCE_ALLOC_SETTING_DATA_CLASS,
'res_sub_type':
self._PHYS_DISK_RES_SUB_TYPE,
'parent':
scsi_controller_path.replace("'", "''")})
return volumes
def _get_new_setting_data(self, class_name):
return self._conn.query("SELECT * FROM %s WHERE InstanceID "
"LIKE '%%\\Default'" % class_name)[0]
def _get_new_resource_setting_data(self, resource_sub_type,
class_name=None):
if class_name is None:
class_name = self._RESOURCE_ALLOC_SETTING_DATA_CLASS
return self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = "
"'%(res_sub_type)s' AND "
"InstanceID LIKE '%%\\Default'" %
{"class_name": class_name,
"res_sub_type": resource_sub_type})[0]
def attach_ide_drive(self, vm_name, path, ctrller_addr, drive_addr,
drive_type=constants.IDE_DISK):
"""Create an IDE drive and attach it to the vm."""
vm = self._lookup_vm_check(vm_name)
ctrller_path = self._get_vm_ide_controller(vm, ctrller_addr)
if drive_type == constants.IDE_DISK:
res_sub_type = self._DISK_RES_SUB_TYPE
elif drive_type == constants.IDE_DVD:
res_sub_type = self._DVD_RES_SUB_TYPE
drive = self._get_new_resource_setting_data(res_sub_type)
# Set the IDE ctrller as parent.
drive.Parent = ctrller_path
drive.Address = drive_addr
# Add the cloned disk drive object to the vm.
new_resources = self._add_virt_resource(drive, vm.path_())
drive_path = new_resources[0]
if drive_type == constants.IDE_DISK:
res_sub_type = self._IDE_DISK_RES_SUB_TYPE
elif drive_type == constants.IDE_DVD:
res_sub_type = self._IDE_DVD_RES_SUB_TYPE
res = self._get_new_resource_setting_data(res_sub_type)
# Set the new drive as the parent.
res.Parent = drive_path
res.Connection = [path]
# Add the new vhd object as a virtual hard disk to the vm.
self._add_virt_resource(res, vm.path_())
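    # Illustrative usage sketch (not part of the original module; the VM and
    # VHD names below are made up):
    #
    #   vmutils.attach_ide_drive('instance-00000001', 'C:\\vhds\\root.vhd',
    #                            ctrller_addr=0, drive_addr=0,
    #                            drive_type=constants.IDE_DISK)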
def create_scsi_controller(self, vm_name):
"""Create an iscsi controller ready to mount volumes."""
vm = self._lookup_vm_check(vm_name)
scsicontrl = self._get_new_resource_setting_data(
self._SCSI_CTRL_RES_SUB_TYPE)
scsicontrl.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
self._add_virt_resource(scsicontrl, vm.path_())
def attach_volume_to_controller(self, vm_name, controller_path, address,
mounted_disk_path):
"""Attach a volume to a controller."""
vm = self._lookup_vm_check(vm_name)
diskdrive = self._get_new_resource_setting_data(
self._PHYS_DISK_RES_SUB_TYPE)
diskdrive.Address = address
diskdrive.Parent = controller_path
diskdrive.HostResource = [mounted_disk_path]
self._add_virt_resource(diskdrive, vm.path_())
def _get_disk_resource_address(self, disk_resource):
return disk_resource.Address
def set_disk_host_resource(self, vm_name, controller_path, address,
mounted_disk_path):
disk_found = False
vm = self._lookup_vm_check(vm_name)
(disk_resources, volume_resources) = self._get_vm_disks(vm)
for disk_resource in disk_resources + volume_resources:
if (disk_resource.Parent == controller_path and
self._get_disk_resource_address(disk_resource) ==
str(address)):
if (disk_resource.HostResource and
disk_resource.HostResource[0] != mounted_disk_path):
LOG.debug('Updating disk host resource "%(old)s" to '
'"%(new)s"' %
{'old': disk_resource.HostResource[0],
'new': mounted_disk_path})
disk_resource.HostResource = [mounted_disk_path]
self._modify_virt_resource(disk_resource, vm.path_())
disk_found = True
break
if not disk_found:
LOG.warn(_LW('Disk not found on controller "%(controller_path)s" '
'with address "%(address)s"'),
{'controller_path': controller_path, 'address': address})
def set_nic_connection(self, vm_name, nic_name, vswitch_conn_data):
nic_data = self._get_nic_data_by_name(nic_name)
nic_data.Connection = [vswitch_conn_data]
vm = self._lookup_vm_check(vm_name)
self._modify_virt_resource(nic_data, vm.path_())
def _get_nic_data_by_name(self, name):
return self._conn.Msvm_SyntheticEthernetPortSettingData(
ElementName=name)[0]
def create_nic(self, vm_name, nic_name, mac_address):
"""Create a (synthetic) nic and attach it to the vm."""
# Create a new nic
new_nic_data = self._get_new_setting_data(
self._SYNTHETIC_ETHERNET_PORT_SETTING_DATA_CLASS)
# Configure the nic
new_nic_data.ElementName = nic_name
new_nic_data.Address = mac_address.replace(':', '')
new_nic_data.StaticMacAddress = 'True'
new_nic_data.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
# Add the new nic to the vm
vm = self._lookup_vm_check(vm_name)
self._add_virt_resource(new_nic_data, vm.path_())
def soft_shutdown_vm(self, vm_name):
vm = self._lookup_vm_check(vm_name)
shutdown_component = vm.associators(
wmi_result_class=self._SHUTDOWN_COMPONENT)
if not shutdown_component:
# If no shutdown_component is found, it means the VM is already
# in a shutdown state.
return
reason = 'Soft shutdown requested by OpenStack Nova.'
(ret_val, ) = shutdown_component[0].InitiateShutdown(Force=False,
Reason=reason)
self.check_ret_val(ret_val, None)
def set_vm_state(self, vm_name, req_state):
"""Set the desired state of the VM."""
vm = self._lookup_vm_check(vm_name)
(job_path,
ret_val) = vm.RequestStateChange(self._vm_power_states_map[req_state])
# Invalid state for current operation (32775) typically means that
# the VM is already in the state requested
self.check_ret_val(ret_val, job_path, [0, 32775])
LOG.debug("Successfully changed vm state of %(vm_name)s "
"to %(req_state)s",
{'vm_name': vm_name, 'req_state': req_state})
def _get_disk_resource_disk_path(self, disk_resource):
return disk_resource.Connection
def get_vm_storage_paths(self, vm_name):
vm = self._lookup_vm_check(vm_name)
(disk_resources, volume_resources) = self._get_vm_disks(vm)
volume_drives = []
for volume_resource in volume_resources:
drive_path = volume_resource.HostResource[0]
volume_drives.append(drive_path)
disk_files = []
for disk_resource in disk_resources:
disk_files.extend(
[c for c in self._get_disk_resource_disk_path(disk_resource)])
return (disk_files, volume_drives)
def _get_vm_disks(self, vm):
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._STORAGE_ALLOC_SETTING_DATA_CLASS)
disk_resources = [r for r in rasds if
r.ResourceSubType in
[self._IDE_DISK_RES_SUB_TYPE,
self._IDE_DVD_RES_SUB_TYPE]]
if (self._RESOURCE_ALLOC_SETTING_DATA_CLASS !=
self._STORAGE_ALLOC_SETTING_DATA_CLASS):
rasds = vmsettings[0].associators(
wmi_result_class=self._RESOURCE_ALLOC_SETTING_DATA_CLASS)
volume_resources = [r for r in rasds if
r.ResourceSubType == self._PHYS_DISK_RES_SUB_TYPE]
return (disk_resources, volume_resources)
def destroy_vm(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
# Remove the VM. Does not destroy disks.
(job_path, ret_val) = vs_man_svc.DestroyVirtualSystem(vm.path_())
self.check_ret_val(ret_val, job_path)
def check_ret_val(self, ret_val, job_path, success_values=[0]):
if ret_val == constants.WMI_JOB_STATUS_STARTED:
return self._wait_for_job(job_path)
elif ret_val not in success_values:
raise HyperVException(_('Operation failed with return value: %s')
% ret_val)
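    # Illustrative sketch (an assumption drawn from set_vm_state above):
    # callers may whitelist benign WMI return codes via success_values,
    # e.g. 32775, "invalid state for current operation":
    #
    #   (job_path, ret_val) = vm.RequestStateChange(state)
    #   self.check_ret_val(ret_val, job_path, [0, 32775])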
def _wait_for_job(self, job_path):
"""Poll WMI job state and wait for completion."""
job = self._get_wmi_obj(job_path)
while job.JobState == constants.WMI_JOB_STATE_RUNNING:
time.sleep(0.1)
job = self._get_wmi_obj(job_path)
if job.JobState != constants.WMI_JOB_STATE_COMPLETED:
job_state = job.JobState
if job.path().Class == "Msvm_ConcreteJob":
err_sum_desc = job.ErrorSummaryDescription
err_desc = job.ErrorDescription
err_code = job.ErrorCode
raise HyperVException(_("WMI job failed with status "
"%(job_state)d. Error details: "
"%(err_sum_desc)s - %(err_desc)s - "
"Error code: %(err_code)d") %
{'job_state': job_state,
'err_sum_desc': err_sum_desc,
'err_desc': err_desc,
'err_code': err_code})
else:
(error, ret_val) = job.GetError()
if not ret_val and error:
raise HyperVException(_("WMI job failed with status "
"%(job_state)d. Error details: "
"%(error)s") %
{'job_state': job_state,
'error': error})
else:
raise HyperVException(_("WMI job failed with status "
"%d. No error "
"description available") %
job_state)
desc = job.Description
elap = job.ElapsedTime
LOG.debug("WMI job succeeded: %(desc)s, Elapsed=%(elap)s",
{'desc': desc, 'elap': elap})
return job
def _get_wmi_obj(self, path):
return wmi.WMI(moniker=path.replace('\\', '/'))
def _add_virt_resource(self, res_setting_data, vm_path):
"""Adds a new resource to the VM."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
res_xml = [res_setting_data.GetText_(1)]
(job_path,
new_resources,
ret_val) = vs_man_svc.AddVirtualSystemResources(res_xml, vm_path)
self.check_ret_val(ret_val, job_path)
return new_resources
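    # Illustrative sketch (assumption): the typical add-resource flow is to
    # clone a '\\Default' setting-data instance, attach it to a parent
    # device, then pass its XML here:
    #
    #   res = self._get_new_resource_setting_data(self._DISK_RES_SUB_TYPE)
    #   res.Parent = ctrller_path
    #   self._add_virt_resource(res, vm.path_())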
def _modify_virt_resource(self, res_setting_data, vm_path):
"""Updates a VM resource."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val) = vs_man_svc.ModifyVirtualSystemResources(
ResourceSettingData=[res_setting_data.GetText_(1)],
ComputerSystem=vm_path)
self.check_ret_val(ret_val, job_path)
def _remove_virt_resource(self, res_setting_data, vm_path):
"""Removes a VM resource."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
res_path = [res_setting_data.path_()]
(job_path, ret_val) = vs_man_svc.RemoveVirtualSystemResources(res_path,
vm_path)
self.check_ret_val(ret_val, job_path)
def take_vm_snapshot(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val,
snp_setting_data) = vs_man_svc.CreateVirtualSystemSnapshot(vm.path_())
self.check_ret_val(ret_val, job_path)
job_wmi_path = job_path.replace('\\', '/')
job = wmi.WMI(moniker=job_wmi_path)
snp_setting_data = job.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)[0]
return snp_setting_data.path_()
def remove_vm_snapshot(self, snapshot_path):
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val) = vs_man_svc.RemoveVirtualSystemSnapshot(
snapshot_path)
self.check_ret_val(ret_val, job_path)
def detach_vm_disk(self, vm_name, disk_path):
vm = self._lookup_vm_check(vm_name)
physical_disk = self._get_mounted_disk_resource_from_path(disk_path)
if physical_disk:
self._remove_virt_resource(physical_disk, vm.path_())
def _get_mounted_disk_resource_from_path(self, disk_path):
physical_disks = self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = '%(res_sub_type)s'" %
{"class_name":
self._RESOURCE_ALLOC_SETTING_DATA_CLASS,
'res_sub_type':
self._PHYS_DISK_RES_SUB_TYPE})
for physical_disk in physical_disks:
if physical_disk.HostResource:
if physical_disk.HostResource[0].lower() == disk_path.lower():
return physical_disk
def get_mounted_disk_by_drive_number(self, device_number):
mounted_disks = self._conn.query("SELECT * FROM Msvm_DiskDrive "
"WHERE DriveNumber=" +
str(device_number))
        if mounted_disks:
return mounted_disks[0].path_()
def get_controller_volume_paths(self, controller_path):
disks = self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = '%(res_sub_type)s' "
"AND Parent='%(parent)s'" %
{"class_name":
self._RESOURCE_ALLOC_SETTING_DATA_CLASS,
"res_sub_type":
self._PHYS_DISK_RES_SUB_TYPE,
"parent":
controller_path})
disk_data = {}
for disk in disks:
if disk.HostResource:
disk_data[disk.path().RelPath] = disk.HostResource[0]
return disk_data
def enable_vm_metrics_collection(self, vm_name):
raise NotImplementedError(_("Metrics collection is not supported on "
"this version of Hyper-V"))
def get_vm_serial_port_connection(self, vm_name, update_connection=None):
vm = self._lookup_vm_check(vm_name)
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._RESOURCE_ALLOC_SETTING_DATA_CLASS)
serial_port = (
[r for r in rasds if
r.ResourceSubType == self._SERIAL_PORT_RES_SUB_TYPE][0])
if update_connection:
serial_port.Connection = [update_connection]
self._modify_virt_resource(serial_port, vm.path_())
if len(serial_port.Connection) > 0:
return serial_port.Connection[0]
def get_active_instances(self):
"""Return the names of all the active instances known to Hyper-V."""
vm_names = self.list_instances()
vms = [self._lookup_vm(vm_name) for vm_name in vm_names]
active_vm_names = [v.ElementName for v in vms
if v.EnabledState == constants.HYPERV_VM_STATE_ENABLED]
return active_vm_names
| apache-2.0 | 5,866,014,623,009,019,000 | 41.377168 | 79 | 0.573708 | false |
steventimberman/masterDebater | env/lib/python2.7/site-packages/django/db/backends/postgresql/creation.py | 58 | 2275 | import sys
from django.db.backends.base.creation import BaseDatabaseCreation
class DatabaseCreation(BaseDatabaseCreation):
def _quote_name(self, name):
return self.connection.ops.quote_name(name)
def _get_database_create_suffix(self, encoding=None, template=None):
suffix = ""
if encoding:
suffix += " ENCODING '{}'".format(encoding)
if template:
suffix += " TEMPLATE {}".format(self._quote_name(template))
if suffix:
suffix = "WITH" + suffix
return suffix
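    # Minimal sketch of the suffix this helper produces (illustrative values):
    #
    #   self._get_database_create_suffix(encoding='UTF8',
    #                                    template='template0')
    #   # -> 'WITH ENCODING \'UTF8\' TEMPLATE "template0"'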
def sql_table_creation_suffix(self):
test_settings = self.connection.settings_dict['TEST']
assert test_settings['COLLATION'] is None, (
"PostgreSQL does not support collation setting at database creation time."
)
return self._get_database_create_suffix(
encoding=test_settings['CHARSET'],
template=test_settings.get('TEMPLATE'),
)
def _clone_test_db(self, number, verbosity, keepdb=False):
# CREATE DATABASE ... WITH TEMPLATE ... requires closing connections
# to the template database.
self.connection.close()
source_database_name = self.connection.settings_dict['NAME']
target_database_name = self.get_test_db_clone_settings(number)['NAME']
suffix = self._get_database_create_suffix(template=source_database_name)
creation_sql = "CREATE DATABASE {} {}".format(self._quote_name(target_database_name), suffix)
with self._nodb_connection.cursor() as cursor:
try:
cursor.execute(creation_sql)
except Exception:
if keepdb:
return
try:
if verbosity >= 1:
print("Destroying old test database for alias %s..." % (
self._get_database_display_str(verbosity, target_database_name),
))
cursor.execute("DROP DATABASE %s" % self._quote_name(target_database_name))
cursor.execute(creation_sql)
except Exception as e:
sys.stderr.write("Got an error cloning the test database: %s\n" % e)
sys.exit(2)
| mit | -5,228,488,110,730,801,000 | 39.625 | 101 | 0.582418 | false |
ecolitan/fatics | venv/lib/python2.7/site-packages/twisted/application/app.py | 13 | 22166 | # -*- test-case-name: twisted.test.test_application,twisted.test.test_twistd -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import sys, os, pdb, getpass, traceback, signal
from operator import attrgetter
from twisted.python import runtime, log, usage, failure, util, logfile
from twisted.python.versions import Version
from twisted.python.reflect import qual, namedAny
from twisted.python.deprecate import deprecated
from twisted.python.log import ILogObserver
from twisted.persisted import sob
from twisted.application import service, reactors
from twisted.internet import defer
from twisted import copyright, plugin
# Expose the new implementation of installReactor at the old location.
from twisted.application.reactors import installReactor
from twisted.application.reactors import NoSuchReactor
class _BasicProfiler(object):
"""
@ivar saveStats: if C{True}, save the stats information instead of the
human readable format
@type saveStats: C{bool}
@ivar profileOutput: the name of the file use to print profile data.
@type profileOutput: C{str}
"""
def __init__(self, profileOutput, saveStats):
self.profileOutput = profileOutput
self.saveStats = saveStats
def _reportImportError(self, module, e):
"""
Helper method to report an import error with a profile module. This
has to be explicit because some of these modules are removed by
distributions due to them being non-free.
"""
s = "Failed to import module %s: %s" % (module, e)
s += """
This is most likely caused by your operating system not including
the module due to it being non-free. Either do not use the option
--profile, or install the module; your operating system vendor
may provide it in a separate package.
"""
raise SystemExit(s)
class ProfileRunner(_BasicProfiler):
"""
Runner for the standard profile module.
"""
def run(self, reactor):
"""
Run reactor under the standard profiler.
"""
try:
import profile
except ImportError, e:
self._reportImportError("profile", e)
p = profile.Profile()
p.runcall(reactor.run)
if self.saveStats:
p.dump_stats(self.profileOutput)
else:
tmp, sys.stdout = sys.stdout, open(self.profileOutput, 'a')
try:
p.print_stats()
finally:
sys.stdout, tmp = tmp, sys.stdout
tmp.close()
class HotshotRunner(_BasicProfiler):
"""
Runner for the hotshot profile module.
"""
def run(self, reactor):
"""
Run reactor under the hotshot profiler.
"""
try:
import hotshot.stats
except (ImportError, SystemExit), e:
# Certain versions of Debian (and Debian derivatives) raise
# SystemExit when importing hotshot if the "non-free" profiler
# module is not installed. Someone eventually recognized this
# as a bug and changed the Debian packaged Python to raise
# ImportError instead. Handle both exception types here in
# order to support the versions of Debian which have this
# behavior. The bug report which prompted the introduction of
# this highly undesirable behavior should be available online at
# <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=334067>.
# There seems to be no corresponding bug report which resulted
# in the behavior being removed. -exarkun
self._reportImportError("hotshot", e)
# this writes stats straight out
p = hotshot.Profile(self.profileOutput)
p.runcall(reactor.run)
if self.saveStats:
# stats are automatically written to file, nothing to do
return
else:
s = hotshot.stats.load(self.profileOutput)
s.strip_dirs()
s.sort_stats(-1)
if getattr(s, 'stream', None) is not None:
# Python 2.5 and above supports a stream attribute
s.stream = open(self.profileOutput, 'w')
s.print_stats()
s.stream.close()
else:
# But we have to use a trick for Python < 2.5
tmp, sys.stdout = sys.stdout, open(self.profileOutput, 'w')
try:
s.print_stats()
finally:
sys.stdout, tmp = tmp, sys.stdout
tmp.close()
class CProfileRunner(_BasicProfiler):
"""
Runner for the cProfile module.
"""
def run(self, reactor):
"""
Run reactor under the cProfile profiler.
"""
try:
import cProfile, pstats
except ImportError, e:
self._reportImportError("cProfile", e)
p = cProfile.Profile()
p.runcall(reactor.run)
if self.saveStats:
p.dump_stats(self.profileOutput)
else:
stream = open(self.profileOutput, 'w')
s = pstats.Stats(p, stream=stream)
s.strip_dirs()
s.sort_stats(-1)
s.print_stats()
stream.close()
class AppProfiler(object):
"""
Class which selects a specific profile runner based on configuration
options.
@ivar profiler: the name of the selected profiler.
@type profiler: C{str}
"""
profilers = {"profile": ProfileRunner, "hotshot": HotshotRunner,
"cprofile": CProfileRunner}
def __init__(self, options):
saveStats = options.get("savestats", False)
profileOutput = options.get("profile", None)
self.profiler = options.get("profiler", "hotshot").lower()
if self.profiler in self.profilers:
profiler = self.profilers[self.profiler](profileOutput, saveStats)
self.run = profiler.run
else:
raise SystemExit("Unsupported profiler name: %s" % (self.profiler,))
class AppLogger(object):
"""
    Class managing the logging facility of the application.
@ivar _logfilename: The name of the file to which to log, if other than the
default.
@type _logfilename: C{str}
@ivar _observerFactory: Callable object that will create a log observer, or
None.
@ivar _observer: log observer added at C{start} and removed at C{stop}.
@type _observer: C{callable}
"""
_observer = None
def __init__(self, options):
self._logfilename = options.get("logfile", "")
self._observerFactory = options.get("logger") or None
def start(self, application):
"""
Initialize the logging system.
        If a custom logger was specified on the command line it will be
used. If not, and an L{ILogObserver} component has been set on
C{application}, then it will be used as the log observer. Otherwise a
log observer will be created based on the command-line options for
built-in loggers (e.g. C{--logfile}).
@param application: The application on which to check for an
L{ILogObserver}.
"""
if self._observerFactory is not None:
observer = self._observerFactory()
else:
observer = application.getComponent(ILogObserver, None)
if observer is None:
observer = self._getLogObserver()
self._observer = observer
log.startLoggingWithObserver(self._observer)
self._initialLog()
def _initialLog(self):
"""
Print twistd start log message.
"""
from twisted.internet import reactor
log.msg("twistd %s (%s %s) starting up." % (copyright.version,
sys.executable,
runtime.shortPythonVersion()))
log.msg('reactor class: %s.' % (qual(reactor.__class__),))
def _getLogObserver(self):
"""
Create a log observer to be added to the logging system before running
this application.
"""
if self._logfilename == '-' or not self._logfilename:
logFile = sys.stdout
else:
logFile = logfile.LogFile.fromFullPath(self._logfilename)
return log.FileLogObserver(logFile).emit
def stop(self):
"""
Print twistd stop log message.
"""
log.msg("Server Shut Down.")
if self._observer is not None:
log.removeObserver(self._observer)
self._observer = None
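    # Illustrative lifecycle sketch (assumption, not original code):
    #
    #   logger = AppLogger({'logfile': 'twistd.log'})
    #   logger.start(application)   # install observer, log startup banner
    #   ...                         # run the reactor
    #   logger.stop()               # log shutdown, remove the observer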
def fixPdb():
def do_stop(self, arg):
self.clear_all_breaks()
self.set_continue()
from twisted.internet import reactor
reactor.callLater(0, reactor.stop)
return 1
def help_stop(self):
print """stop - Continue execution, then cleanly shutdown the twisted reactor."""
def set_quit(self):
os._exit(0)
pdb.Pdb.set_quit = set_quit
pdb.Pdb.do_stop = do_stop
pdb.Pdb.help_stop = help_stop
def runReactorWithLogging(config, oldstdout, oldstderr, profiler=None, reactor=None):
"""
Start the reactor, using profiling if specified by the configuration, and
log any error happening in the process.
@param config: configuration of the twistd application.
@type config: L{ServerOptions}
@param oldstdout: initial value of C{sys.stdout}.
@type oldstdout: C{file}
@param oldstderr: initial value of C{sys.stderr}.
@type oldstderr: C{file}
@param profiler: object used to run the reactor with profiling.
@type profiler: L{AppProfiler}
@param reactor: The reactor to use. If C{None}, the global reactor will
be used.
"""
if reactor is None:
from twisted.internet import reactor
try:
if config['profile']:
if profiler is not None:
profiler.run(reactor)
elif config['debug']:
sys.stdout = oldstdout
sys.stderr = oldstderr
if runtime.platformType == 'posix':
signal.signal(signal.SIGUSR2, lambda *args: pdb.set_trace())
signal.signal(signal.SIGINT, lambda *args: pdb.set_trace())
fixPdb()
pdb.runcall(reactor.run)
else:
reactor.run()
except:
if config['nodaemon']:
file = oldstdout
else:
file = open("TWISTD-CRASH.log",'a')
traceback.print_exc(file=file)
file.flush()
def getPassphrase(needed):
if needed:
return getpass.getpass('Passphrase: ')
else:
return None
def getSavePassphrase(needed):
if needed:
        return util.getPassword("Encryption passphrase: ")
else:
return None
class ApplicationRunner(object):
"""
An object which helps running an application based on a config object.
Subclass me and implement preApplication and postApplication
methods. postApplication generally will want to run the reactor
after starting the application.
@ivar config: The config object, which provides a dict-like interface.
@ivar application: Available in postApplication, but not
preApplication. This is the application object.
@ivar profilerFactory: Factory for creating a profiler object, able to
profile the application if options are set accordingly.
@ivar profiler: Instance provided by C{profilerFactory}.
@ivar loggerFactory: Factory for creating object responsible for logging.
@ivar logger: Instance provided by C{loggerFactory}.
"""
profilerFactory = AppProfiler
loggerFactory = AppLogger
def __init__(self, config):
self.config = config
self.profiler = self.profilerFactory(config)
self.logger = self.loggerFactory(config)
def run(self):
"""
Run the application.
"""
self.preApplication()
self.application = self.createOrGetApplication()
self.logger.start(self.application)
self.postApplication()
self.logger.stop()
def startReactor(self, reactor, oldstdout, oldstderr):
"""
Run the reactor with the given configuration. Subclasses should
probably call this from C{postApplication}.
@see: L{runReactorWithLogging}
"""
runReactorWithLogging(
self.config, oldstdout, oldstderr, self.profiler, reactor)
def preApplication(self):
"""
Override in subclass.
This should set up any state necessary before loading and
running the Application.
"""
raise NotImplementedError()
def postApplication(self):
"""
Override in subclass.
This will be called after the application has been loaded (so
the C{application} attribute will be set). Generally this
should start the application and run the reactor.
"""
raise NotImplementedError()
def createOrGetApplication(self):
"""
Create or load an Application based on the parameters found in the
given L{ServerOptions} instance.
If a subcommand was used, the L{service.IServiceMaker} that it
represents will be used to construct a service to be added to
a newly-created Application.
Otherwise, an application will be loaded based on parameters in
the config.
"""
if self.config.subCommand:
# If a subcommand was given, it's our responsibility to create
# the application, instead of load it from a file.
# loadedPlugins is set up by the ServerOptions.subCommands
# property, which is iterated somewhere in the bowels of
# usage.Options.
plg = self.config.loadedPlugins[self.config.subCommand]
ser = plg.makeService(self.config.subOptions)
application = service.Application(plg.tapname)
ser.setServiceParent(application)
else:
passphrase = getPassphrase(self.config['encrypted'])
application = getApplication(self.config, passphrase)
return application
def getApplication(config, passphrase):
s = [(config[t], t)
for t in ['python', 'source', 'file'] if config[t]][0]
filename, style = s[0], {'file':'pickle'}.get(s[1],s[1])
try:
log.msg("Loading %s..." % filename)
application = service.loadApplication(filename, style, passphrase)
log.msg("Loaded.")
except Exception, e:
s = "Failed to load application: %s" % e
if isinstance(e, KeyError) and e.args[0] == "application":
s += """
Could not find 'application' in the file. To use 'twistd -y', your .tac
file must create a suitable object (e.g., by calling service.Application())
and store it in a variable named 'application'. twistd loads your .tac file
and scans the global variables for one of this name.
Please read the 'Using Application' HOWTO for details.
"""
traceback.print_exc(file=log.logfile)
log.msg(s)
log.deferr()
sys.exit('\n' + s + '\n')
return application
def _reactorAction():
return usage.CompleteList([r.shortName for r in reactors.getReactorTypes()])
class ReactorSelectionMixin:
"""
Provides options for selecting a reactor to install.
If a reactor is installed, the short name which was used to locate it is
saved as the value for the C{"reactor"} key.
"""
compData = usage.Completions(
optActions={"reactor": _reactorAction})
messageOutput = sys.stdout
_getReactorTypes = staticmethod(reactors.getReactorTypes)
def opt_help_reactors(self):
"""
Display a list of possibly available reactor names.
"""
rcts = sorted(self._getReactorTypes(), key=attrgetter('shortName'))
for r in rcts:
self.messageOutput.write(' %-4s\t%s\n' %
(r.shortName, r.description))
raise SystemExit(0)
def opt_reactor(self, shortName):
"""
Which reactor to use (see --help-reactors for a list of possibilities)
"""
# Actually actually actually install the reactor right at this very
# moment, before any other code (for example, a sub-command plugin)
# runs and accidentally imports and installs the default reactor.
#
# This could probably be improved somehow.
try:
installReactor(shortName)
except NoSuchReactor:
msg = ("The specified reactor does not exist: '%s'.\n"
"See the list of available reactors with "
"--help-reactors" % (shortName,))
raise usage.UsageError(msg)
except Exception, e:
msg = ("The specified reactor cannot be used, failed with error: "
"%s.\nSee the list of available reactors with "
"--help-reactors" % (e,))
raise usage.UsageError(msg)
else:
self["reactor"] = shortName
opt_r = opt_reactor
class ServerOptions(usage.Options, ReactorSelectionMixin):
longdesc = ("twistd reads a twisted.application.service.Application out "
"of a file and runs it.")
optFlags = [['savestats', None,
"save the Stats object rather than the text output of "
"the profiler."],
['no_save','o', "do not save state on shutdown"],
['encrypted', 'e',
"The specified tap/aos file is encrypted."]]
optParameters = [['logfile','l', None,
"log to a specified file, - for stdout"],
['logger', None, None,
"A fully-qualified name to a log observer factory to use "
"for the initial log observer. Takes precedence over "
"--logfile and --syslog (when available)."],
['profile', 'p', None,
"Run in profile mode, dumping results to specified file"],
['profiler', None, "hotshot",
"Name of the profiler to use (%s)." %
", ".join(AppProfiler.profilers)],
['file','f','twistd.tap',
"read the given .tap file"],
['python','y', None,
"read an application from within a Python file "
"(implies -o)"],
['source', 's', None,
"Read an application from a .tas file (AOT format)."],
['rundir','d','.',
'Change to a supplied directory before running']]
compData = usage.Completions(
mutuallyExclusive=[("file", "python", "source")],
optActions={"file": usage.CompleteFiles("*.tap"),
"python": usage.CompleteFiles("*.(tac|py)"),
"source": usage.CompleteFiles("*.tas"),
"rundir": usage.CompleteDirs()}
)
_getPlugins = staticmethod(plugin.getPlugins)
def __init__(self, *a, **kw):
self['debug'] = False
usage.Options.__init__(self, *a, **kw)
def opt_debug(self):
"""
Run the application in the Python Debugger (implies nodaemon),
sending SIGUSR2 will drop into debugger
"""
defer.setDebugging(True)
failure.startDebugMode()
self['debug'] = True
opt_b = opt_debug
def opt_spew(self):
"""
Print an insanely verbose log of everything that happens.
Useful when debugging freezes or locks in complex code."""
sys.settrace(util.spewer)
try:
import threading
except ImportError:
return
threading.settrace(util.spewer)
def parseOptions(self, options=None):
if options is None:
options = sys.argv[1:] or ["--help"]
usage.Options.parseOptions(self, options)
def postOptions(self):
if self.subCommand or self['python']:
self['no_save'] = True
if self['logger'] is not None:
try:
self['logger'] = namedAny(self['logger'])
except Exception, e:
raise usage.UsageError("Logger '%s' could not be imported: %s"
% (self['logger'], e))
def subCommands(self):
plugins = self._getPlugins(service.IServiceMaker)
self.loadedPlugins = {}
for plug in sorted(plugins, key=attrgetter('tapname')):
self.loadedPlugins[plug.tapname] = plug
yield (plug.tapname,
None,
# Avoid resolving the options attribute right away, in case
# it's a property with a non-trivial getter (eg, one which
# imports modules).
lambda plug=plug: plug.options(),
plug.description)
subCommands = property(subCommands)
def run(runApp, ServerOptions):
config = ServerOptions()
try:
config.parseOptions()
except usage.error, ue:
print config
print "%s: %s" % (sys.argv[0], ue)
else:
runApp(config)
def convertStyle(filein, typein, passphrase, fileout, typeout, encrypt):
application = service.loadApplication(filein, typein, passphrase)
sob.IPersistable(application).setStyle(typeout)
passphrase = getSavePassphrase(encrypt)
if passphrase:
fileout = None
sob.IPersistable(application).save(filename=fileout, passphrase=passphrase)
def startApplication(application, save):
from twisted.internet import reactor
service.IService(application).startService()
if save:
p = sob.IPersistable(application)
reactor.addSystemEventTrigger('after', 'shutdown', p.save, 'shutdown')
reactor.addSystemEventTrigger('before', 'shutdown',
service.IService(application).stopService)
| agpl-3.0 | 92,364,532,039,956,690 | 31.88724 | 89 | 0.599883 | false |
bonsai-team/matam | scripts/fastq_get_pairs.py | 3 | 4310 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
fastq_get_pairs
Description: Retrieve paired and singleton reads from a single fastq file
fastq_get_pairs.py -i input.fq
-----------------------------------------------------------------------
Author: This software is written and maintained by Pierre Pericard
([email protected])
Created: 2016-08-25
Last Modified: 2016-08-25
Licence: GNU GPL 3.0
Copyright 2016 Pierre Pericard
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import os
import argparse
def read_fastq_file_handle(fastq_file_handle):
"""
Parse a fastq file and return a generator
"""
# Variables initialization
count = 0
header = ''
seq = ''
qual = ''
# Reading input file
for line in (l.strip() for l in fastq_file_handle if l.strip()):
count += 1
if count % 4 == 1:
if header:
yield header, seq, qual
header = line[1:]
elif count % 4 == 2:
seq = line
elif count % 4 == 0:
qual = line
yield header, seq, qual
# Close input file
fastq_file_handle.close()
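# Minimal usage sketch (illustrative; 'reads.fq' is a made-up path):
#
#   for header, seq, qual in read_fastq_file_handle(open('reads.fq')):
#       print(header, len(seq), len(qual))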
def buffer_paired_reads(fastq_fh):
"""
"""
previous_read_id = ''
read_buffer = list()
# Reading each read in fastq file
for header, seq, qual in read_fastq_file_handle(fastq_fh):
#~ read_id = header.split()[0]
read_id = header.split()[0][:-2]
# Yield read buffer
if read_id != previous_read_id:
if previous_read_id:
yield read_buffer
read_buffer = list()
# Append read into read buffer
read = (header, seq, qual)
read_buffer.append(read)
# Store previous read id
previous_read_id = read_id
# Yield last read buffer
yield read_buffer
# Close input file
fastq_fh.close()
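# Illustrative sketch (assumption): mates share the same id once the
# trailing '/1' or '/2' style suffix is stripped, so a yielded buffer of
# length 2 is a pair and length 1 is a singleton:
#
#   for read_buffer in buffer_paired_reads(open('reads.fq')):
#       kind = 'pair' if len(read_buffer) == 2 else 'singleton'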
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Retrieve paired and singleton reads from a fastq file')
parser.add_argument('-i', '--input_fastq',
metavar='INFQ',
type=argparse.FileType('r'),
default='-',
help='input fastq file')
parser.add_argument('-b', '--basename',
metavar='BASENAME',
type=str,
help='Output files basename. '
'Default will be based on input file name')
args = parser.parse_args()
    # Derive the default basename and extension from the input file name;
    # fall back to generic values when reading from stdin.
    input_basename, input_extension = 'stdin', '.fastq'
    if args.input_fastq.name != '<stdin>':
        input_filename = os.path.basename(args.input_fastq.name)
        input_basename, input_extension = os.path.splitext(input_filename)
    if not args.basename:
        args.basename = input_basename
output_left_filename = args.basename + '.left' + input_extension
out_left_fh = open(output_left_filename, 'w')
output_right_filename = args.basename + '.right' + input_extension
out_right_fh = open(output_right_filename, 'w')
output_singleton_filename = args.basename + '.singleton' + input_extension
out_single_fh = open(output_singleton_filename, 'w')
for read_buffer in buffer_paired_reads(args.input_fastq):
if len(read_buffer) == 2:
read_left = read_buffer[0]
out_left_fh.write('@{0}\n{1}\n+\n{2}\n'.format(*read_left))
read_right = read_buffer[1]
out_right_fh.write('@{0}\n{1}\n+\n{2}\n'.format(*read_right))
elif len(read_buffer) == 1:
read = read_buffer[0]
out_single_fh.write('@{0}\n{1}\n+\n{2}\n'.format(*read))
| agpl-3.0 | -3,224,927,057,190,629,400 | 30.925926 | 105 | 0.592807 | false |
philanthropy-u/edx-platform | lms/djangoapps/grades/migrations/0013_persistentsubsectiongradeoverride.py | 21 | 1201 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from coursewarehistoryextended.fields import UnsignedBigIntOneToOneField
class Migration(migrations.Migration):
dependencies = [
('grades', '0012_computegradessetting'),
]
operations = [
migrations.CreateModel(
name='PersistentSubsectionGradeOverride',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True, db_index=True)),
('modified', models.DateTimeField(auto_now=True, db_index=True)),
('earned_all_override', models.FloatField(null=True, blank=True)),
('possible_all_override', models.FloatField(null=True, blank=True)),
('earned_graded_override', models.FloatField(null=True, blank=True)),
('possible_graded_override', models.FloatField(null=True, blank=True)),
('grade', UnsignedBigIntOneToOneField(related_name='override', to='grades.PersistentSubsectionGrade')),
],
),
]
| agpl-3.0 | -183,943,342,400,203,200 | 40.413793 | 119 | 0.631973 | false |
farhi-naz/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/applywatchlist_unittest.py | 124 | 2302 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.mocktool import MockOptions, MockTool
from webkitpy.tool.steps.applywatchlist import ApplyWatchList
class ApplyWatchListTest(unittest.TestCase):
def test_apply_watch_list_local(self):
capture = OutputCapture()
step = ApplyWatchList(MockTool(log_executive=True), MockOptions())
state = {
'bug_id': '50001',
'diff': 'The diff',
}
expected_logs = """MockWatchList: determine_cc_and_messages
MOCK bug comment: bug_id=50001, cc=set(['[email protected]'])
--- Begin comment ---
Message2.
--- End comment ---
"""
capture.assert_outputs(self, step.run, [state], expected_logs=expected_logs)
| bsd-3-clause | -3,476,106,841,369,863,700 | 44.137255 | 84 | 0.752389 | false |
pferreir/indico-backup | indico/tests/python/unit/indico_tests/modules_tests/scheduler_tests/tasks_test.py | 1 | 2783 | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, timedelta
from dateutil import rrule
from indico.modules.scheduler.tasks.periodic import PeriodicTask, TaskOccurrence
from indico.tests.python.unit.util import IndicoTestCase
class TestPeriodicTask(IndicoTestCase):
def testPeriodicTaskFrequency(self):
dt = datetime(2010, 1, 1, 20, 0, 0)
pt = PeriodicTask(rrule.MINUTELY, dtstart=dt)
self.assertEqual(pt.getStartOn(), datetime(2010, 1, 1, 20, 0, 0))
pt.setNextOccurrence(dateAfter=dt)
self.assertEqual(pt.getStartOn(), datetime(2010, 1, 1, 20, 1, 0))
pt = PeriodicTask(rrule.HOURLY, dtstart=dt)
self.assertEqual(pt.getStartOn(), datetime(2010, 1, 1, 20, 0, 0))
pt.setNextOccurrence(dateAfter=dt)
self.assertEqual(pt.getStartOn(), datetime(2010, 1, 1, 21, 0, 0))
def testPeriodicTaskNoMoreLeft(self):
dt = datetime(2010, 1, 1, 20, 0, 0)
# date + 1 month
pt = PeriodicTask(rrule.YEARLY, dtstart=dt, until=dt + timedelta(days=30))
self.assertEqual(pt.getStartOn(), datetime(2010, 1, 1, 20, 0, 0))
pt.setNextOccurrence(dateAfter=dt)
self.assertEqual(pt.getStartOn(), None)
pt.setNextOccurrence(dateAfter=dt)
self.assertEqual(pt.getStartOn(), None)
def testPeriodicTaskOrder(self):
dt = datetime(2010, 1, 1, 20, 0, 0)
pt = PeriodicTask(rrule.MINUTELY, dtstart=dt)
pt.id = 0
pt2 = PeriodicTask(rrule.MINUTELY, dtstart=dt)
pt2.id = 1
for i in range(5):
pt.addOccurrence(TaskOccurrence(pt))
pt2.addOccurrence(TaskOccurrence(pt2))
self.assertEqual(cmp(pt, pt2), -1)
self.assertEqual(cmp(pt._occurrences[0], pt2), -1)
self.assertEqual(cmp(pt._occurrences[0], pt), 1)
self.assertEqual(cmp(pt._occurrences[0], pt._occurrences[1]), -1)
self.assertEqual(cmp(pt._occurrences[0], pt._occurrences[0]), 0)
self.assertEqual(cmp(pt._occurrences[0], pt2._occurrences[0]), -1)
| gpl-3.0 | 2,951,336,387,316,187,600 | 41.815385 | 82 | 0.671577 | false |
PersonalGenomesOrg/open-humans | data_import/templatetags/data_import.py | 2 | 1245 | from django import template
from django.apps import apps
from private_sharing.models import project_membership_visible
from public_data.models import is_public
register = template.Library()
@register.simple_tag
def source_is_connected(source, user):
"""
Return True if the given source is connected (has the required data for
retrieving the user's data, like a huID or an access token).
"""
try:
return getattr(user, source).is_connected
except: # pylint: disable=bare-except
return False
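# Illustrative template usage (assumption; the source name and the 'as'
# assignment form are made up for this sketch, the load/tag names come
# from this module):
#
#   {% load data_import %}
#   {% source_is_connected 'some_source' request.user as connected %}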
@register.simple_tag
def source_is_individual_deletion(source):
"""
Return True if the given source allows users to manage each file
individually.
"""
return apps.get_app_config(source).individual_deletion
@register.simple_tag(takes_context=True)
def source_is_public(context, source):
"""
Return True if the given source is public for the user in the current
request context.
"""
return is_public(context.request.user.member, source)
@register.simple_tag(takes_context=True)
def source_is_visible(context, source):
"""
    Return True if the given source is publicly visible.
"""
return project_membership_visible(context.request.user.member, source)
| mit | 2,835,184,546,305,820,000 | 26.666667 | 75 | 0.718876 | false |
kirillzhuravlev/numpy | numpy/ma/tests/test_regression.py | 113 | 2470 | from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
from numpy.testing import (assert_, TestCase, assert_array_equal,
assert_allclose, run_module_suite)
from numpy.compat import sixu
rlevel = 1
class TestRegression(TestCase):
def test_masked_array_create(self,level=rlevel):
# Ticket #17
x = np.ma.masked_array([0, 1, 2, 3, 0, 4, 5, 6],
mask=[0, 0, 0, 1, 1, 1, 0, 0])
assert_array_equal(np.ma.nonzero(x), [[1, 2, 6, 7]])
def test_masked_array(self,level=rlevel):
# Ticket #61
np.ma.array(1, mask=[1])
def test_mem_masked_where(self,level=rlevel):
# Ticket #62
from numpy.ma import masked_where, MaskType
a = np.zeros((1, 1))
b = np.zeros(a.shape, MaskType)
c = masked_where(b, a)
a-c
def test_masked_array_multiply(self,level=rlevel):
# Ticket #254
a = np.ma.zeros((4, 1))
a[2, 0] = np.ma.masked
b = np.zeros((4, 2))
a*b
b*a
def test_masked_array_repeat(self, level=rlevel):
# Ticket #271
np.ma.array([1], mask=False).repeat(10)
def test_masked_array_repr_unicode(self):
# Ticket #1256
repr(np.ma.array(sixu("Unicode")))
def test_atleast_2d(self):
# Ticket #1559
a = np.ma.masked_array([0.0, 1.2, 3.5], mask=[False, True, False])
b = np.atleast_2d(a)
assert_(a.mask.ndim == 1)
assert_(b.mask.ndim == 2)
def test_set_fill_value_unicode_py3(self):
# Ticket #2733
a = np.ma.masked_array(['a', 'b', 'c'], mask=[1, 0, 0])
a.fill_value = 'X'
assert_(a.fill_value == 'X')
def test_var_sets_maskedarray_scalar(self):
# Issue gh-2757
a = np.ma.array(np.arange(5), mask=True)
mout = np.ma.array(-1, dtype=float)
a.var(out=mout)
assert_(mout._data == 0)
def test_ddof_corrcoef(self):
# See gh-3336
x = np.ma.masked_equal([1, 2, 3, 4, 5], 4)
y = np.array([2, 2.5, 3.1, 3, 5])
with warnings.catch_warnings():
warnings.simplefilter("ignore")
r0 = np.ma.corrcoef(x, y, ddof=0)
r1 = np.ma.corrcoef(x, y, ddof=1)
# ddof should not have an effect (it gets cancelled out)
assert_allclose(r0.data, r1.data)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | -1,710,622,473,703,747,000 | 29.875 | 74 | 0.542105 | false |
salaria/odoo | addons/hr_timesheet_invoice/wizard/__init__.py | 433 | 1159 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet_invoice_create
import hr_timesheet_analytic_profit
import hr_timesheet_final_invoice_create
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,345,627,282,409,993,700 | 41.925926 | 78 | 0.634167 | false |
aospx-kitkat/platform_external_chromium_org | chrome/test/functional/multiprofile.py | 50 | 13257 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import pyauto_functional
import pyauto
class MultiprofileTest(pyauto.PyUITest):
"""Tests for Multi-Profile / Multi-users"""
_RESTORE_STARTUP_URL_VALUE = 4
_RESTORE_LASTOPEN_URL_VALUE = 1
_RESTORE_DEFAULT_URL_VALUE = 0
def Debug(self):
"""Test method for experimentation.
This method will not run automatically.
"""
while True:
raw_input('Hit <enter> to dump info.. ')
self.pprint(self.GetMultiProfileInfo())
def _GetSearchEngineWithKeyword(self, keyword, windex=0):
"""Get search engine info and return an element that matches keyword.
Args:
keyword: Search engine keyword field.
windex: The window index, default is 0.
Returns:
A search engine info dict or None.
"""
match_list = ([x for x in self.GetSearchEngineInfo(windex=windex)
if x['keyword'] == keyword])
if match_list:
return match_list[0]
return None
def _SetPreferences(self, dict, windex=0):
"""Sets preferences settings.
Args:
      dict: Dictionary of preference keys and the values to be set.
windex: The window index, defaults to 0 (the first window).
"""
for key in dict.iterkeys():
self.SetPrefs(key, dict[key], windex=windex)
def _SetStartUpPage(self, url, windex=0):
"""Set start up page.
Args:
url: URL of the page to be set as start up page.
windex: The window index, default is 0.
"""
_dict = {pyauto.kURLsToRestoreOnStartup: [url],
pyauto.kRestoreOnStartup: self._RESTORE_STARTUP_URL_VALUE}
self._SetPreferences(_dict, windex=windex)
prefs_info = self.GetPrefsInfo(windex=windex).Prefs(
pyauto.kURLsToRestoreOnStartup)
self.assertTrue(url in prefs_info)
def _SetHomePage(self, url, windex=0):
"""Create new profile and set home page.
Args:
url: URL of the page to be set as home page
windex: The window index, default is 0.
"""
_dict = {pyauto.kHomePage: url,
pyauto.kHomePageIsNewTabPage: False, pyauto.kShowHomeButton: True,
pyauto.kRestoreOnStartup: self._RESTORE_DEFAULT_URL_VALUE}
self._SetPreferences(_dict, windex=windex)
self.assertTrue(url in
self.GetPrefsInfo(windex=windex).Prefs(pyauto.kHomePage))
def _SetSessionRestoreURLs(self, set_restore, windex=0):
"""Create new profile and set home page.
Args:
set_restore: Value of action of start up.
windex: The window index, default is 0.
"""
self.NavigateToURL('http://www.google.com/', windex)
self.AppendTab(pyauto.GURL('http://news.google.com/'), windex)
num_tabs = self.GetTabCount(windex)
dict = {pyauto.kRestoreOnStartup: set_restore}
self._SetPreferences(dict, windex=windex)
def _AddSearchEngine(self, title, keyword, url, windex=0):
"""Add search engine.
Args:
title: Name for search engine.
keyword: Keyword, used to initiate a custom search from omnibox.
url: URL template for this search engine's query.
windex: The window index, default is 0.
"""
self.AddSearchEngine(title, keyword, url, windex=windex)
name = self._GetSearchEngineWithKeyword(keyword, windex=windex)
self.assertTrue(name)
def _AssertStartUpPage(self, url, profile='Default'):
"""Asserts start up page for given profile.
Args:
url: URL of the page to be set as start up page
profile: The profile name, defaults to 'Default'.
"""
self.AppendBrowserLaunchSwitch('--profile-directory=' + profile)
self.RestartBrowser(clear_profile=False)
info = self.GetBrowserInfo()
self.assertEqual(url, info['windows'][0]['tabs'][0]['url'].rstrip('/'))
self.assertTrue(url in
self.GetPrefsInfo().Prefs(pyauto.kURLsToRestoreOnStartup))
def _AssertHomePage(self, url, profile='Default'):
"""Asserts home page for given profile.
Args:
url: URL of the page to be set as home page
      profile: The profile name, defaults to 'Default'.
"""
self.AppendBrowserLaunchSwitch('--profile-directory=' + profile)
self.RestartBrowser(clear_profile=False)
self.assertTrue(url in self.GetPrefsInfo().Prefs(pyauto.kHomePage))
def _AssertDefaultSearchEngine(self, search_engine, profile='Default'):
"""Asserts default search engine for given profile.
Args:
search_engine: Name of default search engine.
profile: The profile name, defaults to 'Default'.
"""
self.AppendBrowserLaunchSwitch('--profile-directory=' + profile)
self.RestartBrowser(clear_profile=False)
name = self._GetSearchEngineWithKeyword(search_engine)
self.assertTrue(name['is_default'])
self.SetOmniboxText('test search')
self.OmniboxAcceptInput()
self.assertTrue(re.search(search_engine, self.GetActiveTabURL().spec()))
def _AssertSessionRestore(self, url_list, set_restore, num_tabs=1,
profile='Default'):
"""Asserts urls when session is set to restored or set default.
Args:
url_list: List of URL to be restored.
set_restore: Value of action of start up.
num_tabs: Number of tabs to be restored, default is 1.
profile: The profile name, defaults to 'Default'.
"""
self.AppendBrowserLaunchSwitch('--profile-directory=' + profile)
self.RestartBrowser(clear_profile=False)
self.assertEqual(num_tabs, self.GetTabCount())
self.assertEqual(self.GetPrefsInfo().Prefs(pyauto.kRestoreOnStartup),
set_restore)
tab_index = 0
while (tab_index < num_tabs):
self.ActivateTab(tab_index)
self.assertEqual(url_list[tab_index], self.GetActiveTabURL().spec())
tab_index += 1
def testBasic(self):
"""Multi-profile windows can open."""
self.assertEqual(1, self.GetBrowserWindowCount())
self.assertTrue(self.GetMultiProfileInfo()['enabled'],
msg='Multi-profile is not enabled')
self.OpenNewBrowserWindowWithNewProfile()
# Verify multi-profile info.
multi_profile = self.GetMultiProfileInfo()
self.assertEqual(2, len(multi_profile['profiles']))
new_profile = multi_profile['profiles'][1]
self.assertTrue(new_profile['name'])
# Verify browser windows.
self.assertEqual(2, self.GetBrowserWindowCount(),
msg='New browser window did not open')
info = self.GetBrowserInfo()
new_profile_window = info['windows'][1]
self.assertEqual('Profile 1', new_profile_window['profile_path'])
self.assertEqual(1, len(new_profile_window['tabs']))
self.assertEqual('chrome://newtab/', new_profile_window['tabs'][0]['url'])
def test20NewProfiles(self):
"""Verify we can create 20 new profiles."""
for index in range(1, 21):
self.OpenNewBrowserWindowWithNewProfile()
multi_profile = self.GetMultiProfileInfo()
self.assertEqual(index + 1, len(multi_profile['profiles']),
msg='Expected %d profiles after adding %d new users. Got %d' % (
index + 1, index, len(multi_profile['profiles'])))
def testStartUpPageOptionInMultiProfile(self):
"""Test startup page for Multi-profile windows."""
self.assertTrue(self.GetMultiProfileInfo()['enabled'],
msg='Multi-profile is not enabled')
# Launch browser with new Profile 1, set startup page to 'www.google.com'.
self.OpenNewBrowserWindowWithNewProfile()
self._SetStartUpPage('http://www.google.com', windex=1)
# Launch browser with new Profile 2, set startup page to 'www.yahoo.com'.
self.OpenNewBrowserWindowWithNewProfile()
# Verify start up page for Profile 2 is still newtab page.
info = self.GetBrowserInfo()
self.assertEqual('chrome://newtab/', info['windows'][2]['tabs'][0]['url'])
self._SetStartUpPage('http://www.yahoo.com', windex=2)
# Exit Profile 1 / Profile 2
self.CloseBrowserWindow(2)
self.CloseBrowserWindow(1)
# Relaunch Browser with Profile 2, verify startup page.
self._AssertStartUpPage('http://www.yahoo.com', profile='Profile 2')
# Relaunch Browser with Profile 1, verify startup page.
self._AssertStartUpPage('http://www.google.com', profile='Profile 1')
def testHomePageOptionMultiProfile(self):
"""Test Home page for Multi-profile windows."""
self.assertTrue(self.GetMultiProfileInfo()['enabled'],
msg='Multi-profile is not enabled')
# Launch browser with new Profile 1, set homepage to 'www.google.com'.
self.OpenNewBrowserWindowWithNewProfile()
self._SetHomePage('http://www.google.com', windex=1)
# Launch browser with new Profile 2, set homepage to 'www.yahoo.com'.
self.OpenNewBrowserWindowWithNewProfile()
self._SetHomePage('http://www.yahoo.com', windex=2)
# Exit Profile 1 / Profile 2
self.CloseBrowserWindow(2)
self.CloseBrowserWindow(1)
# Relaunch Browser with Profile 2, verify startup page.
self._AssertHomePage('http://www.yahoo.com', profile='Profile 2')
# Relaunch Browser with Profile 1, verify startup page.
self._AssertHomePage('http://www.google.com', profile='Profile 1')
def testSessionRestoreInMultiProfile(self):
"""Test session restore preference for Multi-profile windows."""
self.assertTrue(self.GetMultiProfileInfo()['enabled'],
msg='Multi-profile is not enabled')
# Launch browser with new Profile 1, set pref to restore session on
# startup.
self.OpenNewBrowserWindowWithNewProfile()
self._SetSessionRestoreURLs(self._RESTORE_LASTOPEN_URL_VALUE, windex=1)
# Launch browser with new Profile 2, do not set session restore pref.
self.OpenNewBrowserWindowWithNewProfile()
self._SetSessionRestoreURLs(self._RESTORE_DEFAULT_URL_VALUE, windex=2)
# Exit Profile 1 / Profile 2
self.CloseBrowserWindow(2)
self.CloseBrowserWindow(1)
# Relaunch Browser with Profile 1, verify session restores on startup.
url_list = ['http://www.google.com/', 'http://news.google.com/']
self._AssertSessionRestore(url_list, self._RESTORE_LASTOPEN_URL_VALUE,
num_tabs=2, profile='Profile 1')
# Relaunch Browser with Profile 2, verify session does not get restored.
url_list = ['chrome://newtab/']
self._AssertSessionRestore(url_list, self._RESTORE_DEFAULT_URL_VALUE,
num_tabs=1, profile='Profile 2')
def testMakeSearchEngineDefaultInMultiprofile(self):
"""Test adding and making a search engine default for Multi-profiles."""
self.assertTrue(self.GetMultiProfileInfo()['enabled'],
msg='Multi-profile is not enabled')
# Launch browser with new Profile 1, add search engine to 'Hulu'.
self.OpenNewBrowserWindowWithNewProfile()
self._AddSearchEngine('Hulu', 'hulu.com',
'http://www.hulu.com/search?query=%s&ref=os&src={referrer:source?}', 1)
self.MakeSearchEngineDefault('hulu.com', windex=1)
# Launch browser with new Profile 2, add search engine to 'Youtube'.
self.OpenNewBrowserWindowWithNewProfile()
self._AddSearchEngine('YouTube Video Search', 'youtube.com',
'http://www.youtube.com/results?search_query=%s&page={startPage?}'+
'&utm_source=opensearch', 2)
self.MakeSearchEngineDefault('youtube.com', windex=2)
# Exit Profile 1 / Profile 2
self.CloseBrowserWindow(2)
self.CloseBrowserWindow(1)
# Relaunch Browser with Profile 1, verify default search engine as 'Hulu'.
self._AssertDefaultSearchEngine('hulu.com', profile='Profile 1')
# Relaunch Browser with Profile 2, verify default search engine as
# 'Youtube'.
self._AssertDefaultSearchEngine('youtube.com', profile='Profile 2')
def testDeleteSearchEngineInMultiprofile(self):
"""Test adding then deleting a search engine for Multi-profiles."""
self.assertTrue(self.GetMultiProfileInfo()['enabled'],
msg='Multi-profile is not enabled')
# Launch browser with new Profile 1, add 'foo.com' as new search engine.
self.OpenNewBrowserWindowWithNewProfile()
self._AddSearchEngine('foo', 'foo.com', 'http://foo/?q=%s', windex=1)
# Launch browser with new Profile 2, add 'foo.com' as new search engine.
self.OpenNewBrowserWindowWithNewProfile()
self._AddSearchEngine('foo', 'foo.com', 'http://foo/?q=%s', windex=2)
# Delete search engine 'foo.com' from Profile 1 and exit.
self.DeleteSearchEngine('foo.com', windex=1)
self.CloseBrowserWindow(2)
self.CloseBrowserWindow(1)
# Relaunch Browser with Profile 1, verify search engine 'foo.com'
# is deleted.
self.AppendBrowserLaunchSwitch('--profile-directory=Profile 1')
self.RestartBrowser(clear_profile=False)
foo = self._GetSearchEngineWithKeyword('foo.com')
self.assertFalse(foo)
# Relaunch Browser with Profile 2, verify search engine 'foo.com'
# is not deleted.
self.AppendBrowserLaunchSwitch('--profile-directory=Profile 2')
self.RestartBrowser(clear_profile=False)
foo = self._GetSearchEngineWithKeyword('foo.com')
self.assertTrue(foo)
if __name__ == '__main__':
pyauto_functional.Main()
| bsd-3-clause | -64,203,652,193,483,420 | 41.219745 | 79 | 0.686204 | false |
wolfskaempf/ga_statistics | lib/python2.7/site-packages/crispy_forms/layout.py | 10 | 15081 | import warnings
from django.conf import settings
from django.template import Context, Template
from django.template.loader import render_to_string
from django.utils.html import conditional_escape
from crispy_forms.compatibility import string_types, text_type
from crispy_forms.utils import render_field, flatatt
TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
class LayoutObject(object):
def __getitem__(self, slice):
return self.fields[slice]
def __setitem__(self, slice, value):
self.fields[slice] = value
def __delitem__(self, slice):
del self.fields[slice]
def __len__(self):
return len(self.fields)
def __getattr__(self, name):
"""
        This allows us to access self.fields list methods like append or insert,
        without having to declare them one by one.
"""
# Check necessary for unpickling, see #107
if 'fields' in self.__dict__ and hasattr(self.fields, name):
return getattr(self.fields, name)
else:
return object.__getattribute__(self, name)
def get_field_names(self, index=None):
"""
        Returns a list of lists; each inner list is a named pointer whose first
        element is the location of the field and whose second element is the
        field's name. Example::
[
[[0,1,2], 'field_name1'],
[[0,3], 'field_name2']
]
"""
return self.get_layout_objects(string_types, greedy=True)
def get_layout_objects(self, *LayoutClasses, **kwargs):
"""
Returns a list of lists pointing to layout objects of any type matching
`LayoutClasses`::
[
[[0,1,2], 'div'],
[[0,3], 'field_name']
]
        :param max_level: An integer that indicates the maximum depth to reach
            when traversing a layout.
:param greedy: Boolean that indicates whether to be greedy. If set, max_level
is skipped.
"""
index = kwargs.pop('index', None)
max_level = kwargs.pop('max_level', 0)
greedy = kwargs.pop('greedy', False)
pointers = []
if index is not None and not isinstance(index, list):
index = [index]
elif index is None:
index = []
for i, layout_object in enumerate(self.fields):
if isinstance(layout_object, LayoutClasses):
if len(LayoutClasses) == 1 and LayoutClasses[0] == string_types:
pointers.append([index + [i], layout_object])
else:
pointers.append([index + [i], layout_object.__class__.__name__.lower()])
# If it's a layout object and we haven't reached the max depth limit or greedy
# we recursive call
if hasattr(layout_object, 'get_field_names') and (len(index) < max_level or greedy):
new_kwargs = {'index': index + [i], 'max_level': max_level, 'greedy': greedy}
pointers = pointers + layout_object.get_layout_objects(*LayoutClasses, **new_kwargs)
return pointers
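# Illustrative usage sketch (not part of the original source): given a layout
# such as Layout('email', Div('password1', 'password2')), the named pointers
# returned by get_field_names() would be:
#
#     >>> layout = Layout('email', Div('password1', 'password2'))
#     >>> layout.get_field_names()
#     [[[0], 'email'], [[1, 0], 'password1'], [[1, 1], 'password2']]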
class Layout(LayoutObject):
"""
    Form Layout. It is composed of Layout objects: `Fieldset`, `Row`, `Column`, `MultiField`,
`HTML`, `ButtonHolder`, `Button`, `Hidden`, `Reset`, `Submit` and fields. Form fields
have to be strings.
Layout objects `Fieldset`, `Row`, `Column`, `MultiField` and `ButtonHolder` can hold other
Layout objects within. Though `ButtonHolder` should only hold `HTML` and BaseInput
inherited classes: `Button`, `Hidden`, `Reset` and `Submit`.
You need to add your `Layout` to the `FormHelper` using its method `add_layout`.
Example::
layout = Layout(
Fieldset('Company data',
'is_company'
),
Fieldset(_('Contact details'),
'email',
Row('password1', 'password2'),
'first_name',
'last_name',
HTML('<img src="/media/somepicture.jpg"/>'),
'company'
),
ButtonHolder(
Submit('Save', 'Save', css_class='button white'),
),
)
helper.add_layout(layout)
"""
def __init__(self, *fields):
self.fields = list(fields)
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
html = ""
for field in self.fields:
html += render_field(
field,
form,
form_style,
context,
template_pack=template_pack
)
return html
class ButtonHolder(LayoutObject):
"""
Layout object. It wraps fields in a <div class="buttonHolder">
This is where you should put Layout objects that render to form buttons like Submit.
It should only hold `HTML` and `BaseInput` inherited objects.
Example::
ButtonHolder(
HTML(<span style="display: hidden;">Information Saved</span>),
Submit('Save', 'Save')
)
"""
template = "uni_form/layout/buttonholder.html"
def __init__(self, *fields, **kwargs):
self.fields = list(fields)
self.css_class = kwargs.get('css_class', None)
self.css_id = kwargs.get('css_id', None)
self.template = kwargs.get('template', self.template)
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
html = u''
for field in self.fields:
html += render_field(field, form, form_style,
context, template_pack=template_pack)
return render_to_string(self.template, Context({'buttonholder': self, 'fields_output': html}))
class BaseInput(object):
"""
A base class to reduce the amount of code in the Input classes.
"""
template = "%s/layout/baseinput.html" % TEMPLATE_PACK
def __init__(self, name, value, **kwargs):
self.name = name
self.value = value
self.id = kwargs.pop('css_id', '')
self.attrs = {}
if 'css_class' in kwargs:
self.field_classes += ' %s' % kwargs.pop('css_class')
self.template = kwargs.pop('template', self.template)
self.flat_attrs = flatatt(kwargs)
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
"""
        Renders an `<input />` if the container is used as a Layout object.
Input button value can be a variable in context.
"""
self.value = Template(text_type(self.value)).render(context)
return render_to_string(self.template, Context({'input': self}))
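# Illustrative sketch (assumption, not from the original source): because the
# value is rendered through the Django template engine, it may reference
# context variables, e.g. Submit('save', '{{ save_label }}') picks up the
# button label from the template context at render time.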
class Submit(BaseInput):
"""
Used to create a Submit button descriptor for the {% crispy %} template tag::
submit = Submit('Search the Site', 'search this site')
.. note:: The first argument is also slugified and turned into the id for the submit button.
"""
input_type = 'submit'
field_classes = 'submit submitButton' if TEMPLATE_PACK == 'uni_form' else 'btn btn-primary'
class Button(BaseInput):
"""
    Used to create a Button input descriptor for the {% crispy %} template tag::
button = Button('Button 1', 'Press Me!')
.. note:: The first argument is also slugified and turned into the id for the button.
"""
input_type = 'button'
field_classes = 'button' if TEMPLATE_PACK == 'uni_form' else 'btn'
class Hidden(BaseInput):
"""
Used to create a Hidden input descriptor for the {% crispy %} template tag.
"""
input_type = 'hidden'
field_classes = 'hidden'
class Reset(BaseInput):
"""
Used to create a Reset button input descriptor for the {% crispy %} template tag::
reset = Reset('Reset This Form', 'Revert Me!')
.. note:: The first argument is also slugified and turned into the id for the reset.
"""
input_type = 'reset'
field_classes = 'reset resetButton' if TEMPLATE_PACK == 'uni_form' else 'btn btn-inverse'
class Fieldset(LayoutObject):
"""
Layout object. It wraps fields in a <fieldset>
Example::
Fieldset("Text for the legend",
'form_field_1',
'form_field_2'
)
The first parameter is the text for the fieldset legend. This text is context aware,
so you can do things like::
Fieldset("Data for {{ user.username }}",
'form_field_1',
'form_field_2'
)
"""
template = "uni_form/layout/fieldset.html"
def __init__(self, legend, *fields, **kwargs):
self.fields = list(fields)
self.legend = legend
self.css_class = kwargs.pop('css_class', '')
self.css_id = kwargs.pop('css_id', None)
# Overrides class variable with an instance level variable
self.template = kwargs.pop('template', self.template)
self.flat_attrs = flatatt(kwargs)
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
fields = ''
for field in self.fields:
fields += render_field(field, form, form_style, context,
template_pack=template_pack)
legend = ''
if self.legend:
legend = u'%s' % Template(text_type(self.legend)).render(context)
return render_to_string(self.template, Context({'fieldset': self, 'legend': legend, 'fields': fields, 'form_style': form_style}))
class MultiField(LayoutObject):
""" MultiField container. Renders to a MultiField <div> """
template = "uni_form/layout/multifield.html"
field_template = "uni_form/multifield.html"
def __init__(self, label, *fields, **kwargs):
self.fields = list(fields)
self.label_html = label
self.label_class = kwargs.pop('label_class', u'blockLabel')
self.css_class = kwargs.pop('css_class', u'ctrlHolder')
self.css_id = kwargs.pop('css_id', None)
self.template = kwargs.pop('template', self.template)
self.field_template = kwargs.pop('field_template', self.field_template)
self.flat_attrs = flatatt(kwargs)
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
# If a field within MultiField contains errors
if context['form_show_errors']:
for field in map(lambda pointer: pointer[1], self.get_field_names()):
if field in form.errors:
self.css_class += " error"
fields_output = u''
for field in self.fields:
fields_output += render_field(
field, form, form_style, context,
self.field_template, self.label_class, layout_object=self,
template_pack=template_pack
)
context.update({'multifield': self, 'fields_output': fields_output})
return render_to_string(self.template, context)
class Div(LayoutObject):
"""
Layout object. It wraps fields in a <div>
You can set `css_id` for a DOM id and `css_class` for a DOM class. Example::
Div('form_field_1', 'form_field_2', css_id='div-example', css_class='divs')
"""
template = "uni_form/layout/div.html"
def __init__(self, *fields, **kwargs):
self.fields = list(fields)
if hasattr(self, 'css_class') and 'css_class' in kwargs:
self.css_class += ' %s' % kwargs.pop('css_class')
if not hasattr(self, 'css_class'):
self.css_class = kwargs.pop('css_class', None)
self.css_id = kwargs.pop('css_id', '')
self.template = kwargs.pop('template', self.template)
self.flat_attrs = flatatt(kwargs)
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
fields = ''
for field in self.fields:
fields += render_field(field, form, form_style, context, template_pack=template_pack)
return render_to_string(self.template, Context({'div': self, 'fields': fields}))
class Row(Div):
"""
Layout object. It wraps fields in a div whose default class is "formRow". Example::
Row('form_field_1', 'form_field_2', 'form_field_3')
"""
css_class = 'formRow' if TEMPLATE_PACK == 'uni_form' else 'row'
class Column(Div):
"""
Layout object. It wraps fields in a div whose default class is "formColumn". Example::
Column('form_field_1', 'form_field_2')
"""
css_class = 'formColumn'
class HTML(object):
"""
Layout object. It can contain pure HTML and it has access to the whole
context of the page where the form is being rendered.
Examples::
HTML("{% if saved %}Data saved{% endif %}")
HTML('<input type="hidden" name="{{ step_field }}" value="{{ step0 }}" />')
"""
def __init__(self, html):
self.html = html
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
return Template(text_type(self.html)).render(context)
class Field(LayoutObject):
"""
    Layout object. It contains one field name, and you can add attributes to it easily.
For setting class attributes, you need to use `css_class`, as `class` is a Python keyword.
Example::
Field('field_name', style="color: #333;", css_class="whatever", id="field_name")
"""
template = "%s/field.html" % TEMPLATE_PACK
def __init__(self, *args, **kwargs):
self.fields = list(args)
if not hasattr(self, 'attrs'):
self.attrs = {}
if 'css_class' in kwargs:
if 'class' in self.attrs:
self.attrs['class'] += " %s" % kwargs.pop('css_class')
else:
self.attrs['class'] = kwargs.pop('css_class')
self.wrapper_class = kwargs.pop('wrapper_class', None)
self.template = kwargs.pop('template', self.template)
# We use kwargs as HTML attributes, turning data_id='test' into data-id='test'
self.attrs.update(dict([(k.replace('_', '-'), conditional_escape(v)) for k, v in kwargs.items()]))
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
if hasattr(self, 'wrapper_class'):
context['wrapper_class'] = self.wrapper_class
html = ''
for field in self.fields:
html += render_field(field, form, form_style, context, template=self.template, attrs=self.attrs, template_pack=template_pack)
return html
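# Illustrative sketch (not part of the original source): keyword arguments
# become HTML attributes, with underscores turned into dashes:
#
#     >>> Field('name', data_id='test', css_class='big').attrs
#     {'data-id': 'test', 'class': 'big'}   # key order may vary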
class MultiWidgetField(Field):
"""
Layout object. For fields with :class:`~django.forms.MultiWidget` as `widget`, you can pass
additional attributes to each widget.
Example::
MultiWidgetField(
'multiwidget_field_name',
attrs=(
{'style': 'width: 30px;'},
{'class': 'second_widget_class'}
),
)
.. note:: To override widget's css class use ``class`` not ``css_class``.
"""
def __init__(self, *args, **kwargs):
self.fields = list(args)
self.attrs = kwargs.pop('attrs', {})
self.template = kwargs.pop('template', self.template)
| mit | -5,088,395,528,216,756,000 | 33.042889 | 137 | 0.591804 | false |
Lamecarlate/gourmet | gourmet/gglobals.py | 6 | 6335 | import os, os.path, gobject, re, gtk
import tempfile
from gdebug import debug
from OptionParser import args
from util import windows
tmpdir = tempfile.gettempdir()
if args.gourmetdir:
gourmetdir = args.gourmetdir
debug("User specified gourmetdir %s"%gourmetdir,0)
else:
if os.name =='nt':
        # Under Windows, we unfortunately cannot just use os.environ, see
# http://stackoverflow.com/questions/2608200/problems-with-umlauts-in-python-appdata-environvent-variable
# We might drop this workaround with Python 3 (all strings are unicode)
# and/or GTK+ 3 (use Glib.get_home_dir()).
APPDATA = windows.getenv(u'APPDATA').decode('utf-8')
gourmetdir = os.path.join(APPDATA,'gourmet')
else:
gourmetdir = os.path.join(os.path.expanduser('~'),'.gourmet')
try:
if not os.path.exists(gourmetdir):
debug('Creating %s'%gourmetdir,0)
os.makedirs(gourmetdir)
except OSError:
try:
debug("Unable to create standard config directory in home directory. Looking for .gourmet in working directory instead.",0)
gourmetdir = '.gourmet'
if not os.path.exists(gourmetdir):
debug("Creating .gourmet in working directory",0)
os.makedirs(gourmetdir)
except OSError:
print "Unable to create gourmet directory."
raise
import sys
sys.exit()
if not os.access(gourmetdir,os.W_OK):
debug('Cannot write to configuration directory, %s'%gourmetdir,-1)
import sys
sys.exit()
debug('gourmetdir=%s'%gourmetdir,2)
use_threads = args.threads
# Uncomment the below to test FauxThreads
#use_threads = False
# note: this stuff must be kept in sync with changes in setup.py
import settings
uibase = os.path.join(settings.ui_base)
lib_dir = os.path.join(settings.lib_dir,'gourmet')
# To have strings from .ui files (gtk.Builder) translated on all platforms,
# we need the following module to enable localization on all platforms.
try:
import elib.intl
elib.intl.install('gourmet', settings.locale_base)
except ImportError:
print 'elib.intl failed to load.'
print 'IF YOU HAVE TROUBLE WITH TRANSLATIONS, MAKE SURE YOU HAVE THIS LIBRARY INSTALLED.'
from gettext import gettext as _
data_dir = settings.data_dir
imagedir = os.path.join(settings.data_dir,'images')
style_dir = os.path.join(settings.data_dir,'style')
icondir = os.path.join(settings.icon_base,"48x48","apps")
doc_base = settings.doc_base
plugin_base = settings.plugin_base
# GRAB PLUGIN DIR FOR HTML IMPORT
if args.html_plugin_dir:
html_plugin_dir = args.html_plugin_dir
else:
html_plugin_dir = os.path.join(gourmetdir,'html_plugins')
if not os.path.exists(html_plugin_dir):
os.makedirs(html_plugin_dir)
template_file = os.path.join(settings.data_dir,'RULES_TEMPLATE')
if os.path.exists(template_file):
import shutil
shutil.copy(template_file,
os.path.join(html_plugin_dir,'RULES_TEMPLATE')
)
REC_ATTRS = [('title',_('Title'),'Entry'),
('category',_('Category'),'Combo'),
('cuisine',_('Cuisine'),'Combo'),
('rating',_('Rating'),'Entry'),
('source',_('Source'),'Combo'),
('link',_('Website'),'Entry'),
('yields',_('Yield'),'Entry'),
('yield_unit',_('Yield Unit'),'Combo'),
('preptime',_('Preparation Time'),'Entry'),
('cooktime',_('Cooking Time'),'Entry'),
]
INT_REC_ATTRS = ['rating','preptime','cooktime']
FLOAT_REC_ATTRS = ['yields']
TEXT_ATTR_DIC = {'instructions':_('Instructions'),
'modifications':_('Notes'),
}
REC_ATTR_DIC={}
NAME_TO_ATTR = {_('Instructions'):'instructions',
_('Notes'):'modifications',
_('Modifications'):'modifications',
}
DEFAULT_ATTR_ORDER = ['title',
#'servings',
'yields',
'cooktime',
'preptime',
'category',
'cuisine',
'rating',
'source',
'link',
]
DEFAULT_TEXT_ATTR_ORDER = ['instructions',
'modifications',]
def build_rec_attr_dic ():
for attr, name, widget in REC_ATTRS:
REC_ATTR_DIC[attr]=name
NAME_TO_ATTR[name]=attr
build_rec_attr_dic()
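# Illustrative example (not in the original source): after the call above the
# two dictionaries are inverses of each other, e.g.
# REC_ATTR_DIC['title'] == _('Title') and NAME_TO_ATTR[_('Title')] == 'title'.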
DEFAULT_HIDDEN_COLUMNS = [REC_ATTR_DIC[attr] for attr in
['link','yields','yield_unit','preptime','cooktime']
]
from gtk_extras import dialog_extras
def launch_url (url, ext=""):
if os.name == 'nt':
os.startfile(url)
elif os.name == 'posix':
try:
gtk.show_uri(gtk.gdk.Screen(),url,0L)
except gobject.GError, err:
#print dir(err)
label = _('Unable to open URL')
for reg, msg in [('mailto:',_('Unable to launch mail reader.')),
('http:',_('Unable to open website.')),
('file:',_('Unable to open file.'))]:
if re.match(reg,url.lower()): label = msg
dialog_extras.show_message(
label=label,
sublabel=err.message,
expander=[_('_Details'),
_("There was an error launching the url: %s"%url)]
)
# Set up custom STOCK items and ICONS!
icon_factory = gtk.IconFactory()
def add_icon (file_name, stock_id, label=None, modifier=0, keyval=0):
pb = gtk.gdk.pixbuf_new_from_file(file_name)
iconset = gtk.IconSet(pb)
icon_factory.add(stock_id,iconset)
icon_factory.add_default()
gtk.stock_add([(stock_id,
label,
modifier,
keyval,
"")])
for filename,stock_id,label,modifier,keyval in [
('AddToShoppingList.png','add-to-shopping-list',_('Add to _Shopping List'),gtk.gdk.CONTROL_MASK,gtk.gdk.keyval_from_name('l')),
('reccard.png','recipe-card',None,0,0),
('reccard_edit.png','edit-recipe-card',None,0,0),
]:
add_icon(os.path.join(imagedir,filename),stock_id,label,modifier,keyval)
| gpl-2.0 | 4,074,403,741,678,433,000 | 34.391061 | 131 | 0.576638 | false |
aosagie/spark | examples/src/main/python/ml/onehot_encoder_example.py | 31 | 1638 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import OneHotEncoder
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession\
.builder\
.appName("OneHotEncoderExample")\
.getOrCreate()
# Note: categorical features are usually first encoded with StringIndexer
# $example on$
df = spark.createDataFrame([
(0.0, 1.0),
(1.0, 0.0),
(2.0, 1.0),
(0.0, 2.0),
(0.0, 1.0),
(2.0, 0.0)
], ["categoryIndex1", "categoryIndex2"])
encoder = OneHotEncoder(inputCols=["categoryIndex1", "categoryIndex2"],
outputCols=["categoryVec1", "categoryVec2"])
model = encoder.fit(df)
encoded = model.transform(df)
encoded.show()
# $example off$
spark.stop()
| apache-2.0 | 7,084,640,044,498,938,000 | 32.428571 | 77 | 0.673993 | false |
simbits/Lumiere | bulb/bulb.py | 1 | 5877 | #!/usr/bin/env python
from pyomxplayer import OMXPlayer
import RPi.GPIO as GPIO
import pprint
import random
import socket
import struct
import sys
import time
import traceback
DRAWERS = 9
MCAST_GRP = '224.19.79.1'
MCAST_PORT = 9999
MOVIE_PATH = '/usr/share/lumiere/media'
MOVIE_SUFFIX = 'mp4'
MOVIE_LIST = [ '%s/%d.%s' % (MOVIE_PATH, n, MOVIE_SUFFIX) for n in range(1, 10) ]
PROJECTOR_SUPPLY_PIN = 26 #BOARD P1 pin number corresponds with GPIO7 on Rev2 RPi
PROJECTOR_ON = True
PROJECTOR_OFF = False
STATE_OPEN = 'o'
STATE_CLOSED = 'c'
_now_playing = -1
_omxplayer = None
def stop_movie():
global _omxplayer
global _now_playing
if _omxplayer != None:
print 'Stopping movie %d:%s' % (_now_playing+1, MOVIE_LIST[_now_playing])
if _omxplayer.isAlive():
_omxplayer.stop()
while _omxplayer.isAlive():
print '- Waiting for player to stop'
time.sleep(0.1)
_omxplayer.close()
_omxplayer = None
_now_playing =-1
def start_movie(index):
global _omxplayer
global _now_playing
if index >= len(MOVIE_LIST):
return -1
stop_movie()
print 'Starting movie %d:%s' % (index+1, MOVIE_LIST[index])
_omxplayer = OMXPlayer(MOVIE_LIST[index], args='-b', start_playback=True)
_now_playing = index
GPIO.output(PROJECTOR_SUPPLY_PIN, PROJECTOR_ON)
return index
def start_random_movie_from_list(l=[]):
try:
return start_movie(random.choice(l))
except IndexError:
pass
return -1
def is_movie_playing():
global _omxplayer
if _omxplayer != None:
return _omxplayer.isAlive()
return False
def current_movie_playing():
global _now_playing
return _now_playing
def main():
previous_state = [True] * DRAWERS
playlist = set()
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
try:
host = '0.0.0.0'
timeval=struct.pack("2I", 0, 500000) # timeout 0.5s
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, timeval)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(host))
sock.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(MCAST_GRP) + socket.inet_aton(host))
sock.bind((MCAST_GRP, MCAST_PORT))
except AttributeError:
pass
random.seed()
while 1:
drawer = -1
data = ''
try:
data, addr = sock.recvfrom(512)
#print '[%s] | received from %s: %s' % (time.ctime(), addr, data)
except socket.error, e:
if e.errno == 11:
if not is_movie_playing():
start_random_movie_from_list(list(playlist))
else:
print 'Expection: %s' % str(e)
continue
try:
if len(data) != 19:
print 'expected 19 bytes got %d' % len(data)
continue
cmd,args = data.split(':')
except ValueError, e:
print 'wrong data format: %s (%s)' % (data, str(e))
continue
if cmd == 's':
new_state = [bool(int(i)) for i in args.split(',')]
opened = {i for i in range(0, DRAWERS)
if new_state[i] != previous_state[i] and
not new_state[i]}
closed = {i for i in range(0, DRAWERS)
if new_state[i] != previous_state[i] and
new_state[i]}
start_random = False
start_new = False
if len(opened) > 0:
print 'New opened: %s' % (str(opened))
if len(closed) > 0:
print 'New closed: %s' % (str(closed))
try:
for i in closed:
if i in playlist:
playlist.remove(i)
if i == current_movie_playing():
stop_movie()
start_random = True
if len(playlist) == 0:
GPIO.output(PROJECTOR_SUPPLY_PIN, PROJECTOR_OFF)
if len(closed) > 0:
print 'playlist after closing: %s' % (list(playlist))
except IndexError:
pass
try:
for i in opened:
if i not in playlist:
playlist.add(i)
start_new = True
if len(opened) > 0:
print 'playlist after opening: %s' % (list(playlist))
except IndexError:
pass
try:
if start_new:
print 'starting new movie from opened list'
start_random_movie_from_list(list(opened))
elif start_random:
print 'starting random movie'
start_random_movie_from_list(list(playlist))
elif not is_movie_playing():
start_movie(random.choice(list(playlist)))
except IndexError:
pass
previous_state = list(new_state)
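# Illustrative sender sketch (assumption, not part of the original code): each
# status packet is exactly 19 bytes, "s:" followed by nine comma-separated
# 0/1 drawer flags, so a matching transmitter could look like:
#
#     sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
#     sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
#     sock.sendto('s:1,1,0,1,1,1,1,1,1', (MCAST_GRP, MCAST_PORT))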
if __name__ == '__main__':
try:
GPIO.setmode(GPIO.BOARD)
GPIO.setup(PROJECTOR_SUPPLY_PIN, GPIO.OUT)
GPIO.output(PROJECTOR_SUPPLY_PIN, PROJECTOR_OFF)
main()
except KeyboardInterrupt:
print 'Exiting'
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)
finally:
print 'Cleaning up GPIO settings'
stop_movie()
GPIO.output(PROJECTOR_SUPPLY_PIN, PROJECTOR_OFF)
#GPIO.cleanup()
| mit | 708,628,124,309,549,600 | 27.950739 | 95 | 0.546708 | false |
medspx/QGIS | python/plugins/processing/gui/PointSelectionPanel.py | 8 | 3303 | # -*- coding: utf-8 -*-
"""
***************************************************************************
PointSelectionPanel.py
---------------------
Date : February 2016
Copyright : (C) 2016 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.core import (QgsProject,
QgsReferencedPointXY,
QgsPointXY)
from qgis.PyQt import uic
from qgis.utils import iface
from processing.gui.PointMapTool import PointMapTool
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'widgetBaseSelector.ui'))
class PointSelectionPanel(BASE, WIDGET):
def __init__(self, dialog, default=None):
super(PointSelectionPanel, self).__init__(None)
self.setupUi(self)
self.btnSelect.clicked.connect(self.selectOnCanvas)
self.dialog = dialog
self.crs = QgsProject.instance().crs()
if iface is not None:
canvas = iface.mapCanvas()
self.prevMapTool = canvas.mapTool()
self.tool = PointMapTool(canvas)
self.tool.canvasClicked.connect(self.updatePoint)
self.tool.complete.connect(self.pointPicked)
else:
self.prevMapTool = None
self.tool = None
if default:
tokens = str(default).split(',')
if len(tokens) == 2:
try:
float(tokens[0])
float(tokens[1])
self.leText.setText(str(default))
                except (ValueError, TypeError):
pass
def selectOnCanvas(self):
canvas = iface.mapCanvas()
canvas.setMapTool(self.tool)
self.dialog.showMinimized()
def updatePoint(self, point, button):
s = '{},{}'.format(point.x(), point.y())
self.crs = QgsProject.instance().crs()
if self.crs.isValid():
s += ' [' + self.crs.authid() + ']'
self.leText.setText(s)
def pointPicked(self):
canvas = iface.mapCanvas()
canvas.setMapTool(self.prevMapTool)
self.dialog.showNormal()
self.dialog.raise_()
self.dialog.activateWindow()
def getValue(self):
if str(self.leText.text()).strip() != '':
return str(self.leText.text())
else:
return None
def setPointFromString(self, s):
self.leText.setText(s)
| gpl-2.0 | -8,217,441,572,238,531,000 | 31.382353 | 75 | 0.50439 | false |
timokoola/finnegan | fwtweeter.py | 1 | 2453 | #!/usr/bin/python
import tweepy, sys, os
from collections import Counter
import re
import argparse # requires 2.7
import time
class TweepyHelper:
def __init__(self,keyfile):
f = open(keyfile)
lines = f.readlines()
f.close()
consumerkey = lines[0].split("#")[0]
consumersecret = lines[1].split("#")[0]
accesstoken = lines[2].split("#")[0]
accesssec = lines[3].split("#")[0]
auth = tweepy.OAuthHandler(consumerkey, consumersecret)
auth.set_access_token(accesstoken, accesssec)
self.api = tweepy.API(auth)
def handle_command_line():
parser = argparse.ArgumentParser(description="Tweets a text format book, line by line.")
parser.add_argument("-t", "--test", help="Run a test run, get nth tweet",
type=int,default=-1 )
parser.add_argument("-k", "--keyfile", help="Twitter account consumer and accesstokens")
parser.add_argument("-b", "--bookfile", help="Book to be read")
parser.add_argument("-l", "--logfile", help="File contains ino about Line we are on.", default="tweetedids.txt")
args = parser.parse_args()
return args
def get_tweeted_file(args):
try:
f = open(args.logfile,"r+")
except:
f = open(args.logfile,"w+")
return (f, f.readlines())
def log_tweeted(tid, args):
f, ignred = get_tweeted_file(args)
f.write(str(time.time()))
f.write("\t")
f.write(tid)
f.write("\n")
f.close()
def read_tweeted(args):
f, lines = get_tweeted_file(args)
if len(lines) < 1:
return 0
f.close()
result = int(lines[-1].split()[1])
return result
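# Illustrative example (assumption): each logfile line is
# "<unix timestamp>\t<line number>\n", e.g. "1388530800.0\t42", so
# read_tweeted() recovers the number of the last tweeted line from the
# second whitespace-separated field.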
if __name__ == "__main__":
args = handle_command_line()
api = (TweepyHelper(args.keyfile)).api
f = open(args.bookfile)
lines = f.readlines()
f.close()
if args.test < 0:
tid = read_tweeted(args)
line = ""
while len(line) <= 1:
tid = tid + 1
print "Tweeted line %d." % tid
line = lines[tid]
line = re.sub(r"^\d+\s+","",line)
line = re.sub(r"\s*\d+$","",line)
line = re.sub(r"^\s+$","",line)
try:
api.update_status(line)
except tweepy.TweepError:
print "skipping"
#api.update_status("%s, (%d)" % (line, tid))
log_tweeted("%d" % tid,args)
else:
print "%d/%d: %s" % (args.test, len(bw.tweets), bw.tweets[args.test])
| apache-2.0 | 4,428,032,999,088,167,000 | 26.875 | 116 | 0.563392 | false |
ianatpn/nupictest | examples/opf/experiments/anomaly/temporal/saw_200/description.py | 3 | 14514 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'~/nta/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
import os
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalAnomaly',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': { 'f': { 'clipInput': True,
'fieldname': u'f',
'maxval': 200,
'minval': 0,
'n': 513,
'name': u'f',
'type': 'ScalarEncoder',
'w': 21}},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActivePerInhArea': 40,
'seed': 1956,
# coincInputPoolPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose coincInputPoolPct * (2*coincInputRadius+1)^2
'coincInputPoolPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nta/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 20,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
# Classifier implementation selection.
'implementation': 'cpp',
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
predictionSteps = int(round(aggregationDivide(
config['predictAheadTime'], config['aggregationInfo'])))
assert (predictionSteps >= 1)
config['modelParams']['clParams']['steps'] = str(predictionSteps)
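# Illustrative example (assumption about aggregationDivide semantics): with an
# aggregation period of minutes=5 and a predictAheadTime of minutes=15, the
# division yields 3.0 and the classifier is configured with steps='3'.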
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'nupic',
# Input stream specification per py/nupic/cluster/database/StreamDef.json.
#
'dataset' : { u'info': u'Artificial Data',
u'streams': [ { u'columns': [u'*'],
u'info': u'blah',
u'source': u'file://'+os.path.join(os.path.dirname(__file__), 'data.csv'),
}
],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
'iterationCount' : -1,
# A dictionary containing all the supplementary parameters for inference
"inferenceArgs":{u'predictedField': u'f', 'predictionSteps': [1]},
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'f', metric='aae', inferenceElement='prediction', params={'window': 100}),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
'loggedMetrics': ['.*nupicScore.*'],
}
################################################################################
################################################################################
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
| gpl-3.0 | -4,111,900,825,612,882,400 | 37.498674 | 102 | 0.605278 | false |
poguez/datacats | datacats/cli/purge.py | 8 | 2153 | # Copyright 2014-2015 Boxkite Inc.
# This file is part of the DataCats package and is released under
# the terms of the GNU Affero General Public License version 3.0.
# See LICENSE.txt or http://www.fsf.org/licensing/licenses/agpl-3.0.html
from shutil import rmtree
from datacats.environment import Environment
from datacats.cli.util import y_or_n_prompt
from datacats.error import DatacatsError
from datacats.task import get_format_version
def purge(opts):
"""Purge environment database and uploaded files
Usage:
datacats purge [-s NAME | --delete-environment] [-y] [ENVIRONMENT]
Options:
    --delete-environment   Delete the environment directory along with its
                           data, including the data for **all** sites.
    -s --site=NAME         Specify a site to be purged [default: primary]
-y --yes Respond yes to all prompts (i.e. force)
ENVIRONMENT may be an environment name or a path to an environment directory.
Default: '.'
"""
old = False
try:
environment = Environment.load(opts['ENVIRONMENT'], opts['--site'])
except DatacatsError:
environment = Environment.load(opts['ENVIRONMENT'], opts['--site'], data_only=True)
if get_format_version(environment.datadir) == 1:
old = True
environment = Environment.load(opts['ENVIRONMENT'], opts['--site'], allow_old=True)
# We need a valid site if they don't want to blow away everything.
if not opts['--delete-environment'] and not old:
environment.require_valid_site()
sites = [opts['--site']] if not opts['--delete-environment'] else environment.sites
if not opts['--yes']:
y_or_n_prompt('datacats purge will delete all stored data')
environment.stop_ckan()
environment.stop_supporting_containers()
environment.purge_data(sites)
if opts['--delete-environment']:
if environment.target:
rmtree(environment.target)
else:
            raise DatacatsError(("Unable to find the environment source"
" directory so that it can be deleted.\n"
"Chances are it's because it already does not exist"))
| agpl-3.0 | -233,034,424,256,929,820 | 35.491525 | 95 | 0.673479 | false |
klonage/nlt-gcs | Lib/site-packages/scipy/special/utils/makenpz.py | 57 | 2159 | #!/usr/bin/env python
"""
makenpz.py DIRECTORY
Build a npz containing all data files in the directory.
"""
import os
import numpy as np
from optparse import OptionParser
def main():
p = OptionParser()
options, args = p.parse_args()
if len(args) != 1:
p.error("no valid directory given")
inp = args[0]
outp = inp + ".npz"
files = []
for dirpath, dirnames, filenames in os.walk(inp):
for fn in filenames:
if fn.endswith('.txt'):
files.append(
(dirpath[len(inp)+1:] + '/' + fn[:-4],
os.path.join(dirpath, fn)))
data = {}
for key, fn in files:
key = key.replace('/', '-')
try:
data[key] = np.loadtxt(fn)
except ValueError:
print "Failed to load", fn
savez_compress(outp, **data)
def savez_compress(file, *args, **kwds):
# Import is postponed to here since zipfile depends on gzip, an optional
# component of the so-called standard library.
import zipfile
# Import deferred for startup time improvement
import tempfile
if isinstance(file, basestring):
if not file.endswith('.npz'):
file = file + '.npz'
namedict = kwds
for i, val in enumerate(args):
key = 'arr_%d' % i
if key in namedict.keys():
raise ValueError("Cannot use un-named variables and keyword %s" % key)
namedict[key] = val
zip = zipfile.ZipFile(file, mode="w", compression=zipfile.ZIP_DEFLATED)
# Stage arrays in a temporary file on disk, before writing to zip.
fd, tmpfile = tempfile.mkstemp(suffix='-numpy.npy')
os.close(fd)
try:
for key, val in namedict.iteritems():
fname = key + '.npy'
fid = open(tmpfile, 'wb')
try:
np.lib.format.write_array(fid, np.asanyarray(val))
fid.close()
fid = None
zip.write(tmpfile, arcname=fname)
finally:
if fid:
fid.close()
finally:
os.remove(tmpfile)
zip.close()
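# Example usage (illustrative sketch, not from the original source):
#
#     >>> import numpy as np
#     >>> savez_compress('out.npz', a=np.eye(2))
#     >>> np.load('out.npz')['a']
#     array([[ 1.,  0.],
#            [ 0.,  1.]])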
if __name__ == "__main__":
main()
| gpl-3.0 | -1,029,903,024,829,332,900 | 25.329268 | 82 | 0.547939 | false |
chengdh/openerp-ktv | openerp/addons/auction/report/buyer_form_report.py | 9 | 2671 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
class buyer_form_report(report_sxw.rml_parse):
count=0
c=0
def __init__(self, cr, uid, name, context):
super(buyer_form_report, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'sum_taxes': self.sum_taxes,
'buyerinfo' : self.buyer_info,
'grand_total' : self.grand_buyer_total,
})
def sum_taxes(self, lot):
amount=0.0
taxes=[]
if lot.author_right:
taxes.append(lot.author_right)
if lot.auction_id:
taxes += lot.auction_id.buyer_costs
tax=self.pool.get('account.tax').compute_all(self.cr, self.uid, taxes, lot.obj_price, 1)
for t in tax:
amount+=t['amount']
return amount
def buyer_info(self):
objects = [object for object in self.localcontext.get('objects')]
ret_dict = {}
for object in objects:
partner = ret_dict.get(object.ach_uid.id,False)
if not partner:
ret_dict[object.ach_uid.id] = {'partner' : object.ach_uid or False, 'lots':[object]}
else:
lots = partner.get('lots')
lots.append(object)
return ret_dict.values()
def grand_buyer_total(self,o):
grand_total = 0
for oo in o:
grand_total =grand_total + oo['obj_price'] +self.sum_taxes(oo)
return grand_total
report_sxw.report_sxw('report.buyer_form_report', 'auction.lots', 'addons/auction/report/buyer_form_report.rml', parser=buyer_form_report)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,328,688,751,632,081,000 | 37.710145 | 138 | 0.588544 | false |
simonwydooghe/ansible | lib/ansible/plugins/connection/__init__.py | 26 | 16411 | # (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2015 Toshio Kuratomi <[email protected]>
# (c) 2017, Peter Sprygada <[email protected]>
# (c) 2017 Ansible Project
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import os
import shlex
from abc import abstractmethod, abstractproperty
from functools import wraps
from ansible import constants as C
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import AnsiblePlugin
from ansible.utils.display import Display
from ansible.plugins.loader import connection_loader, get_shell_plugin
from ansible.utils.path import unfrackpath
display = Display()
__all__ = ['ConnectionBase', 'ensure_connect']
BUFSIZE = 65536
def ensure_connect(func):
@wraps(func)
def wrapped(self, *args, **kwargs):
if not self._connected:
self._connect()
return func(self, *args, **kwargs)
return wrapped
class ConnectionBase(AnsiblePlugin):
'''
A base class for connections to contain common code.
'''
has_pipelining = False
has_native_async = False # eg, winrm
always_pipeline_modules = False # eg, winrm
has_tty = True # for interacting with become plugins
# When running over this connection type, prefer modules written in a certain language
# as discovered by the specified file extension. An empty string as the
# language means any language.
module_implementation_preferences = ('',)
allow_executable = True
# the following control whether or not the connection supports the
# persistent connection framework or not
supports_persistence = False
force_persistence = False
default_user = None
def __init__(self, play_context, new_stdin, shell=None, *args, **kwargs):
super(ConnectionBase, self).__init__()
# All these hasattrs allow subclasses to override these parameters
if not hasattr(self, '_play_context'):
# Backwards compat: self._play_context isn't really needed, using set_options/get_option
self._play_context = play_context
if not hasattr(self, '_new_stdin'):
self._new_stdin = new_stdin
if not hasattr(self, '_display'):
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
if not hasattr(self, '_connected'):
self._connected = False
self.success_key = None
self.prompt = None
self._connected = False
self._socket_path = None
# helper plugins
self._shell = shell
# we always must have shell
if not self._shell:
shell_type = play_context.shell if play_context.shell else getattr(self, '_shell_type', None)
self._shell = get_shell_plugin(shell_type=shell_type, executable=self._play_context.executable)
self.become = None
def set_become_plugin(self, plugin):
self.become = plugin
@property
def connected(self):
'''Read-only property holding whether the connection to the remote host is active or closed.'''
return self._connected
@property
def socket_path(self):
'''Read-only property holding the connection socket path for this remote host'''
return self._socket_path
@staticmethod
def _split_ssh_args(argstring):
"""
Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a
list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to
the argument list. The list will not contain any empty elements.
"""
try:
# Python 2.6.x shlex doesn't handle unicode type so we have to
# convert args to byte string for that case. More efficient to
# try without conversion first but python2.6 doesn't throw an
# exception, it merely mangles the output:
# >>> shlex.split(u't e')
# ['t\x00\x00\x00', '\x00\x00\x00e\x00\x00\x00']
return [to_text(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
except AttributeError:
# In Python3, shlex.split doesn't work on a byte string.
return [to_text(x.strip()) for x in shlex.split(argstring) if x.strip()]
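    # Illustrative example (not part of the original source):
    #
    #     >>> ConnectionBase._split_ssh_args(u'-o Foo=1 -o Bar="foo bar"')
    #     [u'-o', u'Foo=1', u'-o', u'Bar=foo bar']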
@abstractproperty
def transport(self):
"""String used to identify this Connection class from other classes"""
pass
@abstractmethod
def _connect(self):
"""Connect to the host we've been initialized with"""
@ensure_connect
@abstractmethod
def exec_command(self, cmd, in_data=None, sudoable=True):
"""Run a command on the remote host.
:arg cmd: byte string containing the command
:kwarg in_data: If set, this data is passed to the command's stdin.
This is used to implement pipelining. Currently not all
connection plugins implement pipelining.
:kwarg sudoable: Tell the connection plugin if we're executing
a command via a privilege escalation mechanism. This may affect
how the connection plugin returns data. Note that not all
connections can handle privilege escalation.
:returns: a tuple of (return code, stdout, stderr) The return code is
an int while stdout and stderr are both byte strings.
When a command is executed, it goes through multiple commands to get
there. It looks approximately like this::
[LocalShell] ConnectionCommand [UsersLoginShell (*)] ANSIBLE_SHELL_EXECUTABLE [(BecomeCommand ANSIBLE_SHELL_EXECUTABLE)] Command
:LocalShell: Is optional. It is run locally to invoke the
``Connection Command``. In most instances, the
``ConnectionCommand`` can be invoked directly instead. The ssh
connection plugin which can have values that need expanding
locally specified via ssh_args is the sole known exception to
this. Shell metacharacters in the command itself should be
processed on the remote machine, not on the local machine so no
shell is needed on the local machine. (Example, ``/bin/sh``)
:ConnectionCommand: This is the command that connects us to the remote
machine to run the rest of the command. ``ansible_user``,
``ansible_ssh_host`` and so forth are fed to this piece of the
command to connect to the correct host (Examples ``ssh``,
``chroot``)
:UsersLoginShell: This shell may or may not be created depending on
the ConnectionCommand used by the connection plugin. This is the
shell that the ``ansible_user`` has configured as their login
shell. In traditional UNIX parlance, this is the last field of
a user's ``/etc/passwd`` entry We do not specifically try to run
the ``UsersLoginShell`` when we connect. Instead it is implicit
in the actions that the ``ConnectionCommand`` takes when it
connects to a remote machine. ``ansible_shell_type`` may be set
to inform ansible of differences in how the ``UsersLoginShell``
handles things like quoting if a shell has different semantics
than the Bourne shell.
:ANSIBLE_SHELL_EXECUTABLE: This is the shell set via the inventory var
``ansible_shell_executable`` or via
``constants.DEFAULT_EXECUTABLE`` if the inventory var is not set.
We explicitly invoke this shell so that we have predictable
quoting rules at this point. ``ANSIBLE_SHELL_EXECUTABLE`` is only
settable by the user because some sudo setups may only allow
invoking a specific shell. (For instance, ``/bin/bash`` may be
allowed but ``/bin/sh``, our default, may not). We invoke this
twice, once after the ``ConnectionCommand`` and once after the
``BecomeCommand``. After the ConnectionCommand, this is run by
the ``UsersLoginShell``. After the ``BecomeCommand`` we specify
that the ``ANSIBLE_SHELL_EXECUTABLE`` is being invoked directly.
        :BecomeCommand ANSIBLE_SHELL_EXECUTABLE: Is the command that performs
privilege escalation. Setting this up is performed by the action
plugin prior to running ``exec_command``. So we just get passed
:param:`cmd` which has the BecomeCommand already added.
(Examples: sudo, su) If we have a BecomeCommand then we will
invoke a ANSIBLE_SHELL_EXECUTABLE shell inside of it so that we
have a consistent view of quoting.
:Command: Is the command we're actually trying to run remotely.
(Examples: mkdir -p $HOME/.ansible, python $HOME/.ansible/tmp-script-file)
"""
pass
@ensure_connect
@abstractmethod
def put_file(self, in_path, out_path):
"""Transfer a file from local to remote"""
pass
@ensure_connect
@abstractmethod
def fetch_file(self, in_path, out_path):
"""Fetch a file from remote to local; callers are expected to have pre-created the directory chain for out_path"""
pass
@abstractmethod
def close(self):
"""Terminate the connection"""
pass
def connection_lock(self):
f = self._play_context.connection_lockfd
display.vvvv('CONNECTION: pid %d waiting for lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
fcntl.lockf(f, fcntl.LOCK_EX)
display.vvvv('CONNECTION: pid %d acquired lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
def connection_unlock(self):
f = self._play_context.connection_lockfd
fcntl.lockf(f, fcntl.LOCK_UN)
display.vvvv('CONNECTION: pid %d released lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
def reset(self):
display.warning("Reset is not implemented for this connection")
    # NOTE: these password functions are all become-specific; the naming is
    # confusing as they do not handle 'protocol passwords'
# DEPRECATED:
    # These are kept for backwards compatibility
# Use the methods provided by the become plugins instead
def check_become_success(self, b_output):
display.deprecated(
"Connection.check_become_success is deprecated, calling code should be using become plugins instead",
version="2.12"
)
return self.become.check_success(b_output)
def check_password_prompt(self, b_output):
display.deprecated(
"Connection.check_password_prompt is deprecated, calling code should be using become plugins instead",
version="2.12"
)
return self.become.check_password_prompt(b_output)
def check_incorrect_password(self, b_output):
display.deprecated(
"Connection.check_incorrect_password is deprecated, calling code should be using become plugins instead",
version="2.12"
)
return self.become.check_incorrect_password(b_output)
def check_missing_password(self, b_output):
display.deprecated(
"Connection.check_missing_password is deprecated, calling code should be using become plugins instead",
version="2.12"
)
return self.become.check_missing_password(b_output)
class NetworkConnectionBase(ConnectionBase):
"""
A base class for network-style connections.
"""
force_persistence = True
# Do not use _remote_is_local in other connections
_remote_is_local = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(NetworkConnectionBase, self).__init__(play_context, new_stdin, *args, **kwargs)
self._messages = []
self._conn_closed = False
self._network_os = self._play_context.network_os
self._local = connection_loader.get('local', play_context, '/dev/null')
self._local.set_options()
self._sub_plugin = {}
self._cached_variables = (None, None, None)
# reconstruct the socket_path and set instance values accordingly
self._ansible_playbook_pid = kwargs.get('ansible_playbook_pid')
self._update_connection_state()
def __getattr__(self, name):
try:
return self.__dict__[name]
except KeyError:
if not name.startswith('_'):
plugin = self._sub_plugin.get('obj')
if plugin:
method = getattr(plugin, name, None)
if method is not None:
return method
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
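# Delegation sketch: attribute lookups that miss on the connection fall
# through to the loaded sub-plugin. Assuming a hypothetical sub-plugin
# that exposes get_capabilities():
#
#   conn._sub_plugin = {'obj': some_plugin}
#   conn.get_capabilities()  # resolved via __getattr__ -> some_plugin
#
# Names starting with '_' are never delegated, so private state stays
# local to the connection object itself.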
def exec_command(self, cmd, in_data=None, sudoable=True):
return self._local.exec_command(cmd, in_data, sudoable)
def queue_message(self, level, message):
"""
Adds a message to the queue of messages waiting to be pushed back to the controller process.
:arg level: A string which can either be the name of a method in display, or 'log'. When
the messages are returned to task_executor, a value of log will correspond to
``display.display(message, log_only=True)``, while another value will call ``display.[level](message)``
"""
self._messages.append((level, message))
def pop_messages(self):
messages, self._messages = self._messages, []
return messages
def put_file(self, in_path, out_path):
"""Transfer a file from local to remote"""
return self._local.put_file(in_path, out_path)
def fetch_file(self, in_path, out_path):
"""Fetch a file from remote to local"""
return self._local.fetch_file(in_path, out_path)
def reset(self):
'''
Reset the connection
'''
if self._socket_path:
self.queue_message('vvvv', 'resetting persistent connection for socket_path %s' % self._socket_path)
self.close()
self.queue_message('vvvv', 'reset call on connection instance')
def close(self):
self._conn_closed = True
if self._connected:
self._connected = False
def set_options(self, task_keys=None, var_options=None, direct=None):
super(NetworkConnectionBase, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
if self.get_option('persistent_log_messages'):
warning = "Persistent connection logging is enabled for %s. This will log ALL interactions" % self._play_context.remote_addr
logpath = getattr(C, 'DEFAULT_LOG_PATH')
if logpath is not None:
warning += " to %s" % logpath
self.queue_message('warning', "%s and WILL NOT redact sensitive configuration like passwords. USE WITH CAUTION!" % warning)
if self._sub_plugin.get('obj') and self._sub_plugin.get('type') != 'external':
try:
self._sub_plugin['obj'].set_options(task_keys=task_keys, var_options=var_options, direct=direct)
except AttributeError:
pass
def _update_connection_state(self):
'''
Reconstruct the connection socket_path and check if it exists.
If the socket path exists, the connection is active: set
_socket_path to the path and _connected to True. If the socket
path doesn't exist, leave _socket_path as None and _connected
as False.
'''
ssh = connection_loader.get('ssh', class_only=True)
control_path = ssh._create_control_path(
self._play_context.remote_addr, self._play_context.port,
self._play_context.remote_user, self._play_context.connection,
self._ansible_playbook_pid
)
tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
socket_path = unfrackpath(control_path % dict(directory=tmp_path))
if os.path.exists(socket_path):
self._connected = True
self._socket_path = socket_path
def _log_messages(self, message):
if self.get_option('persistent_log_messages'):
self.queue_message('log', message)
| gpl-3.0 | 4,923,437,112,592,569,000 | 41.848564 | 140 | 0.640851 | false |
40223119/2015w13-1 | static/Brython3.1.3-20150514-095342/Lib/socket.py | 730 | 14913 | # Wrapper module for _socket, providing some additional facilities
# implemented in Python.
"""\
This module provides socket operations and some related functions.
On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
On other systems, it only supports IP. Functions specific for a
socket are available as methods of the socket object.
Functions:
socket() -- create a new socket object
socketpair() -- create a pair of new socket objects [*]
fromfd() -- create a socket object from an open file descriptor [*]
fromshare() -- create a socket object from data received from socket.share() [*]
gethostname() -- return the current hostname
gethostbyname() -- map a hostname to its IP number
gethostbyaddr() -- map an IP number or hostname to DNS info
getservbyname() -- map a service name and a protocol name to a port number
getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
htons(), htonl() -- convert 16, 32 bit int from host to network byte order
inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
socket.getdefaulttimeout() -- get the default timeout value
socket.setdefaulttimeout() -- set the default timeout value
create_connection() -- connects to an address, with an optional timeout and
optional source address.
[*] not available on all platforms!
Special objects:
SocketType -- type object for socket objects
error -- exception raised for I/O errors
has_ipv6 -- boolean value indicating if IPv6 is supported
Integer constants:
AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
Many other constants may be defined; these may be used in calls to
the setsockopt() and getsockopt() methods.
"""
import _socket
from _socket import *
import os, sys, io
try:
import errno
except ImportError:
errno = None
EBADF = getattr(errno, 'EBADF', 9)
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
__all__ = ["getfqdn", "create_connection"]
__all__.extend(os._get_exports_list(_socket))
_realsocket = socket
# WSA error codes
if sys.platform.lower().startswith("win"):
errorTab = {}
errorTab[10004] = "The operation was interrupted."
errorTab[10009] = "A bad file handle was passed."
errorTab[10013] = "Permission denied."
errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT
errorTab[10022] = "An invalid operation was attempted."
errorTab[10035] = "The socket operation would block"
errorTab[10036] = "A blocking operation is already in progress."
errorTab[10048] = "The network address is in use."
errorTab[10054] = "The connection has been reset."
errorTab[10058] = "The network has been shut down."
errorTab[10060] = "The operation timed out."
errorTab[10061] = "Connection refused."
errorTab[10063] = "The name is too long."
errorTab[10064] = "The host is down."
errorTab[10065] = "The host is unreachable."
__all__.append("errorTab")
class socket(_socket.socket):
"""A subclass of _socket.socket adding the makefile() method."""
__slots__ = ["__weakref__", "_io_refs", "_closed"]
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None):
_socket.socket.__init__(self, family, type, proto, fileno)
self._io_refs = 0
self._closed = False
def __enter__(self):
return self
def __exit__(self, *args):
if not self._closed:
self.close()
def __repr__(self):
"""Wrap __repr__() to reveal the real class name."""
s = _socket.socket.__repr__(self)
if s.startswith("<socket object"):
s = "<%s.%s%s%s" % (self.__class__.__module__,
self.__class__.__name__,
getattr(self, '_closed', False) and " [closed] " or "",
s[7:])
return s
def __getstate__(self):
raise TypeError("Cannot serialize socket object")
def dup(self):
"""dup() -> socket object
Return a new socket object connected to the same system resource.
"""
fd = dup(self.fileno())
sock = self.__class__(self.family, self.type, self.proto, fileno=fd)
sock.settimeout(self.gettimeout())
return sock
def accept(self):
"""accept() -> (socket object, address info)
Wait for an incoming connection. Return a new socket
representing the connection, and the address of the client.
For IP sockets, the address info is a pair (hostaddr, port).
"""
fd, addr = self._accept()
sock = socket(self.family, self.type, self.proto, fileno=fd)
# Issue #7995: if no default timeout is set and the listening
# socket had a (non-zero) timeout, force the new socket in blocking
# mode to override platform-specific socket flags inheritance.
if getdefaulttimeout() is None and self.gettimeout():
sock.setblocking(True)
return sock, addr
def makefile(self, mode="r", buffering=None, *,
encoding=None, errors=None, newline=None):
"""makefile(...) -> an I/O stream connected to the socket
The arguments are as for io.open() after the filename,
except the only mode characters supported are 'r', 'w' and 'b'.
The semantics are similar too. (XXX refactor to share code?)
"""
for c in mode:
if c not in {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)")
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
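# Usage sketch for makefile() (illustrative host/port only):
#
#   s = create_connection(('example.com', 80))
#   f = s.makefile('rwb', buffering=0)  # raw, unbuffered binary I/O
#   f.write(b'GET / HTTP/1.0\r\n\r\n')
#
# Closing the returned file object does not close the socket until
# every makefile() reference is released (see _decref_socketios below).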
def _decref_socketios(self):
if self._io_refs > 0:
self._io_refs -= 1
if self._closed:
self.close()
def _real_close(self, _ss=_socket.socket):
# This function should not reference any globals. See issue #808164.
_ss.close(self)
def close(self):
# This function should not reference any globals. See issue #808164.
self._closed = True
if self._io_refs <= 0:
self._real_close()
def detach(self):
"""detach() -> file descriptor
Close the socket object without closing the underlying file descriptor.
The object cannot be used after this call, but the file descriptor
can be reused for other purposes. The file descriptor is returned.
"""
self._closed = True
return super().detach()
def fromfd(fd, family, type, proto=0):
""" fromfd(fd, family, type[, proto]) -> socket object
Create a socket object from a duplicate of the given file
descriptor. The remaining arguments are the same as for socket().
"""
nfd = dup(fd)
return socket(family, type, proto, nfd)
if hasattr(_socket.socket, "share"):
def fromshare(info):
""" fromshare(info) -> socket object
Create a socket object from the bytes object returned by
socket.share(pid).
"""
return socket(0, 0, 0, info)
if hasattr(_socket, "socketpair"):
def socketpair(family=None, type=SOCK_STREAM, proto=0):
"""socketpair([family[, type[, proto]]]) -> (socket object, socket object)
Create a pair of socket objects from the sockets returned by the platform
socketpair() function.
The arguments are the same as for socket() except the default family is
AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
"""
if family is None:
try:
family = AF_UNIX
except NameError:
family = AF_INET
a, b = _socket.socketpair(family, type, proto)
a = socket(family, type, proto, a.detach())
b = socket(family, type, proto, b.detach())
return a, b
_blocking_errnos = { EAGAIN, EWOULDBLOCK }
class SocketIO(io.RawIOBase):
"""Raw I/O implementation for stream sockets.
This class supports the makefile() method on sockets. It provides
the raw I/O interface on top of a socket object.
"""
# One might wonder why not let FileIO do the job instead. There are two
# main reasons why FileIO is not adapted:
# - it wouldn't work under Windows (where you can't use read() and
# write() on a socket handle)
# - it wouldn't work with socket timeouts (FileIO would ignore the
# timeout and consider the socket non-blocking)
# XXX More docs
def __init__(self, sock, mode):
if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
raise ValueError("invalid mode: %r" % mode)
io.RawIOBase.__init__(self)
self._sock = sock
if "b" not in mode:
mode += "b"
self._mode = mode
self._reading = "r" in mode
self._writing = "w" in mode
self._timeout_occurred = False
def readinto(self, b):
"""Read up to len(b) bytes into the writable buffer *b* and return
the number of bytes read. If the socket is non-blocking and no bytes
are available, None is returned.
If *b* is non-empty, a 0 return value indicates that the connection
was shut down at the other end.
"""
self._checkClosed()
self._checkReadable()
if self._timeout_occurred:
raise IOError("cannot read from timed out object")
while True:
try:
return self._sock.recv_into(b)
except timeout:
self._timeout_occurred = True
raise
except InterruptedError:
continue
except error as e:
if e.args[0] in _blocking_errnos:
return None
raise
def write(self, b):
"""Write the given bytes or bytearray object *b* to the socket
and return the number of bytes written. This can be less than
len(b) if not all data could be written. If the socket is
non-blocking and no bytes could be written None is returned.
"""
self._checkClosed()
self._checkWritable()
try:
return self._sock.send(b)
except error as e:
# XXX what about EINTR?
if e.args[0] in _blocking_errnos:
return None
raise
def readable(self):
"""True if the SocketIO is open for reading.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._reading
def writable(self):
"""True if the SocketIO is open for writing.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._writing
def seekable(self):
"""True if the SocketIO is open for seeking.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return super().seekable()
def fileno(self):
"""Return the file descriptor of the underlying socket.
"""
self._checkClosed()
return self._sock.fileno()
@property
def name(self):
if not self.closed:
return self.fileno()
else:
return -1
@property
def mode(self):
return self._mode
def close(self):
"""Close the SocketIO object. This doesn't close the underlying
socket, except if all references to it have disappeared.
"""
if self.closed:
return
io.RawIOBase.close(self)
self._sock._decref_socketios()
self._sock = None
def getfqdn(name=''):
"""Get fully qualified domain name from name.
An empty argument is interpreted as meaning the local host.
First the hostname returned by gethostbyaddr() is checked, then
possibly existing aliases. In case no FQDN is available, hostname
from gethostname() is returned.
"""
name = name.strip()
if not name or name == '0.0.0.0':
name = gethostname()
try:
hostname, aliases, ipaddrs = gethostbyaddr(name)
except error:
pass
else:
aliases.insert(0, hostname)
for name in aliases:
if '.' in name:
break
else:
name = hostname
return name
_GLOBAL_DEFAULT_TIMEOUT = object()
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
A host of '' or port 0 tells the OS to use the default.
"""
host, port = address
err = None
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket(af, socktype, proto)
if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except error as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
else:
raise error("getaddrinfo returns an empty list")
| gpl-3.0 | 2,818,364,267,610,992,600 | 33.125858 | 87 | 0.603836 | false |
anthonypdawson/LazyLibrarian | lazylibrarian/webServe.py | 1 | 15735 | import os, cherrypy, urllib
from cherrypy.lib.static import serve_file
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions
import threading, time
import lazylibrarian
from lazylibrarian import logger, importer, database, postprocess, formatter
from lazylibrarian.searchnzb import searchbook
from lazylibrarian.formatter import checked
from lazylibrarian.gr import GoodReads
import lib.simplejson as simplejson
def serve_template(templatename, **kwargs):
interface_dir = os.path.join(str(lazylibrarian.PROG_DIR), 'data/interfaces/')
template_dir = os.path.join(str(interface_dir), lazylibrarian.HTTP_LOOK)
_hplookup = TemplateLookup(directories=[template_dir])
try:
template = _hplookup.get_template(templatename)
return template.render(**kwargs)
except:
return exceptions.html_error_template().render()
class WebInterface(object):
def index(self):
raise cherrypy.HTTPRedirect("home")
index.exposed=True
def home(self):
myDB = database.DBConnection()
authors = myDB.select('SELECT * from authors order by AuthorName COLLATE NOCASE')
return serve_template(templatename="index.html", title="Home", authors=authors)
home.exposed = True
def books(self, BookLang=None):
myDB = database.DBConnection()
languages = myDB.select('SELECT DISTINCT BookLang from books WHERE NOT STATUS="Skipped"')
if BookLang:
books = myDB.select('SELECT * from books WHERE BookLang=? AND NOT Status="Skipped"', [BookLang])
else:
books = myDB.select('SELECT * from books WHERE NOT STATUS="Skipped"')
if books is None:
raise cherrypy.HTTPRedirect("books")
return serve_template(templatename="books.html", title='Books', books=books, languages=languages)
books.exposed = True
def config(self):
http_look_dir = os.path.join(lazylibrarian.PROG_DIR, 'data/interfaces/')
http_look_list = [ name for name in os.listdir(http_look_dir) if os.path.isdir(os.path.join(http_look_dir, name)) ]
config = {
"http_host": lazylibrarian.HTTP_HOST,
"http_user": lazylibrarian.HTTP_USER,
"http_port": lazylibrarian.HTTP_PORT,
"http_pass": lazylibrarian.HTTP_PASS,
"http_look": lazylibrarian.HTTP_LOOK,
"http_look_list": http_look_list,
"launch_browser": checked(lazylibrarian.LAUNCH_BROWSER),
"logdir" : lazylibrarian.LOGDIR,
"use_imp_onlyisbn": checked(lazylibrarian.IMP_ONLYISBN),
"imp_preflang": lazylibrarian.IMP_PREFLANG,
"sab_host": lazylibrarian.SAB_HOST,
"sab_port": lazylibrarian.SAB_PORT,
"sab_api": lazylibrarian.SAB_API,
"sab_user": lazylibrarian.SAB_USER,
"sab_pass": lazylibrarian.SAB_PASS,
"use_destination_copy": checked(lazylibrarian.DESTINATION_COPY),
"destination_dir": lazylibrarian.DESTINATION_DIR,
"download_dir": lazylibrarian.DOWNLOAD_DIR,
"sab_cat": lazylibrarian.SAB_CAT,
"usenet_retention": lazylibrarian.USENET_RETENTION,
"use_blackhole": checked(lazylibrarian.BLACKHOLE),
"blackholedir": lazylibrarian.BLACKHOLEDIR,
"use_nzbmatrix" : checked(lazylibrarian.NZBMATRIX),
"nzbmatrix_user" : lazylibrarian.NZBMATRIX_USER,
"nzbmatrix_api" : lazylibrarian.NZBMATRIX_API,
"use_newznab" : checked(lazylibrarian.NEWZNAB),
"newznab_host" : lazylibrarian.NEWZNAB_HOST,
"newznab_api" : lazylibrarian.NEWZNAB_API,
"use_newzbin" : checked(lazylibrarian.NEWZBIN),
"newzbin_uid" : lazylibrarian.NEWZBIN_UID,
"newzbin_pass" : lazylibrarian.NEWZBIN_PASS,
"ebook_type" : lazylibrarian.EBOOK_TYPE,
"gr_api" : lazylibrarian.GR_API,
}
return serve_template(templatename="config.html", title="Settings", config=config)
config.exposed = True
def configUpdate(self, http_host='0.0.0.0', http_user=None, http_port=5299, http_pass=None, http_look=None, launch_browser=0, logdir=None, imp_onlyisbn=0, imp_preflang=None,
sab_host=None, sab_port=None, sab_api=None, sab_user=None, sab_pass=None, destination_copy=0, destination_dir=None, download_dir=None, sab_cat=None, usenet_retention=None, blackhole=0, blackholedir=None,
nzbmatrix=0, nzbmatrix_user=None, nzbmatrix_api=None, newznab=0, newznab_host=None, newznab_api=None, newzbin=0, newzbin_uid=None, newzbin_pass=None, ebook_type=None, gr_api=None):
lazylibrarian.HTTP_HOST = http_host
lazylibrarian.HTTP_PORT = http_port
lazylibrarian.HTTP_USER = http_user
lazylibrarian.HTTP_PASS = http_pass
lazylibrarian.HTTP_LOOK = http_look
lazylibrarian.LAUNCH_BROWSER = launch_browser
lazylibrarian.LOGDIR = logdir
lazylibrarian.IMP_ONLYISBN = imp_onlyisbn
lazylibrarian.IMP_PREFLANG = imp_preflang
lazylibrarian.SAB_HOST = sab_host
lazylibrarian.SAB_PORT = sab_port
lazylibrarian.SAB_API = sab_api
lazylibrarian.SAB_USER = sab_user
lazylibrarian.SAB_PASS = sab_pass
lazylibrarian.SAB_CAT = sab_cat
lazylibrarian.DESTINATION_COPY = destination_copy
lazylibrarian.DESTINATION_DIR = destination_dir
lazylibrarian.DOWNLOAD_DIR = download_dir
lazylibrarian.USENET_RETENTION = usenet_retention
lazylibrarian.BLACKHOLE = blackhole
lazylibrarian.BLACKHOLEDIR = blackholedir
lazylibrarian.NZBMATRIX = nzbmatrix
lazylibrarian.NZBMATRIX_USER = nzbmatrix_user
lazylibrarian.NZBMATRIX_API = nzbmatrix_api
lazylibrarian.NEWZNAB = newznab
lazylibrarian.NEWZNAB_HOST = newznab_host
lazylibrarian.NEWZNAB_API = newznab_api
lazylibrarian.NEWZBIN = newzbin
lazylibrarian.NEWZBIN_UID = newzbin_uid
lazylibrarian.NEWZBIN_PASS = newzbin_pass
lazylibrarian.EBOOK_TYPE = ebook_type
lazylibrarian.GR_API = gr_api
lazylibrarian.config_write()
raise cherrypy.HTTPRedirect("config")
configUpdate.exposed = True
def update(self):
lazylibrarian.SIGNAL = 'update'
message = 'Updating...'
return serve_template(templatename="shutdown.html", title="Updating", message=message, timer=120)
return page
update.exposed = True
#SEARCH
def search(self, name, type):
GR = GoodReads(name, type)
if len(name) == 0:
raise cherrypy.HTTPRedirect("config")
else:
searchresults = GR.find_results(name)
return serve_template(templatename="searchresults.html", title='Search Results for: "' + name + '"', searchresults=searchresults, type=type)
search.exposed = True
#AUTHOR
def authorPage(self, AuthorName, BookLang=None):
myDB = database.DBConnection()
languages = myDB.select('SELECT DISTINCT BookLang from books WHERE AuthorName LIKE ?', [AuthorName.replace("'","''")])
if BookLang:
querybooks = "SELECT * from books WHERE AuthorName LIKE '%s' AND BookLang = '%s' order by BookDate DESC, BookRate DESC" % (AuthorName.replace("'","''"), BookLang)
else:
querybooks = "SELECT * from books WHERE AuthorName LIKE '%s' AND (BookLang = '%s' OR BookLang = 'Unknown') order by BookDate DESC, BookRate DESC" % (AuthorName.replace("'","''"), lazylibrarian.IMP_PREFLANG)
queryauthors = "SELECT * from authors WHERE AuthorName LIKE '%s'" % AuthorName.replace("'","''")
author = myDB.action(queryauthors).fetchone()
books = myDB.select(querybooks)
if author is None:
raise cherrypy.HTTPRedirect("home")
return serve_template(templatename="author.html", title=author['AuthorName'], author=author, books=books, languages=languages)
authorPage.exposed = True
def pauseAuthor(self, AuthorID):
logger.info(u"Pausing author: " + AuthorID)
myDB = database.DBConnection()
controlValueDict = {'AuthorID': AuthorID}
newValueDict = {'Status': 'Paused'}
myDB.upsert("authors", newValueDict, controlValueDict)
raise cherrypy.HTTPRedirect("authorPage?AuthorID=%s" % AuthorID)
pauseAuthor.exposed = True
def resumeAuthor(self, AuthorID):
logger.info(u"Resuming author: " + AuthorID)
myDB = database.DBConnection()
controlValueDict = {'AuthorID': AuthorID}
newValueDict = {'Status': 'Active'}
myDB.upsert("authors", newValueDict, controlValueDict)
raise cherrypy.HTTPRedirect("authorPage?AuthorID=%s" % AuthorID)
resumeAuthor.exposed = True
def deleteAuthor(self, AuthorID):
logger.info(u"Removing author: " + AuthorID)
myDB = database.DBConnection()
myDB.action('DELETE from authors WHERE AuthorID=?', [AuthorID])
myDB.action('DELETE from books WHERE AuthorID=?', [AuthorID])
raise cherrypy.HTTPRedirect("home")
deleteAuthor.exposed = True
def refreshAuthor(self, AuthorID):
importer.addAuthorToDB(AuthorID)
raise cherrypy.HTTPRedirect("authorPage?AuthorID=%s" % AuthorID)
refreshAuthor.exposed=True
def addResults(self, authorname):
threading.Thread(target=importer.addAuthorToDB, args=[authorname]).start()
raise cherrypy.HTTPRedirect("authorPage?AuthorName=%s" % authorname)
addResults.exposed = True
#BOOKS
def openBook(self, bookLink=None, action=None, **args):
myDB = database.DBConnection()
# find book
bookdata = myDB.select('SELECT * from books WHERE BookLink=\'' + bookLink + '\'')
logger.debug(('SELECT * from books WHERE BookLink=\'' + bookLink + '\''))
if bookdata:
authorName = bookdata[0]["AuthorName"]
bookName = bookdata[0]["BookName"]
dic = {'<':'', '>':'', '=':'', '?':'', '"':'', ',':'', '*':'', ':':'', ';':'', '\'':''}
bookName = formatter.latinToAscii(formatter.replace_all(bookName, dic))
if (lazylibrarian.INSTALL_TYPE == 'win'):
dest_dir = lazylibrarian.DESTINATION_DIR + '\\' + authorName + '\\' + bookName
else:
dest_dir = lazylibrarian.DESTINATION_DIR + '//' + authorName + '//' + bookName
logger.debug('bookdir ' + dest_dir)
if os.path.isdir(dest_dir):
for file2 in os.listdir(dest_dir):
if file2.lower().find(".jpg") < 0 and file2.lower().find(".opf") < 0:
logger.info('Opening file ' + str(file2))
return serve_file(os.path.join(dest_dir, file2), "application/x-download", "attachment")
openBook.exposed = True
def searchForBook(self, bookLink=None, action=None, **args):
myDB = database.DBConnection()
# find book
bookdata = myDB.select('SELECT * from books WHERE BookLink=\'' + bookLink + '\'')
logger.debug(('SELECT * from books WHERE BookLink=\'' + bookLink + '\''))
if bookdata:
bookid = bookdata[0]["BookID"]
AuthorName = bookdata[0]["AuthorName"]
# start searchthreads
books = []
books.append({"bookid": bookid})
threading.Thread(target=searchbook, args=[books]).start()
logger.info("Searching for book with id: " + str(bookid));
if AuthorName:
raise cherrypy.HTTPRedirect("authorPage?AuthorName=%s" % AuthorName)
searchForBook.exposed = True
def markBooks(self, AuthorName=None, action=None, **args):
myDB = database.DBConnection()
for bookid in args:
# ouch dirty workaround...
if not bookid == 'book_table_length':
controlValueDict = {'BookID': bookid}
newValueDict = {'Status': action}
myDB.upsert("books", newValueDict, controlValueDict)
logger.debug('Status set to %s for BookID: %s' % (action, bookid))
#the authors table needs to be updated every time a book is marked differently
query = 'SELECT COUNT(*) FROM books WHERE AuthorName="%s" AND (Status="Have" OR Status="Open")' % AuthorName
countbooks = myDB.action(query).fetchone()
havebooks = int(countbooks[0])
controlValueDict = {"AuthorName": AuthorName}
newValueDict = {"HaveBooks": havebooks}
myDB.upsert("authors", newValueDict, controlValueDict)
# start searchthreads
books = []
for bookid in args:
# ouch dirty workaround...
if not bookid == 'book_table_length':
if action == 'Wanted':
books.append({"bookid": bookid})
threading.Thread(target=searchbook, args=[books]).start()
if AuthorName:
raise cherrypy.HTTPRedirect("authorPage?AuthorName=%s" % AuthorName)
markBooks.exposed = True
def manProcess(self):
threading.Thread(target=postprocess.processDir).start()
raise cherrypy.HTTPRedirect("books")
manProcess.exposed = True
def checkForUpdates(self):
#check the version when the application starts
from lazylibrarian import versioncheck
lazylibrarian.CURRENT_VERSION = versioncheck.getVersion()
versioncheck.checkGithub()
raise cherrypy.HTTPRedirect("config")
checkForUpdates.exposed = True
def getLog(self,iDisplayStart=0,iDisplayLength=100,iSortCol_0=0,sSortDir_0="desc",sSearch="",**kwargs):
iDisplayStart = int(iDisplayStart)
iDisplayLength = int(iDisplayLength)
filtered = []
if sSearch == "":
filtered = lazylibrarian.LOGLIST[::]
else:
filtered = [row for row in lazylibrarian.LOGLIST for column in row if sSearch in column]
sortcolumn = 0
if iSortCol_0 == '1':
sortcolumn = 2
elif iSortCol_0 == '2':
sortcolumn = 1
filtered.sort(key=lambda x:x[sortcolumn],reverse=sSortDir_0 == "desc")
rows = filtered[iDisplayStart:(iDisplayStart+iDisplayLength)]
rows = [[row[0],row[2],row[1]] for row in rows]
dict = {'iTotalDisplayRecords':len(filtered),
'iTotalRecords':len(lazylibrarian.LOGLIST),
'aaData':rows,
}
s = simplejson.dumps(dict)
return s
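# DataTables server-side protocol sketch: iDisplayStart/iDisplayLength
# page the log, iSortCol_0/sSortDir_0 choose sort column and direction,
# sSearch filters rows, and the JSON reply returns the page in aaData
# together with the filtered and total row counts.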
getLog.exposed = True
def logs(self):
return serve_template(templatename="logs.html", title="Log", lineList=lazylibrarian.LOGLIST)
logs.exposed = True
def shutdown(self):
lazylibrarian.config_write()
lazylibrarian.SIGNAL = 'shutdown'
message = 'closing ...'
return serve_template(templatename="shutdown.html", title="Close library", message=message, timer=15)
return page
shutdown.exposed = True
def restart(self):
lazylibrarian.SIGNAL = 'restart'
message = 'reopening ...'
return serve_template(templatename="shutdown.html", title="Reopen library", message=message, timer=30)
restart.exposed = True
| gpl-3.0 | -2,966,894,634,946,641,000 | 42.952514 | 218 | 0.617604 | false |
apporc/nova | nova/db/sqlalchemy/migrate_repo/versions/234_add_expire_reservations_index.py | 146 | 1511 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from sqlalchemy import Index, MetaData, Table
from nova.i18n import _LI
LOG = logging.getLogger(__name__)
def _get_deleted_expire_index(table):
members = sorted(['deleted', 'expire'])
for idx in table.indexes:
if sorted(idx.columns.keys()) == members:
return idx
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
reservations = Table('reservations', meta, autoload=True)
if _get_deleted_expire_index(reservations):
LOG.info(_LI('Skipped adding reservations_deleted_expire_idx '
'because an equivalent index already exists.'))
return
# Based on expire_reservations query
# from: nova/db/sqlalchemy/api.py
index = Index('reservations_deleted_expire_idx',
reservations.c.deleted, reservations.c.expire)
index.create(migrate_engine)
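# For reference, the index added above corresponds roughly to this SQL
# (exact DDL varies by database backend):
#
#   CREATE INDEX reservations_deleted_expire_idx
#       ON reservations (deleted, expire);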
| apache-2.0 | -7,092,286,350,196,333,000 | 32.577778 | 78 | 0.688948 | false |
andrewruba/YangLab | SlideBook spl file converter/spl_reader python 3.py | 1 | 7317 | # -*- coding: utf-8 -*-
"""
Created on Wed Feb 15 12:40:19 2017
@author: Andrew Ruba
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import tkinter as tk
import numpy as np
import cv2
import os
import shutil
import tkinter.filedialog as tkfd
class splreader(tk.Frame):
def __init__(self, master = None):
tk.Frame.__init__(self, master)
self.message = tk.StringVar()
self.folder = None
self.files = None
self.mergedfiles = None
self.pack()
self.createwidgets()
self.master = master
def createwidgets(self):
self.TOPCONTAINER = tk.Frame(self)
self.TOPCONTAINER.pack({'side': 'top'})
self.LEFTTOPCONTAINER = tk.Frame(self.TOPCONTAINER)
self.LEFTTOPCONTAINER.pack({'side': 'left'})
self.BOTTOMCONTAINER = tk.Frame(self)
self.BOTTOMCONTAINER.pack({'side': 'bottom'})
self.SELECT = tk.Button(self.LEFTTOPCONTAINER)
self.SELECT['text'] = 'Select folder containing .spl files'
self.SELECT['command'] = lambda: self.selectfolder()
self.SELECT.pack({'side': 'top'})
self.QUIT = tk.Button(self.LEFTTOPCONTAINER)
self.QUIT['text'] = 'Quit'
self.QUIT['fg'] = red
self.QUIT['command'] = lambda: self.quit()
self.QUIT.pack({'side': 'top'})
self.MESSAGE = tk.Label(self.BOTTOMCONTAINER)
self.MESSAGE['textvariable'] = self.message
self.message.set('Select a folder.')
self.MESSAGE.pack({'side': 'right'})
def spl_to_tif(self, filename):
##converting .spl file to .tiff
f = open(filename, 'rb')
filetitle = filename.split('.')
filetitle.pop()
filetitle = '.'.join(filetitle)
if os.path.exists(filetitle):
shutil.rmtree(filetitle)
os.mkdir(filetitle)
else:
os.mkdir(filetitle)
filetitlename = filetitle.split('\\').pop()
frame_header = ['f6','01','01','01','49','49','00','01']
pic_width = 0
pic_height = 0
dimensions = []
temp_header = []
temp_frame = []
frame_number = 1
end_of_video = False
looking_for_header = False
found_header = False
getting_pixel_data = False
counter = 0
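# Parser state machine (descriptive summary of the loop below):
# 1. scan: read bytes until one matches the first frame_header byte;
# 2. looking_for_header: check the remaining magic bytes, resetting on
#    the first mismatch;
# 3. found_header: count down through the 248-byte frame header,
#    grabbing the four little-endian width/height bytes when the
#    countdown passes 238..235;
# 4. getting_pixel_data: collect width*height 16-bit pixels, write the
#    frame out as <frame_number>.tif, then return to scanning.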
while not end_of_video:
try:
val = "{:02x}".format(ord(f.read(1)))
except:
end_of_video = True
break
if counter == 0 and val == frame_header[0] and not looking_for_header and not found_header and not getting_pixel_data:
temp_header.append(val)
getting_pixel_data = False
looking_for_header = True
continue
if looking_for_header:
could_be_header = True
temp_header.append(val)
for i in range(len(temp_header)):
if temp_header[i] != frame_header[i]:
could_be_header = False
looking_for_header = False
temp_header = []
if could_be_header and len(temp_header) == len(frame_header):
found_header = True
looking_for_header = False
temp_header = []
counter = 248
continue
if found_header:
if counter >= 235 and counter <= 238:
dimensions.append(val)
if counter == 1:
found_header = False
getting_pixel_data = True
pic_width = int(dimensions[1] + dimensions[0], 16)
pic_height = int(dimensions[3] + dimensions[2], 16)
dimensions = []
counter = pic_width * pic_height * 2
counter -= 1
continue
if getting_pixel_data:
temp_frame.append(val)
if counter == 0:
self.message.set(str(frame_number) + ' frames converted from ' + filetitlename + '.spl')
self.update()
getting_pixel_data = False
temp_frame = [ x+y for x,y in zip(temp_frame[1::2], temp_frame[0::2]) ]
temp_frame = [ int(x, 16) for x in temp_frame ]
temp_frame = np.reshape(temp_frame, (pic_height, pic_width))
temp_frame = np.array(temp_frame).astype(np.uint16)
cv2.imwrite(filetitle + '/' + str(frame_number) + '.tif', temp_frame)
frame_number += 1
temp_frame = []
continue
counter -= 1
continue
def mergefiles(self):
if self.files != None:
self.SELECT['text'] = 'Select folder containing .spl files'
self.SELECT['fg'] = black
self.SELECT['command'] = lambda: self.selectfolder()
for filename in self.files:
self.spl_to_tif(filename)
self.message.set('Done. Select a folder.')
def listfiles(self, folder, file_ext):
folder_list = os.listdir(folder)
file_list = []
for file_path in folder_list:
split_ext = file_path.split(os.extsep).pop()
if file_ext == split_ext:
file_list.append(os.path.normpath(os.path.join(folder, file_path)))
return file_list
def selectfolder(self):
self.folder = tkfd.askdirectory(initialdir = 'C:\\', parent = self, title = 'Select a folder', mustexist = True)
self.files = self.listfiles(self.folder, 'spl')
self.message.set(str(len(self.files)) + ' .spl file(s) found.')
if len(self.files) > 0:
self.SELECT['text'] = 'convert .spl files'
self.SELECT['fg'] = green
self.SELECT['command'] = lambda: self.mergefiles()
print (self.files)
root = tk.Tk()
root.geometry('250x75')
root.title(string = '.spl reader')
orange = '#FF7F22'
black = '#000000'
gray = '#D3D3D3'
white = '#FFFFFF'
red = '#FF0000'
green = '#008000'
app = splreader(master = root)
app.mainloop()
root.destroy() | gpl-3.0 | -4,749,608,567,652,602,000 | 32.523585 | 130 | 0.510182 | false |
multikatt/CouchPotatoServer | libs/pyasn1/debug.py | 161 | 3044 | import time
import logging
from pyasn1.compat.octets import octs2ints
from pyasn1 import error
from pyasn1 import __version__
flagNone = 0x0000
flagEncoder = 0x0001
flagDecoder = 0x0002
flagAll = 0xffff
flagMap = {
'encoder': flagEncoder,
'decoder': flagDecoder,
'all': flagAll
}
class Printer:
def __init__(self, logger=None, handler=None, formatter=None):
if logger is None:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if handler is None:
handler = logging.StreamHandler()
if formatter is None:
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
self.__logger = logger
def __call__(self, msg): self.__logger.debug(msg)
def __str__(self): return '<python built-in logging>'
if hasattr(logging, 'NullHandler'):
NullHandler = logging.NullHandler
else:
# Python 2.6 and older
class NullHandler(logging.Handler):
def emit(self, record):
pass
class Debug:
defaultPrinter = None
def __init__(self, *flags, **options):
self._flags = flagNone
if options.get('printer') is not None:
self._printer = options.get('printer')
elif self.defaultPrinter is not None:
self._printer = self.defaultPrinter
if 'loggerName' in options:
# route our logs to parent logger
self._printer = Printer(
logger=logging.getLogger(options['loggerName']),
handler=NullHandler()
)
else:
self._printer = Printer()
self('running pyasn1 version %s' % __version__)
for f in flags:
inverse = f and f[0] in ('!', '~')
if inverse:
f = f[1:]
try:
if inverse:
self._flags &= ~flagMap[f]
else:
self._flags |= flagMap[f]
except KeyError:
raise error.PyAsn1Error('bad debug flag %s' % f)
self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled'))
def __str__(self):
return 'logger %s, flags %x' % (self._printer, self._flags)
def __call__(self, msg):
self._printer(msg)
def __and__(self, flag):
return self._flags & flag
def __rand__(self, flag):
return flag & self._flags
logger = 0
def setLogger(l):
global logger
logger = l
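# Usage sketch -- enabling pyasn1 debug output from application code
# (flag names come from flagMap above):
#
#   from pyasn1 import debug
#   debug.setLogger(debug.Debug('all', '!decoder'))
#
# A '!' or '~' prefix disables a category; pass loggerName='...' in the
# keyword options to route output through an existing logging tree.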
def hexdump(octets):
return ' '.join(
[ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x)
for n,x in zip(range(len(octets)), octs2ints(octets)) ]
)
class Scope:
def __init__(self):
self._list = []
def __str__(self): return '.'.join(self._list)
def push(self, token):
self._list.append(token)
def pop(self):
return self._list.pop()
scope = Scope()
| gpl-3.0 | -1,125,818,364,910,566,000 | 26.672727 | 87 | 0.550263 | false |
Vagab0nd/SiCKRAGE | lib3/imdb/Movie.py | 1 | 13758 | # Copyright 2004-2018 Davide Alberani <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
This module provides the Movie class, used to store information about
a given movie.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from copy import deepcopy
from imdb import linguistics
from imdb.utils import _Container
from imdb.utils import analyze_title, build_title, canonicalTitle, cmpMovies, flatten
class Movie(_Container):
"""A Movie.
Every information about a movie can be accessed as::
movieObject['information']
to get a list of the kind of information stored in a
Movie object, use the keys() method; some useful aliases
are defined (as "casting" for the "casting director" key); see
the keys_alias dictionary.
"""
# The default sets of information retrieved.
default_info = ('main', 'plot')
# Aliases for some not-so-intuitive keys.
keys_alias = {
'tv schedule': 'airing',
'user rating': 'rating',
'plot summary': 'plot',
'plot summaries': 'plot',
'directed by': 'director',
'actors': 'cast',
'actresses': 'cast',
'aka': 'akas',
'also known as': 'akas',
'country': 'countries',
'production country': 'countries',
'production countries': 'countries',
'genre': 'genres',
'runtime': 'runtimes',
'lang': 'languages',
'color': 'color info',
'cover': 'cover url',
'full-size cover': 'full-size cover url',
'seasons': 'number of seasons',
'language': 'languages',
'certificate': 'certificates',
'certifications': 'certificates',
'certification': 'certificates',
'episodes number': 'number of episodes',
'faq': 'faqs',
'technical': 'tech',
'frequently asked questions': 'faqs'
}
keys_tomodify_list = (
'plot', 'trivia', 'alternate versions', 'goofs',
'quotes', 'dvd', 'laserdisc', 'news', 'soundtrack',
'crazy credits', 'business', 'supplements',
'video review', 'faqs'
)
_image_key = 'cover url'
cmpFunct = cmpMovies
def _init(self, **kwds):
"""Initialize a Movie object.
*movieID* -- the unique identifier for the movie.
*title* -- the title of the Movie, if not in the data dictionary.
*myTitle* -- your personal title for the movie.
*myID* -- your personal identifier for the movie.
*data* -- a dictionary used to initialize the object.
*currentRole* -- a Character instance representing the current role
or duty of a person in this movie, or a Person
object representing the actor/actress who played
a given character in a Movie. If a string is
passed, an object is automatically built.
*roleID* -- if available, the characterID/personID of the currentRole
object.
*roleIsPerson* -- when False (default) the currentRole is assumed
to be a Character object, otherwise a Person.
*notes* -- notes for the person referred in the currentRole
attribute; e.g.: '(voice)'.
*accessSystem* -- a string representing the data access system used.
*titlesRefs* -- a dictionary with references to movies.
*namesRefs* -- a dictionary with references to persons.
*charactersRefs* -- a dictionary with references to characters.
*modFunct* -- function called returning text fields.
"""
title = kwds.get('title')
if title and 'title' not in self.data:
self.set_title(title)
self.movieID = kwds.get('movieID', None)
self.myTitle = kwds.get('myTitle', '')
def _reset(self):
"""Reset the Movie object."""
self.movieID = None
self.myTitle = ''
def set_title(self, title):
"""Set the title of the movie."""
d_title = analyze_title(title)
self.data.update(d_title)
def _additional_keys(self):
"""Valid keys to append to the data.keys() list."""
addkeys = []
if 'title' in self.data:
addkeys += ['canonical title', 'long imdb title',
'long imdb canonical title',
'smart canonical title',
'smart long imdb canonical title']
if 'episode of' in self.data:
addkeys += ['long imdb episode title', 'series title',
'canonical series title', 'episode title',
'canonical episode title',
'smart canonical series title',
'smart canonical episode title']
if 'cover url' in self.data:
addkeys += ['full-size cover url']
return addkeys
def guessLanguage(self):
"""Guess the language of the title of this movie; returns None
if there are no hints."""
lang = self.get('languages')
if lang:
lang = lang[0]
else:
country = self.get('countries')
if country:
lang = linguistics.COUNTRY_LANG.get(country[0])
return lang
def smartCanonicalTitle(self, title=None, lang=None):
"""Return the canonical title, guessing its language.
The title can be forced with the 'title' argument (internally
used) and the language can be forced with the 'lang' argument,
otherwise it's auto-detected."""
if title is None:
title = self.data.get('title', '')
if lang is None:
lang = self.guessLanguage()
return canonicalTitle(title, lang=lang)
def _getSeriesTitle(self, obj):
"""Get the title from a Movie object or return the string itself."""
if isinstance(obj, Movie):
return obj.get('title', '')
return obj
def _getitem(self, key):
"""Handle special keys."""
if 'episode of' in self.data:
if key == 'long imdb episode title':
return build_title(self.data)
elif key == 'series title':
return self._getSeriesTitle(self.data['episode of'])
elif key == 'canonical series title':
ser_title = self._getSeriesTitle(self.data['episode of'])
return canonicalTitle(ser_title)
elif key == 'smart canonical series title':
ser_title = self._getSeriesTitle(self.data['episode of'])
return self.smartCanonicalTitle(ser_title)
elif key == 'episode title':
return self.data.get('title', '')
elif key == 'canonical episode title':
return canonicalTitle(self.data.get('title', ''))
elif key == 'smart canonical episode title':
return self.smartCanonicalTitle(self.data.get('title', ''))
if 'title' in self.data:
if key == 'title':
return self.data['title']
elif key == 'long imdb title':
return build_title(self.data)
elif key == 'canonical title':
return canonicalTitle(self.data['title'])
elif key == 'smart canonical title':
return self.smartCanonicalTitle(self.data['title'])
elif key == 'long imdb canonical title':
return build_title(self.data, canonical=True)
elif key == 'smart long imdb canonical title':
return build_title(self.data, canonical=True, lang=self.guessLanguage())
if key == 'full-size cover url':
return self.get_fullsizeURL()
return None
def getID(self):
"""Return the movieID."""
return self.movieID
def __bool__(self):
"""The Movie is "false" if the self.data does not contain a title."""
# XXX: check the title and the movieID?
return 'title' in self.data
def isSameTitle(self, other):
"""Return true if this and the compared object have the same
long imdb title and/or movieID.
"""
# XXX: obsolete?
if not isinstance(other, self.__class__):
return False
if 'title' in self.data and 'title' in other.data and \
build_title(self.data, canonical=False) == build_title(other.data, canonical=False):
return True
if self.accessSystem == other.accessSystem and \
self.movieID is not None and self.movieID == other.movieID:
return True
return False
isSameMovie = isSameTitle # XXX: just for backward compatibility.
def __contains__(self, item):
"""Return true if the given Person object is listed in this Movie,
or if the given Character is represented in this Movie."""
from .Person import Person
from .Character import Character
from .Company import Company
if isinstance(item, Person):
for p in flatten(self.data, yieldDictKeys=True, scalar=Person,
toDescend=(list, dict, tuple, Movie)):
if item.isSame(p):
return True
elif isinstance(item, Character):
for p in flatten(self.data, yieldDictKeys=True, scalar=Person,
toDescend=(list, dict, tuple, Movie)):
if item.isSame(p.currentRole):
return True
elif isinstance(item, Company):
for c in flatten(self.data, yieldDictKeys=True, scalar=Company,
toDescend=(list, dict, tuple, Movie)):
if item.isSame(c):
return True
elif isinstance(item, str):
return item in self.data
return False
def __deepcopy__(self, memo):
"""Return a deep copy of a Movie instance."""
m = Movie(title='', movieID=self.movieID, myTitle=self.myTitle,
myID=self.myID, data=deepcopy(self.data, memo),
currentRole=deepcopy(self.currentRole, memo),
roleIsPerson=self._roleIsPerson,
notes=self.notes, accessSystem=self.accessSystem,
titlesRefs=deepcopy(self.titlesRefs, memo),
namesRefs=deepcopy(self.namesRefs, memo),
charactersRefs=deepcopy(self.charactersRefs, memo))
m.current_info = list(self.current_info)
m.set_mod_funct(self.modFunct)
return m
def __repr__(self):
"""String representation of a Movie object."""
# XXX: add also currentRole and notes, if present?
if 'long imdb episode title' in self:
title = self.get('long imdb episode title')
else:
title = self.get('long imdb title')
return '<Movie id:%s[%s] title:_%s_>' % (self.movieID, self.accessSystem, title)
def __str__(self):
"""Simply print the short title."""
return self.get('title', '')
def summary(self):
"""Return a string with a pretty-printed summary for the movie."""
if not self:
return ''
def _nameAndRole(personList, joiner=', '):
"""Build a pretty string with name and role."""
nl = []
for person in personList:
n = person.get('name', '')
if person.currentRole:
n += ' (%s)' % person.currentRole
nl.append(n)
return joiner.join(nl)
s = 'Movie\n=====\nTitle: %s\n' % self.get('long imdb canonical title', '')
genres = self.get('genres')
if genres:
s += 'Genres: %s.\n' % ', '.join(genres)
director = self.get('director')
if director:
s += 'Director: %s.\n' % _nameAndRole(director)
writer = self.get('writer')
if writer:
s += 'Writer: %s.\n' % _nameAndRole(writer)
cast = self.get('cast')
if cast:
cast = cast[:5]
s += 'Cast: %s.\n' % _nameAndRole(cast)
runtime = self.get('runtimes')
if runtime:
s += 'Runtime: %s.\n' % ', '.join(runtime)
countries = self.get('countries')
if countries:
s += 'Country: %s.\n' % ', '.join(countries)
lang = self.get('languages')
if lang:
s += 'Language: %s.\n' % ', '.join(lang)
rating = self.get('rating')
if rating:
s += 'Rating: %s' % rating
nr_votes = self.get('votes')
if nr_votes:
s += ' (%s votes)' % nr_votes
s += '.\n'
plot = self.get('plot')
if not plot:
plot = self.get('plot summary')
if plot:
plot = [plot]
if plot:
plot = plot[0]
i = plot.find('::')
if i != -1:
plot = plot[:i]
s += 'Plot: %s' % plot
return s
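# Usage sketch (requires an IMDb access system; the movieID below is
# purely illustrative):
#
#   from imdb import IMDb
#   ia = IMDb()
#   movie = ia.get_movie('0133093')
#   print(movie['long imdb title'], movie.get('rating'))
#   print(movie.summary())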
| gpl-3.0 | 7,876,707,654,463,566,000 | 38.994186 | 100 | 0.562364 | false |
Therp/odoo | addons/account_bank_statement_extensions/account_bank_statement.py | 179 | 6737 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class account_bank_statement(osv.osv):
_inherit = 'account.bank.statement'
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
# bypass obsolete statement line resequencing
if vals.get('line_ids', False) or context.get('ebanking_import', False):
res = super(osv.osv, self).write(cr, uid, ids, vals, context=context)
else:
res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
return res
def button_confirm_bank(self, cr, uid, ids, context=None):
bank_statement_line_obj = self.pool.get('account.bank.statement.line')
super(account_bank_statement, self).button_confirm_bank(cr, uid, ids, context=context)
for st in self.browse(cr, uid, ids, context=context):
if st.line_ids:
line_ids = [l.id for l in st.line_ids]
cr.execute("UPDATE account_bank_statement_line \
SET state='confirm' WHERE id in %s ",
(tuple(line_ids),))
bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
return True
def button_cancel(self, cr, uid, ids, context=None):
bank_statement_line_obj = self.pool.get('account.bank.statement.line')
super(account_bank_statement, self).button_cancel(cr, uid, ids, context=context)
for st in self.browse(cr, uid, ids, context=context):
if st.line_ids:
line_ids = [l.id for l in st.line_ids]
cr.execute("UPDATE account_bank_statement_line \
SET state='draft' WHERE id in %s ",
(tuple(line_ids),))
bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
return True
class account_bank_statement_line_global(osv.osv):
_name = 'account.bank.statement.line.global'
_description = 'Batch Payment Info'
_columns = {
'name': fields.char('OBI', required=True, help="Originator to Beneficiary Information"),
'code': fields.char('Code', size=64, required=True),
'parent_id': fields.many2one('account.bank.statement.line.global', 'Parent Code', ondelete='cascade'),
'child_ids': fields.one2many('account.bank.statement.line.global', 'parent_id', 'Child Codes', copy=True),
'type': fields.selection([
('iso20022', 'ISO 20022'),
('coda', 'CODA'),
('manual', 'Manual'),
], 'Type', required=True),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
'bank_statement_line_ids': fields.one2many('account.bank.statement.line', 'globalisation_id', 'Bank Statement Lines'),
}
_rec_name = 'code'
_defaults = {
'code': lambda s,c,u,ctx={}: s.pool.get('ir.sequence').get(c, u, 'account.bank.statement.line.global'),
'name': '/',
}
_sql_constraints = [
('code_uniq', 'unique (code)', 'The code must be unique !'),
]
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = []
if name:
ids = self.search(cr, user, [('code', 'ilike', name)] + args, limit=limit)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args, limit=limit)
if not ids and len(name.split()) >= 2:
#Separating code and name for searching
operand1, operand2 = name.split(' ', 1) #name can contain spaces
ids = self.search(cr, user, [('code', 'like', operand1), ('name', operator, operand2)] + args, limit=limit)
else:
ids = self.search(cr, user, args, context=context, limit=limit)
return self.name_get(cr, user, ids, context=context)
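# Search sketch (illustrative input): for name_search(cr, uid,
# '2011/001 payroll') the code is tried first against the full string,
# then the OBI name; failing both, the string is split so that
# '2011/001' is matched against the code and 'payroll' against the
# name via the two-operand branch above.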
class account_bank_statement_line(osv.osv):
_inherit = 'account.bank.statement.line'
_columns = {
'val_date': fields.date('Value Date', states={'confirm': [('readonly', True)]}),
'globalisation_id': fields.many2one('account.bank.statement.line.global', 'Globalisation ID',
states={'confirm': [('readonly', True)]},
help="Code to identify transactions belonging to the same globalisation level within a batch payment"),
'globalisation_amount': fields.related('globalisation_id', 'amount', type='float',
relation='account.bank.statement.line.global', string='Glob. Amount', readonly=True),
'state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed')],
'Status', required=True, readonly=True, copy=False),
'counterparty_name': fields.char('Counterparty Name', size=35),
'counterparty_bic': fields.char('Counterparty BIC', size=11),
'counterparty_number': fields.char('Counterparty Number', size=34),
'counterparty_currency': fields.char('Counterparty Currency', size=3),
}
_defaults = {
'state': 'draft',
}
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
if context.get('block_statement_line_delete', False):
raise osv.except_osv(_('Warning!'), _('Delete operation not allowed. \
Please go to the associated bank statement in order to delete and/or modify bank statement line.'))
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -3,009,342,559,880,495,000 | 47.818841 | 126 | 0.606353 | false |
bbbenja/SickRage | lib/unidecode/x00c.py | 252 | 4102 | data = (
'[?]', # 0x00
'N', # 0x01
'N', # 0x02
'H', # 0x03
'[?]', # 0x04
'a', # 0x05
'aa', # 0x06
'i', # 0x07
'ii', # 0x08
'u', # 0x09
'uu', # 0x0a
'R', # 0x0b
'L', # 0x0c
'[?]', # 0x0d
'e', # 0x0e
'ee', # 0x0f
'ai', # 0x10
'[?]', # 0x11
'o', # 0x12
'oo', # 0x13
'au', # 0x14
'k', # 0x15
'kh', # 0x16
'g', # 0x17
'gh', # 0x18
'ng', # 0x19
'c', # 0x1a
'ch', # 0x1b
'j', # 0x1c
'jh', # 0x1d
'ny', # 0x1e
'tt', # 0x1f
'tth', # 0x20
'dd', # 0x21
'ddh', # 0x22
'nn', # 0x23
't', # 0x24
'th', # 0x25
'd', # 0x26
'dh', # 0x27
'n', # 0x28
'[?]', # 0x29
'p', # 0x2a
'ph', # 0x2b
'b', # 0x2c
'bh', # 0x2d
'm', # 0x2e
'y', # 0x2f
'r', # 0x30
'rr', # 0x31
'l', # 0x32
'll', # 0x33
'[?]', # 0x34
'v', # 0x35
'sh', # 0x36
'ss', # 0x37
's', # 0x38
'h', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'[?]', # 0x3c
'[?]', # 0x3d
'aa', # 0x3e
'i', # 0x3f
'ii', # 0x40
'u', # 0x41
'uu', # 0x42
'R', # 0x43
'RR', # 0x44
'[?]', # 0x45
'e', # 0x46
'ee', # 0x47
'ai', # 0x48
'[?]', # 0x49
'o', # 0x4a
'oo', # 0x4b
'au', # 0x4c
'', # 0x4d
'[?]', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
'[?]', # 0x53
'[?]', # 0x54
'+', # 0x55
'+', # 0x56
'[?]', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'RR', # 0x60
'LL', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'[?]', # 0x64
'[?]', # 0x65
'0', # 0x66
'1', # 0x67
'2', # 0x68
'3', # 0x69
'4', # 0x6a
'5', # 0x6b
'6', # 0x6c
'7', # 0x6d
'8', # 0x6e
'9', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'N', # 0x82
'H', # 0x83
'[?]', # 0x84
'a', # 0x85
'aa', # 0x86
'i', # 0x87
'ii', # 0x88
'u', # 0x89
'uu', # 0x8a
'R', # 0x8b
'L', # 0x8c
'[?]', # 0x8d
'e', # 0x8e
'ee', # 0x8f
'ai', # 0x90
'[?]', # 0x91
'o', # 0x92
'oo', # 0x93
'au', # 0x94
'k', # 0x95
'kh', # 0x96
'g', # 0x97
'gh', # 0x98
'ng', # 0x99
'c', # 0x9a
'ch', # 0x9b
'j', # 0x9c
'jh', # 0x9d
'ny', # 0x9e
'tt', # 0x9f
'tth', # 0xa0
'dd', # 0xa1
'ddh', # 0xa2
'nn', # 0xa3
't', # 0xa4
'th', # 0xa5
'd', # 0xa6
'dh', # 0xa7
'n', # 0xa8
'[?]', # 0xa9
'p', # 0xaa
'ph', # 0xab
'b', # 0xac
'bh', # 0xad
'm', # 0xae
'y', # 0xaf
'r', # 0xb0
'rr', # 0xb1
'l', # 0xb2
'll', # 0xb3
'[?]', # 0xb4
'v', # 0xb5
'sh', # 0xb6
'ss', # 0xb7
's', # 0xb8
'h', # 0xb9
'[?]', # 0xba
'[?]', # 0xbb
'[?]', # 0xbc
'[?]', # 0xbd
'aa', # 0xbe
'i', # 0xbf
'ii', # 0xc0
'u', # 0xc1
'uu', # 0xc2
'R', # 0xc3
'RR', # 0xc4
'[?]', # 0xc5
'e', # 0xc6
'ee', # 0xc7
'ai', # 0xc8
'[?]', # 0xc9
'o', # 0xca
'oo', # 0xcb
'au', # 0xcc
'', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'+', # 0xd5
'+', # 0xd6
'[?]', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'lll', # 0xde
'[?]', # 0xdf
'RR', # 0xe0
'LL', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'0', # 0xe6
'1', # 0xe7
'2', # 0xe8
'3', # 0xe9
'4', # 0xea
'5', # 0xeb
'6', # 0xec
'7', # 0xed
'8', # 0xee
'9', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
| gpl-3.0 | -909,067,443,063,525,800 | 14.961089 | 16 | 0.306192 | false |
BiznetGIO/horizon | openstack_dashboard/dashboards/admin/hypervisors/tables.py | 10 | 3108 | # Copyright 2013 B1 Systems GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from horizon.templatetags import sizeformat
class AdminHypervisorsTable(tables.DataTable):
hostname = tables.WrappingColumn("hypervisor_hostname",
link="horizon:admin:hypervisors:detail",
verbose_name=_("Hostname"))
hypervisor_type = tables.Column("hypervisor_type",
verbose_name=_("Type"))
vcpus_used = tables.Column("vcpus_used",
verbose_name=_("VCPUs (used)"))
vcpus = tables.Column("vcpus",
verbose_name=_("VCPUs (total)"))
memory_used = tables.Column('memory_mb_used',
verbose_name=_("RAM (used)"),
attrs={'data-type': 'size'},
filters=(sizeformat.mb_float_format,))
memory = tables.Column('memory_mb',
verbose_name=_("RAM (total)"),
attrs={'data-type': 'size'},
filters=(sizeformat.mb_float_format,))
local_used = tables.Column('local_gb_used',
verbose_name=_("Local Storage (used)"),
attrs={'data-type': 'size'},
filters=(sizeformat.diskgbformat,))
local = tables.Column('local_gb',
verbose_name=_("Local Storage (total)"),
attrs={'data-type': 'size'},
filters=(sizeformat.diskgbformat,))
running_vms = tables.Column("running_vms",
verbose_name=_("Instances"))
def get_object_id(self, hypervisor):
return "%s_%s" % (hypervisor.id,
hypervisor.hypervisor_hostname)
class Meta(object):
name = "hypervisors"
verbose_name = _("Hypervisors")
class AdminHypervisorInstancesTable(tables.DataTable):
name = tables.WrappingColumn("name",
link="horizon:admin:instances:detail",
verbose_name=_("Instance Name"))
instance_id = tables.Column("uuid",
verbose_name=_("Instance ID"))
def get_object_id(self, server):
return server['uuid']
class Meta(object):
name = "hypervisor_instances"
verbose_name = _("Hypervisor Instances")
| apache-2.0 | -1,137,598,138,281,338,400 | 37.85 | 78 | 0.54601 | false |
rouault/Quantum-GIS | python/plugins/db_manager/db_plugins/postgis/plugins/qgis_topoview/__init__.py | 16 | 11572 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : TopoViewer plugin for DB Manager
Description : Create a project to display topology schema on Qgis
Date : Sep 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : [email protected]
Based on qgis_pgis_topoview by Sandro Santilli <[email protected]>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import str
from qgis.PyQt.QtWidgets import QAction
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtGui import QIcon
from qgis.core import Qgis, QgsProject, QgsVectorLayer, QgsWkbTypes, QgsLayerTreeGroup
from qgis.gui import QgsMessageBar
import os
current_path = os.path.dirname(__file__)
# The load function is called when the "db" database or either one of its
# children db objects (table o schema) is selected by the user.
# @param db is the selected database
# @param mainwindow is the DBManager mainwindow
def load(db, mainwindow):
# check whether the selected database supports topology
# (search for topology.topology)
sql = u"""SELECT count(*)
FROM pg_class AS cls JOIN pg_namespace AS nsp ON nsp.oid = cls.relnamespace
WHERE cls.relname = 'topology' AND nsp.nspname = 'topology'"""
c = db.connector._get_cursor()
db.connector._execute(c, sql)
res = db.connector._fetchone(c)
if res is None or int(res[0]) <= 0:
return
# add the action to the DBManager menu
action = QAction(QIcon(), "&TopoViewer", db)
mainwindow.registerAction(action, "&Schema", run)
# The run function is called once the user clicks on the action TopoViewer
# (look above at the load function) from the DBManager menu/toolbar.
# @param item is the selected db item (either db, schema or table)
# @param action is the clicked action on the DBManager menu/toolbar
# @param mainwindow is the DBManager mainwindow
def run(item, action, mainwindow):
db = item.database()
uri = db.uri()
iface = mainwindow.iface
quoteId = db.connector.quoteId
quoteStr = db.connector.quoteString
# check if the selected item is a topology schema
isTopoSchema = False
if not hasattr(item, 'schema'):
mainwindow.infoBar.pushMessage("Invalid topology", u'Select a topology schema to continue.', Qgis.Info,
mainwindow.iface.messageTimeout())
return False
if item.schema() is not None:
sql = u"SELECT srid FROM topology.topology WHERE name = %s" % quoteStr(item.schema().name)
c = db.connector._get_cursor()
db.connector._execute(c, sql)
res = db.connector._fetchone(c)
isTopoSchema = res is not None
if not isTopoSchema:
mainwindow.infoBar.pushMessage("Invalid topology",
u'Schema "{0}" is not registered in topology.topology.'.format(
item.schema().name), Qgis.Warning,
mainwindow.iface.messageTimeout())
return False
if (res[0] < 0):
mainwindow.infoBar.pushMessage("WARNING", u'Topology "{0}" is registered as having a srid of {1} in topology.topology, we will assume 0 (for unknown)'.format(item.schema().name, res[0]), Qgis.Warning, mainwindow.iface.messageTimeout())
toposrid = '0'
else:
toposrid = str(res[0])
# load layers into the current project
toponame = item.schema().name
template_dir = os.path.join(current_path, 'templates')
# do not refresh the canvas until all the layers are added
wasFrozen = iface.mapCanvas().isFrozen()
iface.mapCanvas().freeze()
try:
provider = db.dbplugin().providerName()
uri = db.uri()
# Force use of estimated metadata (topologies can be big)
uri.setUseEstimatedMetadata(True)
# FACES
# face mbr
uri.setDataSource(toponame, 'face', 'mbr', '', 'face_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Polygon)
layerFaceMbr = QgsVectorLayer(uri.uri(False), u'%s.face_mbr' % toponame, provider)
layerFaceMbr.loadNamedStyle(os.path.join(template_dir, 'face_mbr.qml'))
face_extent = layerFaceMbr.extent()
# face geometry
sql = u'SELECT face_id, topology.ST_GetFaceGeometry(%s,' \
'face_id)::geometry(polygon, %s) as geom ' \
'FROM %s.face WHERE face_id > 0' % \
(quoteStr(toponame), toposrid, quoteId(toponame))
uri.setDataSource('', u'(%s\n)' % sql, 'geom', '', 'face_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Polygon)
layerFaceGeom = QgsVectorLayer(uri.uri(False), u'%s.face' % toponame, provider)
layerFaceGeom.setExtent(face_extent)
layerFaceGeom.loadNamedStyle(os.path.join(template_dir, 'face.qml'))
# face_seed
sql = u'SELECT face_id, ST_PointOnSurface(' \
'topology.ST_GetFaceGeometry(%s,' \
'face_id))::geometry(point, %s) as geom ' \
'FROM %s.face WHERE face_id > 0' % \
(quoteStr(toponame), toposrid, quoteId(toponame))
uri.setDataSource('', u'(%s)' % sql, 'geom', '', 'face_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Point)
layerFaceSeed = QgsVectorLayer(uri.uri(False), u'%s.face_seed' % toponame, provider)
layerFaceSeed.setExtent(face_extent)
layerFaceSeed.loadNamedStyle(os.path.join(template_dir, 'face_seed.qml'))
# TODO: add polygon0, polygon1 and polygon2 ?
# NODES
# node
uri.setDataSource(toponame, 'node', 'geom', '', 'node_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Point)
layerNode = QgsVectorLayer(uri.uri(False), u'%s.node' % toponame, provider)
layerNode.loadNamedStyle(os.path.join(template_dir, 'node.qml'))
node_extent = layerNode.extent()
# node labels
uri.setDataSource(toponame, 'node', 'geom', '', 'node_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Point)
layerNodeLabel = QgsVectorLayer(uri.uri(False), u'%s.node_id' % toponame, provider)
layerNodeLabel.setExtent(node_extent)
layerNodeLabel.loadNamedStyle(os.path.join(template_dir, 'node_label.qml'))
# EDGES
# edge
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerEdge = QgsVectorLayer(uri.uri(False), u'%s.edge' % toponame, provider)
edge_extent = layerEdge.extent()
# directed edge
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerDirectedEdge = QgsVectorLayer(uri.uri(False), u'%s.directed_edge' % toponame, provider)
layerDirectedEdge.setExtent(edge_extent)
layerDirectedEdge.loadNamedStyle(os.path.join(template_dir, 'edge.qml'))
# edge labels
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerEdgeLabel = QgsVectorLayer(uri.uri(False), u'%s.edge_id' % toponame, provider)
layerEdgeLabel.setExtent(edge_extent)
layerEdgeLabel.loadNamedStyle(os.path.join(template_dir, 'edge_label.qml'))
# face_left
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerFaceLeft = QgsVectorLayer(uri.uri(False), u'%s.face_left' % toponame, provider)
layerFaceLeft.setExtent(edge_extent)
layerFaceLeft.loadNamedStyle(os.path.join(template_dir, 'face_left.qml'))
# face_right
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerFaceRight = QgsVectorLayer(uri.uri(False), u'%s.face_right' % toponame, provider)
layerFaceRight.setExtent(edge_extent)
layerFaceRight.loadNamedStyle(os.path.join(template_dir, 'face_right.qml'))
# next_left
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerNextLeft = QgsVectorLayer(uri.uri(False), u'%s.next_left' % toponame, provider)
layerNextLeft.setExtent(edge_extent)
layerNextLeft.loadNamedStyle(os.path.join(template_dir, 'next_left.qml'))
# next_right
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
layerNextRight = QgsVectorLayer(uri.uri(False), u'%s.next_right' % toponame, provider)
layerNextRight.setExtent(edge_extent)
layerNextRight.loadNamedStyle(os.path.join(template_dir, 'next_right.qml'))
# Add layers to the layer tree
faceLayers = [layerFaceMbr, layerFaceGeom, layerFaceSeed]
nodeLayers = [layerNode, layerNodeLabel]
edgeLayers = [layerEdge, layerDirectedEdge, layerEdgeLabel
                      # , layerFaceLeft, layerFaceRight, layerNextLeft, layerNextRight
]
        # Register all layers with the project (hidden from the legend root)
        # so the layer-tree groups below reference valid project layers.
        QgsProject.instance().addMapLayers(faceLayers, False)
        QgsProject.instance().addMapLayers(nodeLayers, False)
        QgsProject.instance().addMapLayers(edgeLayers, False)
groupFaces = QgsLayerTreeGroup(u'Faces')
for layer in faceLayers:
nodeLayer = groupFaces.addLayer(layer)
nodeLayer.setItemVisibilityChecked(False)
nodeLayer.setExpanded(False)
groupNodes = QgsLayerTreeGroup(u'Nodes')
for layer in nodeLayers:
nodeLayer = groupNodes.addLayer(layer)
nodeLayer.setItemVisibilityChecked(False)
nodeLayer.setExpanded(False)
groupEdges = QgsLayerTreeGroup(u'Edges')
for layer in edgeLayers:
nodeLayer = groupEdges.addLayer(layer)
nodeLayer.setItemVisibilityChecked(False)
nodeLayer.setExpanded(False)
supergroup = QgsLayerTreeGroup(u'Topology "%s"' % toponame)
supergroup.insertChildNodes(-1, [groupFaces, groupNodes, groupEdges])
QgsProject.instance().layerTreeRoot().addChildNode(supergroup)
finally:
# Set canvas extent to topology extent, if not yet initialized
canvas = iface.mapCanvas()
if (canvas.fullExtent().isNull()):
ext = node_extent
ext.combineExtentWith(edge_extent)
# Grow by 1/20 of largest side
ext = ext.buffered(max(ext.width(), ext.height()) / 20)
canvas.setExtent(ext)
# restore canvas render flag
if not wasFrozen:
iface.mapCanvas().freeze(False)
return True
| gpl-2.0 | 5,016,330,985,982,223,000 | 41.388278 | 243 | 0.614155 | false |
dotancohen/fleshwound | fleshwound.py | 1 | 3040 | #!/usr/bin/python3
"""
Python script for finding PHP vulnerabilities and coding errors.
TODO
KNOWN ISSUES
Only supports variable assignment on a single line.
Only supports function declaration on a single line.
Does not support code outside functions after function declarations start.
Does not support variables defined in include()ed or require()ed files.
@author Dotan Cohen
@version 2013-12-26
"""
import os
import re
import shutil
import sys
from pprint import pprint
filetypes_to_read = ['.php']
files_to_avoid = []
def parse_file(filename):
	variable_match = re.compile(r'(\$[\w]+\s*={0,3})')
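	# Illustrative matches for the pattern above (comment only):
	#   "$foo = 1"   -> "$foo ="   (assignment)
	#   "$a == $b"   -> "$a =="    (comparison)
	#   "echo $qux;" -> "$qux"     (plain use)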
original_defined_variables = ['$_GET', '$_POST', '$_REQUEST', '$_SERVER', '$argv']
defined_variables = original_defined_variables[:]
defined_variables_stack = []
line_number = 0
printedFilename = False
try:
input = open(filename, 'r')
except Exception as e:
print("\nFile could not be opened: %s\n%s" % (filename, e))
"""
IOError: [Errno 2] No such file or directory: 'index']
Thrown when trying to parse file that does not exits ('index' instead of '.git/index')
Got some utf8 error when parsing a binary file
"""
for line in input:
line_number +=1
inFunctionDeclaration = False
		#if re.search(r'\s*(static)?\s*(public)?\s*(static)?\s*function\s*[\w]*\s*\(', line):
		if re.search(r'\s*function\s*[\w]*\s*\(', line):
inFunctionDeclaration = True
defined_variables = original_defined_variables[:]
try:
			for found_var in variable_match.findall(line):
if inFunctionDeclaration:
defined_variables.append(found_var.strip('=\t '))
continue
				if re.search(r'^\s*global\s*', line):
defined_variables.append(found_var.strip('=\t '))
continue
if found_var.strip('=\t ') in defined_variables:
continue
if found_var.endswith('=='):
printedFilename = print_error_line(found_var, line, line_number, filename, printedFilename)
continue
if found_var.endswith('='):
defined_variables.append(found_var[:-1].strip())
continue
printedFilename = print_error_line(found_var, line, line_number, filename, printedFilename)
		except AttributeError:
			# Defensive only; findall() above does not raise AttributeError.
			pass
		"""
		Add: Create a new stack when going into a function declaration
		"""
input.close()
return True
def print_error_line(found_var, line, line_number, filename, printedFilename):
found_var = found_var.strip('=\t ')
if not printedFilename:
print("\n\n - File: %s\n\n" % (filename,))
print("Use of undefined var: %s on line %i" % (found_var, line_number,))
print(line)
return True
def main(files_to_avoid, filetypes_to_read):
for rootDir, dirs, files in os.walk(os.getcwd()):
for f in files:
if not f in files_to_avoid and any(f.endswith(x) for x in filetypes_to_read):
parse_file(os.path.join(rootDir, f))
return True
if __name__ == '__main__':
main(files_to_avoid, filetypes_to_read)
| apache-2.0 | 4,311,094,756,140,839,000 | 21.189781 | 96 | 0.677632 | false |
benbox69/pyload | module/plugins/crypter/QuickshareCzFolder.py | 15 | 1039 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Crypter import Crypter
class QuickshareCzFolder(Crypter):
__name__ = "QuickshareCzFolder"
__type__ = "crypter"
__version__ = "0.12"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?quickshare\.cz/slozka-\d+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description__ = """Quickshare.cz folder decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")]
FOLDER_PATTERN = r'<textarea.*?>(.*?)</textarea>'
LINK_PATTERN = r'(http://www\.quickshare\.cz/\S+)'
def decrypt(self, pyfile):
html = self.load(pyfile.url)
m = re.search(self.FOLDER_PATTERN, html, re.S)
if m is None:
self.error(_("FOLDER_PATTERN not found"))
self.urls.extend(re.findall(self.LINK_PATTERN, m.group(1)))
| gpl-3.0 | 577,424,268,478,965,400 | 31.46875 | 95 | 0.570741 | false |
defivelo/django-parler | example/example/settings.py | 6 | 3432 | # Django settings for example project.
import django
from os.path import join, dirname, realpath
SRC_DIR = dirname(dirname(realpath(__file__)))
# Add parent path,
# Allow starting the app without installing the module.
import sys
sys.path.insert(0, dirname(SRC_DIR))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': SRC_DIR + '/example.db',
}
}
TIME_ZONE = 'Europe/Amsterdam'
LANGUAGE_CODE = 'en'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
MEDIA_ROOT = join(dirname(__file__), "media")
MEDIA_URL = '/media/'
STATIC_ROOT = join(dirname(__file__), "static")
STATIC_URL = '/static/'
STATICFILES_DIRS = ()
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '-#@bi6bue%#1j)6+4b&#i0g-*xro@%f@_#zwv=2-g_@n3n_kj5'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware', # Inserted language switcher, easy way to have multiple frontend languages.
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.request',
'django.core.context_processors.static',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
)
ROOT_URLCONF = 'example.urls'
TEMPLATE_DIRS = (
join(dirname(__file__), "templates"),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Apps
'article',
'theme1',
# Dependencies
'parler',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'filters': ['require_debug_false'],
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
if django.VERSION >= (1,7):
TEST_RUNNER = 'django.test.runner.DiscoverRunner' # silence system checks
PARLER_DEFAULT_LANGUAGE = 'en'
PARLER_LANGUAGES = {
1: (
{'code': 'en'},
{'code': 'de'},
{'code': 'fr'},
{'code': 'nl'},
{'code': 'es'},
),
'default': {
#'fallbacks': ['en'],
}
}
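# Illustrative note: each key in PARLER_LANGUAGES maps a SITE_ID to its
# enabled translation languages; the 'default' block holds per-language
# defaults such as 'fallbacks' (commented out above).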
| apache-2.0 | -7,059,855,951,003,056,000 | 23.869565 | 126 | 0.634615 | false |
ivirshup/bioconda-recipes | recipes/biopet-sampleconfig/0.1/biopet-sampleconfig.py | 24 | 3375 | #!/usr/bin/env python
#
# Wrapper script for starting the biopet-sampleconfig JAR package
#
# This script is written for use with the Conda package manager and is copied
# from the peptide-shaker wrapper. Only the parameters are changed.
# (https://github.com/bioconda/bioconda-recipes/blob/master/recipes/peptide-shaker/peptide-shaker.py)
#
# This file was automatically generated by the sbt-bioconda plugin.
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'SampleConfig-assembly-0.1.jar'
default_jvm_mem_opts = []
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.dirname(os.path.realpath(path))
def java_executable():
"""Return the executable name of the Java interpreter."""
java_home = getenv('JAVA_HOME')
java_bin = os.path.join('bin', 'java')
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
return 'java'
def jvm_opts(argv):
"""Construct list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
      (memory_options, prop_options, passthrough_options, exec_dir)
"""
mem_opts = []
prop_opts = []
pass_args = []
exec_dir = None
for arg in argv:
if arg.startswith('-D'):
prop_opts.append(arg)
elif arg.startswith('-XX'):
prop_opts.append(arg)
elif arg.startswith('-Xm'):
mem_opts.append(arg)
elif arg.startswith('--exec_dir='):
exec_dir = arg.split('=')[1].strip('"').strip("'")
if not os.path.exists(exec_dir):
shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
else:
pass_args.append(arg)
# In the original shell script the test coded below read:
# if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
# To reproduce the behaviour of the above shell code fragment
# it is important to explictly check for equality with None
# in the second condition, so a null envar value counts as True!
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_jvm_mem_opts
return (mem_opts, prop_opts, pass_args, exec_dir)
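# Illustrative only (hypothetical argv values): how jvm_opts() splits args:
#   jvm_opts(['-Xmx4g', '-Dlog.level=INFO', '--exec_dir=/tmp/sc', 'in.tsv'])
#   # -> (['-Xmx4g'], ['-Dlog.level=INFO'], ['in.tsv'], '/tmp/sc')
#   # (side effect: copies the jar distribution into exec_dir if missing)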
def main():
"""
PeptideShaker updates files relative to the path of the jar file.
In a multiuser setting, the option --exec_dir="exec_dir"
can be used as the location for the peptide-shaker distribution.
    If the exec_dir does not exist,
we copy the jar file, lib, and resources to the exec_dir directory.
"""
java = java_executable()
(mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
| mit | 2,883,705,277,276,371,000 | 30.542056 | 101 | 0.645037 | false |
xifle/home-assistant | script/gen_requirements_all.py | 6 | 3683 | #!/usr/bin/env python3
"""Generate an updated requirements_all.txt."""
import importlib
import os
import pkgutil
import re
import sys
COMMENT_REQUIREMENTS = (
'RPi.GPIO',
'rpi-rf',
'Adafruit_Python_DHT',
'fritzconnection',
'pybluez',
'bluepy',
'python-lirc',
'gattlib',
'pyuserinput',
'evdev',
'pycups',
)
IGNORE_PACKAGES = (
'homeassistant.components.recorder.models',
)
def explore_module(package, explore_children):
"""Explore the modules."""
module = importlib.import_module(package)
found = []
if not hasattr(module, '__path__'):
return found
for _, name, _ in pkgutil.iter_modules(module.__path__, package + '.'):
found.append(name)
if explore_children:
found.extend(explore_module(name, False))
return found
def core_requirements():
"""Gather core requirements out of setup.py."""
with open('setup.py') as inp:
reqs_raw = re.search(
r'REQUIRES = \[(.*?)\]', inp.read(), re.S).group(1)
return re.findall(r"'(.*?)'", reqs_raw)
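# e.g. "REQUIRES = ['requests>=2.0', 'pyyaml>=3.11']" in setup.py yields
# ['requests>=2.0', 'pyyaml>=3.11'] (illustrative values only).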
def comment_requirement(req):
"""Some requirements don't install on all systems."""
return any(ign in req for ign in COMMENT_REQUIREMENTS)
def gather_modules():
"""Collect the information and construct the output."""
reqs = {}
errors = []
output = []
for package in sorted(explore_module('homeassistant.components', True) +
explore_module('homeassistant.scripts', True)):
try:
module = importlib.import_module(package)
except ImportError:
if package not in IGNORE_PACKAGES:
errors.append(package)
continue
if not getattr(module, 'REQUIREMENTS', None):
continue
for req in module.REQUIREMENTS:
reqs.setdefault(req, []).append(package)
for key in reqs:
reqs[key] = sorted(reqs[key],
key=lambda name: (len(name.split('.')), name))
if errors:
print("******* ERROR")
print("Errors while importing: ", ', '.join(errors))
print("Make sure you import 3rd party libraries inside methods.")
return None
output.append('# Home Assistant core')
output.append('\n')
output.append('\n'.join(core_requirements()))
output.append('\n')
for pkg, requirements in sorted(reqs.items(), key=lambda item: item[0]):
for req in sorted(requirements,
key=lambda name: (len(name.split('.')), name)):
output.append('\n# {}'.format(req))
if comment_requirement(pkg):
output.append('\n# {}\n'.format(pkg))
else:
output.append('\n{}\n'.format(pkg))
return ''.join(output)
def write_file(data):
"""Write the modules to the requirements_all.txt."""
with open('requirements_all.txt', 'w+') as req_file:
req_file.write(data)
def validate_file(data):
"""Validate if requirements_all.txt is up to date."""
with open('requirements_all.txt', 'r') as req_file:
return data == ''.join(req_file)
def main():
"""Main section of the script."""
if not os.path.isfile('requirements_all.txt'):
print('Run this from HA root dir')
return
data = gather_modules()
if data is None:
sys.exit(1)
if sys.argv[-1] == 'validate':
if validate_file(data):
sys.exit(0)
print("******* ERROR")
print("requirements_all.txt is not up to date")
print("Please run script/gen_requirements_all.py")
sys.exit(1)
write_file(data)
if __name__ == '__main__':
main()
| mit | -2,677,223,878,769,526,000 | 24.755245 | 76 | 0.579962 | false |
jeffery-do/Vizdoombot | doom/lib/python3.5/site-packages/scipy/optimize/_root.py | 109 | 26007 | """
Unified interfaces to root finding algorithms.
Functions
---------
- root : find a root of a vector function.
"""
from __future__ import division, print_function, absolute_import
__all__ = ['root']
import numpy as np
from scipy._lib.six import callable
from warnings import warn
from .optimize import MemoizeJac, OptimizeResult, _check_unknown_options
from .minpack import _root_hybr, leastsq
from ._spectral import _root_df_sane
from . import nonlin
def root(fun, x0, args=(), method='hybr', jac=None, tol=None, callback=None,
options=None):
"""
Find a root of a vector function.
Parameters
----------
fun : callable
A vector function to find a root of.
x0 : ndarray
Initial guess.
args : tuple, optional
Extra arguments passed to the objective function and its Jacobian.
method : str, optional
Type of solver. Should be one of
- 'hybr' :ref:`(see here) <optimize.root-hybr>`
- 'lm' :ref:`(see here) <optimize.root-lm>`
- 'broyden1' :ref:`(see here) <optimize.root-broyden1>`
- 'broyden2' :ref:`(see here) <optimize.root-broyden2>`
- 'anderson' :ref:`(see here) <optimize.root-anderson>`
- 'linearmixing' :ref:`(see here) <optimize.root-linearmixing>`
- 'diagbroyden' :ref:`(see here) <optimize.root-diagbroyden>`
- 'excitingmixing' :ref:`(see here) <optimize.root-excitingmixing>`
- 'krylov' :ref:`(see here) <optimize.root-krylov>`
- 'df-sane' :ref:`(see here) <optimize.root-dfsane>`
jac : bool or callable, optional
If `jac` is a Boolean and is True, `fun` is assumed to return the
value of Jacobian along with the objective function. If False, the
Jacobian will be estimated numerically.
`jac` can also be a callable returning the Jacobian of `fun`. In
this case, it must accept the same arguments as `fun`.
tol : float, optional
Tolerance for termination. For detailed control, use solver-specific
options.
callback : function, optional
Optional callback function. It is called on every iteration as
``callback(x, f)`` where `x` is the current solution and `f`
the corresponding residual. For all methods but 'hybr' and 'lm'.
options : dict, optional
A dictionary of solver options. E.g. `xtol` or `maxiter`, see
:obj:`show_options()` for details.
Returns
-------
sol : OptimizeResult
The solution represented as a ``OptimizeResult`` object.
Important attributes are: ``x`` the solution array, ``success`` a
Boolean flag indicating if the algorithm exited successfully and
``message`` which describes the cause of the termination. See
`OptimizeResult` for a description of other attributes.
See also
--------
show_options : Additional options accepted by the solvers
Notes
-----
This section describes the available solvers that can be selected by the
'method' parameter. The default method is *hybr*.
Method *hybr* uses a modification of the Powell hybrid method as
implemented in MINPACK [1]_.
Method *lm* solves the system of nonlinear equations in a least squares
sense using a modification of the Levenberg-Marquardt algorithm as
implemented in MINPACK [1]_.
Method *df-sane* is a derivative-free spectral method. [3]_
Methods *broyden1*, *broyden2*, *anderson*, *linearmixing*,
*diagbroyden*, *excitingmixing*, *krylov* are inexact Newton methods,
with backtracking or full line searches [2]_. Each method corresponds
to a particular Jacobian approximations. See `nonlin` for details.
- Method *broyden1* uses Broyden's first Jacobian approximation, it is
known as Broyden's good method.
- Method *broyden2* uses Broyden's second Jacobian approximation, it
is known as Broyden's bad method.
- Method *anderson* uses (extended) Anderson mixing.
    - Method *krylov* uses Krylov approximation for inverse Jacobian. It
is suitable for large-scale problem.
- Method *diagbroyden* uses diagonal Broyden Jacobian approximation.
- Method *linearmixing* uses a scalar Jacobian approximation.
- Method *excitingmixing* uses a tuned diagonal Jacobian
approximation.
.. warning::
The algorithms implemented for methods *diagbroyden*,
*linearmixing* and *excitingmixing* may be useful for specific
problems, but whether they will work may depend strongly on the
problem.
.. versionadded:: 0.11.0
References
----------
.. [1] More, Jorge J., Burton S. Garbow, and Kenneth E. Hillstrom.
1980. User Guide for MINPACK-1.
.. [2] C. T. Kelley. 1995. Iterative Methods for Linear and Nonlinear
Equations. Society for Industrial and Applied Mathematics.
<http://www.siam.org/books/kelley/>
.. [3] W. La Cruz, J.M. Martinez, M. Raydan. Math. Comp. 75, 1429 (2006).
Examples
--------
The following functions define a system of nonlinear equations and its
jacobian.
>>> def fun(x):
... return [x[0] + 0.5 * (x[0] - x[1])**3 - 1.0,
... 0.5 * (x[1] - x[0])**3 + x[1]]
>>> def jac(x):
... return np.array([[1 + 1.5 * (x[0] - x[1])**2,
... -1.5 * (x[0] - x[1])**2],
... [-1.5 * (x[1] - x[0])**2,
... 1 + 1.5 * (x[1] - x[0])**2]])
A solution can be obtained as follows.
>>> from scipy import optimize
>>> sol = optimize.root(fun, [0, 0], jac=jac, method='hybr')
>>> sol.x
array([ 0.8411639, 0.1588361])
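    The same system can also be solved with one of the inexact Newton
    methods, for example *broyden1* (illustrative sketch, marked with
    ``# doctest: +SKIP`` because convergence depends on the problem):
    >>> sol = optimize.root(fun, [0, 0], method='broyden1')  # doctest: +SKIP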
"""
if not isinstance(args, tuple):
args = (args,)
meth = method.lower()
if options is None:
options = {}
if callback is not None and meth in ('hybr', 'lm'):
warn('Method %s does not accept callback.' % method,
RuntimeWarning)
# fun also returns the jacobian
if not callable(jac) and meth in ('hybr', 'lm'):
if bool(jac):
fun = MemoizeJac(fun)
jac = fun.derivative
else:
jac = None
# set default tolerances
if tol is not None:
options = dict(options)
if meth in ('hybr', 'lm'):
options.setdefault('xtol', tol)
elif meth in ('df-sane',):
options.setdefault('ftol', tol)
elif meth in ('broyden1', 'broyden2', 'anderson', 'linearmixing',
'diagbroyden', 'excitingmixing', 'krylov'):
options.setdefault('xtol', tol)
options.setdefault('xatol', np.inf)
options.setdefault('ftol', np.inf)
options.setdefault('fatol', np.inf)
if meth == 'hybr':
sol = _root_hybr(fun, x0, args=args, jac=jac, **options)
elif meth == 'lm':
sol = _root_leastsq(fun, x0, args=args, jac=jac, **options)
elif meth == 'df-sane':
_warn_jac_unused(jac, method)
sol = _root_df_sane(fun, x0, args=args, callback=callback,
**options)
elif meth in ('broyden1', 'broyden2', 'anderson', 'linearmixing',
'diagbroyden', 'excitingmixing', 'krylov'):
_warn_jac_unused(jac, method)
sol = _root_nonlin_solve(fun, x0, args=args, jac=jac,
_method=meth, _callback=callback,
**options)
else:
raise ValueError('Unknown solver %s' % method)
return sol
def _warn_jac_unused(jac, method):
if jac is not None:
warn('Method %s does not use the jacobian (jac).' % (method,),
RuntimeWarning)
def _root_leastsq(func, x0, args=(), jac=None,
col_deriv=0, xtol=1.49012e-08, ftol=1.49012e-08,
gtol=0.0, maxiter=0, eps=0.0, factor=100, diag=None,
**unknown_options):
"""
Solve for least squares with Levenberg-Marquardt
Options
-------
col_deriv : bool
non-zero to specify that the Jacobian function computes derivatives
down the columns (faster, because there is no transpose operation).
ftol : float
Relative error desired in the sum of squares.
xtol : float
Relative error desired in the approximate solution.
gtol : float
Orthogonality desired between the function vector and the columns
of the Jacobian.
maxiter : int
The maximum number of calls to the function. If zero, then
100*(N+1) is the maximum where N is the number of elements in x0.
    eps : float
A suitable step length for the forward-difference approximation of
the Jacobian (for Dfun=None). If epsfcn is less than the machine
precision, it is assumed that the relative errors in the functions
are of the order of the machine precision.
factor : float
A parameter determining the initial step bound
(``factor * || diag * x||``). Should be in interval ``(0.1, 100)``.
diag : sequence
N positive entries that serve as a scale factors for the variables.
"""
_check_unknown_options(unknown_options)
x, cov_x, info, msg, ier = leastsq(func, x0, args=args, Dfun=jac,
full_output=True,
col_deriv=col_deriv, xtol=xtol,
ftol=ftol, gtol=gtol,
maxfev=maxiter, epsfcn=eps,
factor=factor, diag=diag)
sol = OptimizeResult(x=x, message=msg, status=ier,
success=ier in (1, 2, 3, 4), cov_x=cov_x,
fun=info.pop('fvec'))
sol.update(info)
return sol
def _root_nonlin_solve(func, x0, args=(), jac=None,
_callback=None, _method=None,
nit=None, disp=False, maxiter=None,
ftol=None, fatol=None, xtol=None, xatol=None,
tol_norm=None, line_search='armijo', jac_options=None,
**unknown_options):
_check_unknown_options(unknown_options)
f_tol = fatol
f_rtol = ftol
x_tol = xatol
x_rtol = xtol
verbose = disp
if jac_options is None:
jac_options = dict()
jacobian = {'broyden1': nonlin.BroydenFirst,
'broyden2': nonlin.BroydenSecond,
'anderson': nonlin.Anderson,
'linearmixing': nonlin.LinearMixing,
'diagbroyden': nonlin.DiagBroyden,
'excitingmixing': nonlin.ExcitingMixing,
'krylov': nonlin.KrylovJacobian
}[_method]
if args:
if jac:
def f(x):
return func(x, *args)[0]
else:
def f(x):
return func(x, *args)
else:
f = func
x, info = nonlin.nonlin_solve(f, x0, jacobian=jacobian(**jac_options),
iter=nit, verbose=verbose,
maxiter=maxiter, f_tol=f_tol,
f_rtol=f_rtol, x_tol=x_tol,
x_rtol=x_rtol, tol_norm=tol_norm,
line_search=line_search,
callback=_callback, full_output=True,
raise_exception=False)
sol = OptimizeResult(x=x)
sol.update(info)
return sol
def _root_broyden1_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, `NoConvergence` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
alpha : float, optional
Initial guess for the Jacobian is (-1/alpha).
reduction_method : str or tuple, optional
Method used in ensuring that the rank of the Broyden
matrix stays low. Can either be a string giving the
name of the method, or a tuple of the form ``(method,
param1, param2, ...)`` that gives the name of the
method and values for additional parameters.
Methods available:
- ``restart``: drop all matrix columns. Has no
extra parameters.
- ``simple``: drop oldest matrix column. Has no
extra parameters.
- ``svd``: keep only the most significant SVD
components.
Extra parameters:
- ``to_retain``: number of SVD components to
retain when rank reduction is done.
Default is ``max_rank - 2``.
max_rank : int, optional
Maximum rank for the Broyden matrix.
Default is infinity (ie., no rank reduction).
"""
pass
def _root_broyden2_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, `NoConvergence` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
alpha : float, optional
Initial guess for the Jacobian is (-1/alpha).
reduction_method : str or tuple, optional
Method used in ensuring that the rank of the Broyden
matrix stays low. Can either be a string giving the
name of the method, or a tuple of the form ``(method,
param1, param2, ...)`` that gives the name of the
method and values for additional parameters.
Methods available:
- ``restart``: drop all matrix columns. Has no
extra parameters.
- ``simple``: drop oldest matrix column. Has no
extra parameters.
- ``svd``: keep only the most significant SVD
components.
Extra parameters:
- ``to_retain``: number of SVD components to
retain when rank reduction is done.
Default is ``max_rank - 2``.
max_rank : int, optional
Maximum rank for the Broyden matrix.
Default is infinity (ie., no rank reduction).
"""
pass
def _root_anderson_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, `NoConvergence` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
alpha : float, optional
Initial guess for the Jacobian is (-1/alpha).
M : float, optional
Number of previous vectors to retain. Defaults to 5.
w0 : float, optional
Regularization parameter for numerical stability.
Compared to unity, good values of the order of 0.01.
"""
pass
def _root_linearmixing_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, ``NoConvergence`` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
alpha : float, optional
initial guess for the jacobian is (-1/alpha).
"""
pass
def _root_diagbroyden_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, `NoConvergence` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
alpha : float, optional
initial guess for the jacobian is (-1/alpha).
"""
pass
def _root_excitingmixing_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, `NoConvergence` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
alpha : float, optional
Initial Jacobian approximation is (-1/alpha).
alphamax : float, optional
The entries of the diagonal Jacobian are kept in the range
``[alpha, alphamax]``.
"""
pass
def _root_krylov_doc():
"""
Options
-------
nit : int, optional
Number of iterations to make. If omitted (default), make as many
as required to meet tolerances.
disp : bool, optional
Print status to stdout on every iteration.
maxiter : int, optional
Maximum number of iterations to make. If more are needed to
meet convergence, `NoConvergence` is raised.
ftol : float, optional
Relative tolerance for the residual. If omitted, not used.
fatol : float, optional
Absolute tolerance (in max-norm) for the residual.
If omitted, default is 6e-6.
xtol : float, optional
Relative minimum step size. If omitted, not used.
xatol : float, optional
Absolute minimum step size, as determined from the Jacobian
approximation. If the step size is smaller than this, optimization
is terminated as successful. If omitted, not used.
tol_norm : function(vector) -> scalar, optional
Norm to use in convergence check. Default is the maximum norm.
line_search : {None, 'armijo' (default), 'wolfe'}, optional
Which type of a line search to use to determine the step size in
the direction given by the Jacobian approximation. Defaults to
'armijo'.
jac_options : dict, optional
Options for the respective Jacobian approximation.
rdiff : float, optional
Relative step size to use in numerical differentiation.
method : {'lgmres', 'gmres', 'bicgstab', 'cgs', 'minres'} or function
Krylov method to use to approximate the Jacobian.
Can be a string, or a function implementing the same
interface as the iterative solvers in
`scipy.sparse.linalg`.
The default is `scipy.sparse.linalg.lgmres`.
inner_M : LinearOperator or InverseJacobian
Preconditioner for the inner Krylov iteration.
Note that you can use also inverse Jacobians as (adaptive)
preconditioners. For example,
>>> jac = BroydenFirst()
            >>> kjac = KrylovJacobian(inner_M=jac.inverse)
If the preconditioner has a method named 'update', it will
be called as ``update(x, f)`` after each nonlinear step,
with ``x`` giving the current point, and ``f`` the current
function value.
inner_tol, inner_maxiter, ...
Parameters to pass on to the "inner" Krylov solver.
See `scipy.sparse.linalg.gmres` for details.
outer_k : int, optional
Size of the subspace kept across LGMRES nonlinear
iterations.
See `scipy.sparse.linalg.lgmres` for details.
"""
pass
| mit | -7,290,366,536,019,230,000 | 39.699531 | 81 | 0.603876 | false |
neerajvashistha/pa-dude | lib/python2.7/site-packages/django/contrib/gis/gdal/prototypes/raster.py | 320 | 4013 | """
This module houses the ctypes function prototypes for GDAL DataSource (raster)
related data structures.
"""
from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from functools import partial
from django.contrib.gis.gdal.libgdal import GDAL_VERSION, std_call
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, int_output, void_output,
voidptr_output,
)
# For more detail about c function names and definitions see
# http://gdal.org/gdal_8h.html
# http://gdal.org/gdalwarper_8h.html
# Prepare partial functions that use cpl error codes
void_output = partial(void_output, cpl=True)
const_string_output = partial(const_string_output, cpl=True)
double_output = partial(double_output, cpl=True)
# Raster Driver Routines
register_all = void_output(std_call('GDALAllRegister'), [])
get_driver = voidptr_output(std_call('GDALGetDriver'), [c_int])
get_driver_by_name = voidptr_output(std_call('GDALGetDriverByName'), [c_char_p], errcheck=False)
get_driver_count = int_output(std_call('GDALGetDriverCount'), [])
get_driver_description = const_string_output(std_call('GDALGetDescription'), [c_void_p])
# Raster Data Source Routines
create_ds = voidptr_output(std_call('GDALCreate'), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p])
open_ds = voidptr_output(std_call('GDALOpen'), [c_char_p, c_int])
if GDAL_VERSION >= (2, 0):
close_ds = voidptr_output(std_call('GDALClose'), [c_void_p])
else:
close_ds = void_output(std_call('GDALClose'), [c_void_p])
flush_ds = int_output(std_call('GDALFlushCache'), [c_void_p])
copy_ds = voidptr_output(std_call('GDALCreateCopy'),
[c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p]
)
add_band_ds = void_output(std_call('GDALAddBand'), [c_void_p, c_int])
get_ds_description = const_string_output(std_call('GDALGetDescription'), [c_void_p])
get_ds_driver = voidptr_output(std_call('GDALGetDatasetDriver'), [c_void_p])
get_ds_xsize = int_output(std_call('GDALGetRasterXSize'), [c_void_p])
get_ds_ysize = int_output(std_call('GDALGetRasterYSize'), [c_void_p])
get_ds_raster_count = int_output(std_call('GDALGetRasterCount'), [c_void_p])
get_ds_raster_band = voidptr_output(std_call('GDALGetRasterBand'), [c_void_p, c_int])
get_ds_projection_ref = const_string_output(std_call('GDALGetProjectionRef'), [c_void_p])
set_ds_projection_ref = void_output(std_call('GDALSetProjection'), [c_void_p, c_char_p])
get_ds_geotransform = void_output(std_call('GDALGetGeoTransform'), [c_void_p, POINTER(c_double * 6)], errcheck=False)
set_ds_geotransform = void_output(std_call('GDALSetGeoTransform'), [c_void_p, POINTER(c_double * 6)])
# Raster Band Routines
band_io = void_output(std_call('GDALRasterIO'),
[c_void_p, c_int, c_int, c_int, c_int, c_int, c_void_p, c_int, c_int, c_int, c_int, c_int]
)
get_band_xsize = int_output(std_call('GDALGetRasterBandXSize'), [c_void_p])
get_band_ysize = int_output(std_call('GDALGetRasterBandYSize'), [c_void_p])
get_band_index = int_output(std_call('GDALGetBandNumber'), [c_void_p])
get_band_description = const_string_output(std_call('GDALGetDescription'), [c_void_p])
get_band_ds = voidptr_output(std_call('GDALGetBandDataset'), [c_void_p])
get_band_datatype = int_output(std_call('GDALGetRasterDataType'), [c_void_p])
get_band_nodata_value = double_output(std_call('GDALGetRasterNoDataValue'), [c_void_p, POINTER(c_int)])
set_band_nodata_value = void_output(std_call('GDALSetRasterNoDataValue'), [c_void_p, c_double])
get_band_minimum = double_output(std_call('GDALGetRasterMinimum'), [c_void_p, POINTER(c_int)])
get_band_maximum = double_output(std_call('GDALGetRasterMaximum'), [c_void_p, POINTER(c_int)])
# Reprojection routine
reproject_image = void_output(std_call('GDALReprojectImage'),
[c_void_p, c_char_p, c_void_p, c_char_p, c_int, c_double, c_double, c_void_p, c_void_p, c_void_p]
)
auto_create_warped_vrt = voidptr_output(std_call('GDALAutoCreateWarpedVRT'),
[c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p]
)
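# Illustrative sketch only (the path below is hypothetical); these prototypes
# are normally consumed by the GDALRaster/GDALBand wrapper classes:
#   register_all()
#   ds = open_ds(b'/path/to/raster.tif', 0)  # 0 == GA_ReadOnly
#   width, height = get_ds_xsize(ds), get_ds_ysize(ds)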
| mit | -5,602,105,714,580,846,000 | 53.22973 | 117 | 0.720658 | false |
latusrepo/propmtime | test_propmtime/test_run.py | 2 | 1532 |
import time
import os
from balsa import get_logger
import propmtime
import test_propmtime
log = get_logger("test_propmtime")
def get_mtimes(root_folder, file_path):
root_mtime = os.path.getmtime(root_folder)
file_mtime = os.path.getmtime(file_path)
log.info('%s mtime : %f' % (root_folder, root_mtime))
log.info('%s mtime : %f' % (file_path, file_mtime))
log.info('difference : %f seconds' % (root_mtime - file_mtime))
return root_mtime, file_mtime
def run(is_hidden, is_system, root=test_propmtime.data_root):
current_time = time.time()
file_name = 'myfile.txt'
if propmtime.util.is_mac() and is_hidden:
file_name = '.' + file_name
file_path = os.path.join(test_propmtime.child_folder, file_name)
test_propmtime.file_creator(current_time, file_path, 1, is_hidden, is_system)
root_mtime, file_mtime = get_mtimes(test_propmtime.data_root, file_path)
assert((root_mtime - file_mtime) >= (test_propmtime.time_offset_unit - test_propmtime.time_accuracy_window))
pmt = propmtime.PropMTime(test_propmtime.data_root, True, is_hidden, is_system)
pmt.start()
pmt.join()
root_mtime, file_mtime = get_mtimes(test_propmtime.data_root, file_path)
assert(abs(root_mtime - file_mtime) < test_propmtime.time_accuracy_window)
def test_normal():
run(False, False)
def test_hidden():
run(True, False)
def test_system():
run(False, True)
def test_both():
run(True, True)
def test_non_existent():
run(False, False, 'i_do_not_exist') | mit | 8,846,400,064,544,931,000 | 25.431034 | 112 | 0.675587 | false |
ariabuckles/pyobjc-framework-Cocoa | PyObjCTest/test_cfnotificationcenter.py | 3 | 4469 | from PyObjCTools.TestSupport import *
from CoreFoundation import *
try:
long
except NameError:
long = int
class TestNotificationCenter (TestCase):
def testTypes(self):
self.assertIsCFType(CFNotificationCenterRef)
def testTypeID(self):
self.assertIsInstance(CFNotificationCenterGetTypeID(), (int, long))
def testGetting(self):
ref = CFNotificationCenterGetLocalCenter();
self.assertIsInstance(ref,CFNotificationCenterRef)
ref = CFNotificationCenterGetDistributedCenter();
self.assertIsInstance(ref,CFNotificationCenterRef)
ref = CFNotificationCenterGetDarwinNotifyCenter();
self.assertIsInstance(ref,CFNotificationCenterRef)
def testSending(self):
ref = CFNotificationCenterGetLocalCenter();
self.assertIsInstance(ref,CFNotificationCenterRef)
notifications = []
@objc.callbackFor(CFNotificationCenterAddObserver)
def observe(center, observer, name, object, userInfo):
notifications.append(( center, observer, name, object, userInfo ))
self.assertArgHasType(CFNotificationCenterAddObserver, 1, b'@')
self.assertArgIsFunction(CFNotificationCenterAddObserver, 2, b'v@@@@@', True)
self.assertArgHasType(CFNotificationCenterAddObserver, 4, b'@')
args = {}
args["object"] = b"object".decode('ascii')
args["pyobjc.test"] = b"pyobjc.test".decode('ascii')
CFNotificationCenterAddObserver(ref, args["object"], observe, args["pyobjc.test"], ref, CFNotificationSuspensionBehaviorDeliverImmediately)
CFNotificationCenterPostNotificationWithOptions(ref, b"pyobjc.test".decode('ascii'), ref, {b"name".decode('ascii'):b"value".decode('ascii')}, kCFNotificationPostToAllSessions)
self.assertEqual(len(notifications) , 1)
info = notifications[-1]
self.assertIs(info[0], ref)
self.assertEqual(info[1] , b"object".decode('ascii'))
self.assertEqual(info[2] , b"pyobjc.test".decode('ascii'))
self.assertIs(info[3], ref)
self.assertEqual(info[4] , {b"name".decode('ascii'):b"value".decode('ascii')})
CFNotificationCenterPostNotification(ref, b"pyobjc.test".decode('ascii'), ref, {b"name2".decode('ascii'):b"value2".decode('ascii')}, True)
self.assertEqual(len(notifications) , 2)
info = notifications[-1]
self.assertIs(info[0], ref)
self.assertEqual(info[1] , b"object".decode('ascii'))
self.assertEqual(info[2] , b"pyobjc.test".decode('ascii'))
self.assertIs(info[3], ref)
self.assertEqual(info[4] , {b"name2".decode('ascii'):b"value2".decode('ascii')})
self.assertArgHasType(CFNotificationCenterRemoveObserver, 1, b'@')
self.assertArgHasType(CFNotificationCenterRemoveObserver, 3, b'@')
CFNotificationCenterRemoveObserver(ref, args["object"], args["pyobjc.test"], ref)
self.assertArgHasType(CFNotificationCenterPostNotificationWithOptions, 2, b'@')
CFNotificationCenterPostNotificationWithOptions(ref, b"pyobjc.test".decode('ascii'), ref, {b"name".decode('ascii'):b"value".decode('ascii')}, kCFNotificationPostToAllSessions)
self.assertEqual(len(notifications) , 2)
CFNotificationCenterAddObserver(ref, args["object"], observe, args["pyobjc.test"], ref, CFNotificationSuspensionBehaviorDeliverImmediately)
self.assertArgHasType(CFNotificationCenterPostNotification, 2, b'@')
self.assertArgIsBOOL(CFNotificationCenterPostNotification, 4)
CFNotificationCenterPostNotification(ref, b"pyobjc.test".decode('ascii'), ref, {b"name2".decode('ascii'):b"value2".decode('ascii')}, True)
self.assertEqual(len(notifications) , 3)
CFNotificationCenterRemoveEveryObserver(ref, args["object"])
CFNotificationCenterPostNotification(ref, b"pyobjc.test".decode('ascii'), ref, {b"name2".decode('ascii'):b"value2".decode('ascii')}, True)
self.assertEqual(len(notifications) , 3)
def testConstants(self):
self.assertEqual(CFNotificationSuspensionBehaviorDrop, 1)
self.assertEqual(CFNotificationSuspensionBehaviorCoalesce, 2)
self.assertEqual(CFNotificationSuspensionBehaviorHold, 3)
self.assertEqual(CFNotificationSuspensionBehaviorDeliverImmediately, 4)
self.assertEqual(kCFNotificationDeliverImmediately, 1)
self.assertEqual(kCFNotificationPostToAllSessions, 2)
if __name__ == "__main__":
main()
| mit | 8,539,407,397,876,693,000 | 49.213483 | 184 | 0.705974 | false |
googleinterns/cabby | cabby/geo/visualize.py | 1 | 3273 | # coding=utf-8
# Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Library to support geographical visualization.'''
import folium
import geopandas as gpd
import pandas as pd
import shapely.geometry as geom
from shapely.geometry import Polygon, Point, LineString
from typing import Tuple, Sequence, Optional, Dict, Text
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.getcwd())))
from cabby.geo import util
from cabby.geo import walk
from cabby.geo import geo_item
def get_osm_map(entity: geo_item.GeoEntity) -> folium.Map:
  '''Create the OSM map.
  Arguments:
    entity: the GeoEntity from which to create the OSM map.
  Returns:
    A folium OSM map with the entity's landmarks and route drawn on it.
  '''
mid_point = util.midpoint(
entity.geo_landmarks['end_point'].geometry,
entity.geo_landmarks['start_point'].geometry)
zoom_location = util.list_yx_from_point(mid_point)
# create a map
map_osm = folium.Map(location=zoom_location,
zoom_start=15, tiles='OpenStreetMap')
# draw the points
colors = [
'pink', 'black', 'white', 'yellow', 'red', 'green', 'blue', 'orange']
for landmark_type, landmark in entity.geo_landmarks.items():
if landmark.geometry is not None:
landmark_geom = util.list_yx_from_point(landmark.geometry)
folium.Marker(
landmark_geom,
popup=f'{landmark_type}: {landmark.main_tag}',
icon=folium.Icon(color=colors.pop(0))).add_to(map_osm)
lat_lng_list = []
for coord in entity.route.coords:
lat_lng_list.append([coord[1], coord[0]])
  for coord_lat_lng in lat_lng_list:
    folium.Circle(location=coord_lat_lng,
                  radius=5,
                  color='crimson',
                  ).add_to(map_osm)
return map_osm
def get_maps_and_instructions(path: Text
) -> Sequence[Tuple[folium.Map, str]]:
  '''Create the OSM maps and instructions.
  Arguments:
    path: The path to the file of stored geo entities.
  Returns:
    Pairs of (OSM map, instruction string), one per loaded entity.
  '''
map_osms_instructions = []
entities = walk.load_entities(path)
for entity in entities:
map_osm = get_osm_map(entity)
features_list = []
for feature_type, feature in entity.geo_features.items():
features_list.append(feature_type + ": " + str(feature))
landmark_list = []
for landmark_type, landmark in entity.geo_landmarks.items():
landmark_list.append(landmark_type + ": " + str(landmark.main_tag))
    instruction = '; '.join(features_list + landmark_list)
map_osms_instructions.append((map_osm, instruction))
return map_osms_instructions
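# Illustrative usage (file name hypothetical):
#   for osm_map, instruction in get_maps_and_instructions('entities.pkl'):
#     osm_map.save('map.html')  # one map per entity, paired with its text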
| apache-2.0 | 3,070,972,481,918,681,600 | 32.060606 | 75 | 0.664833 | false |
knowsWhereHisTowelIs/pi-pyth-serv-socketio | lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/util/retry.py | 360 | 10664 | from __future__ import absolute_import
import time
import logging
from ..exceptions import (
ConnectTimeoutError,
MaxRetryError,
ProtocolError,
ReadTimeoutError,
ResponseError,
)
from ..packages import six
log = logging.getLogger(__name__)
class Retry(object):
""" Retry configuration.
Each retry attempt will create a new Retry object with updated values, so
they can be safely reused.
Retries can be defined as a default for a pool::
retries = Retry(connect=5, read=2, redirect=5)
http = PoolManager(retries=retries)
response = http.request('GET', 'http://example.com/')
Or per-request (which overrides the default for the pool)::
response = http.request('GET', 'http://example.com/', retries=Retry(10))
Retries can be disabled by passing ``False``::
response = http.request('GET', 'http://example.com/', retries=False)
Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
retries are disabled, in which case the causing exception will be raised.
:param int total:
Total number of retries to allow. Takes precedence over other counts.
Set to ``None`` to remove this constraint and fall back on other
counts. It's a good idea to set this to some sensibly-high value to
account for unexpected edge cases and avoid infinite retry loops.
Set to ``0`` to fail on the first retry.
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
:param int connect:
How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request.
Set to ``0`` to fail on the first retry of this type.
:param int read:
How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects.
Set to ``0`` to fail on the first retry of this type.
:param int redirect:
How many redirects to perform. Limit this to avoid infinite redirect
loops.
        A redirect is an HTTP response with a status code 301, 302, 303, 307 or
308.
Set to ``0`` to fail on the first retry of this type.
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
:param iterable method_whitelist:
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
Set to a ``False`` value to retry on any verb.
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``method_whitelist``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
:param float backoff_factor:
A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
{backoff factor} * (2 ^ ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
:param bool raise_on_redirect: Whether, if the number of redirects is
exhausted, to raise a MaxRetryError, or to return a response with a
response code in the 3xx range.
:param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
whether we should raise an exception, or return a response,
if status falls in ``status_forcelist`` range and retries have
been exhausted.
"""
DEFAULT_METHOD_WHITELIST = frozenset([
'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
#: Maximum backoff time.
BACKOFF_MAX = 120
def __init__(self, total=10, connect=None, read=None, redirect=None,
method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
_observed_errors=0):
self.total = total
self.connect = connect
self.read = read
if redirect is False or total is False:
redirect = 0
raise_on_redirect = False
self.redirect = redirect
self.status_forcelist = status_forcelist or set()
self.method_whitelist = method_whitelist
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status
self._observed_errors = _observed_errors # TODO: use .history instead?
def new(self, **kw):
params = dict(
total=self.total,
connect=self.connect, read=self.read, redirect=self.redirect,
method_whitelist=self.method_whitelist,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
raise_on_status=self.raise_on_status,
_observed_errors=self._observed_errors,
)
params.update(kw)
return type(self)(**params)
@classmethod
def from_int(cls, retries, redirect=True, default=None):
""" Backwards-compatibility for the old retries format."""
if retries is None:
retries = default if default is not None else cls.DEFAULT
if isinstance(retries, Retry):
return retries
redirect = bool(redirect) and None
new_retries = cls(retries, redirect=redirect)
log.debug("Converted retries value: %r -> %r", retries, new_retries)
return new_retries
def get_backoff_time(self):
""" Formula for computing the current backoff
:rtype: float
"""
if self._observed_errors <= 1:
return 0
backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
return min(self.BACKOFF_MAX, backoff_value)
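    # Worked example (illustrative): with backoff_factor=0.1 and three
    # observed errors, the sleep is 0.1 * 2 ** (3 - 1) = 0.4 seconds,
    # capped at BACKOFF_MAX.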
def sleep(self):
""" Sleep between retry attempts using an exponential backoff.
By default, the backoff factor is 0 and this method will return
immediately.
"""
backoff = self.get_backoff_time()
if backoff <= 0:
return
time.sleep(backoff)
def _is_connection_error(self, err):
""" Errors when we're fairly sure that the server did not receive the
request, so it should be safe to retry.
"""
return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
""" Errors that occur after the request has been started, so we should
assume that the server began processing it.
"""
return isinstance(err, (ReadTimeoutError, ProtocolError))
def is_forced_retry(self, method, status_code):
""" Is this method/status code retryable? (Based on method/codes whitelists)
"""
if self.method_whitelist and method.upper() not in self.method_whitelist:
return False
return self.status_forcelist and status_code in self.status_forcelist
def is_exhausted(self):
""" Are we out of retries? """
retry_counts = (self.total, self.connect, self.read, self.redirect)
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
return False
return min(retry_counts) < 0
def increment(self, method=None, url=None, response=None, error=None,
_pool=None, _stacktrace=None):
""" Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
return a response.
:type response: :class:`~urllib3.response.HTTPResponse`
:param Exception error: An error encountered during the request, or
None if the response was received successfully.
:return: A new ``Retry`` object.
"""
if self.total is False and error:
# Disabled, indicate to re-raise the error.
raise six.reraise(type(error), error, _stacktrace)
total = self.total
if total is not None:
total -= 1
_observed_errors = self._observed_errors
connect = self.connect
read = self.read
redirect = self.redirect
cause = 'unknown'
if error and self._is_connection_error(error):
# Connect retry?
if connect is False:
raise six.reraise(type(error), error, _stacktrace)
elif connect is not None:
connect -= 1
_observed_errors += 1
elif error and self._is_read_error(error):
# Read retry?
if read is False:
raise six.reraise(type(error), error, _stacktrace)
elif read is not None:
read -= 1
_observed_errors += 1
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
redirect -= 1
cause = 'too many redirects'
else:
# Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the whitelist
_observed_errors += 1
cause = ResponseError.GENERIC_ERROR
if response and response.status:
cause = ResponseError.SPECIFIC_ERROR.format(
status_code=response.status)
new_retry = self.new(
total=total,
connect=connect, read=read, redirect=redirect,
_observed_errors=_observed_errors)
if new_retry.is_exhausted():
raise MaxRetryError(_pool, url, error or ResponseError(cause))
log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
return new_retry
def __repr__(self):
return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
'read={self.read}, redirect={self.redirect})').format(
cls=type(self), self=self)
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
| mit | 2,026,008,332,306,434,600 | 34.546667 | 84 | 0.620405 | false |
craigderington/studentloan5 | studentloan5/Lib/site-packages/django/contrib/admin/validation.py | 82 | 23810 | from django.contrib.admin.utils import NotRelationField, get_fields_from_path
from django.core.exceptions import FieldDoesNotExist, ImproperlyConfigured
from django.db import models
from django.forms.models import (
BaseModelForm, BaseModelFormSet, _get_foreign_key,
)
"""
Does basic ModelAdmin option validation. Calls custom validation
classmethod in the end if it is provided in cls. The signature of the
custom validation classmethod should be: def validate(cls, model).
"""
__all__ = ['BaseValidator', 'InlineValidator']
class BaseValidator(object):
def validate(self, cls, model):
for m in dir(self):
if m.startswith('validate_'):
getattr(self, m)(cls, model)
def check_field_spec(self, cls, model, flds, label):
"""
Validate the fields specification in `flds` from a ModelAdmin subclass
`cls` for the `model` model. Use `label` for reporting problems to the user.
The fields specification can be a ``fields`` option or a ``fields``
sub-option from a ``fieldsets`` option component.
"""
for fields in flds:
# The entry in fields might be a tuple. If it is a standalone
# field, make it into a tuple to make processing easier.
if type(fields) != tuple:
fields = (fields,)
for field in fields:
if field in cls.readonly_fields:
# Stuff can be put in fields that isn't actually a
# model field if it's in readonly_fields,
# readonly_fields will handle the validation of such
# things.
continue
try:
f = model._meta.get_field(field)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could be an
# extra field on the form; nothing to check so move on to the next field.
continue
if isinstance(f, models.ManyToManyField) and not f.rel.through._meta.auto_created:
raise ImproperlyConfigured("'%s.%s' "
"can't include the ManyToManyField field '%s' because "
"'%s' manually specifies a 'through' model." % (
cls.__name__, label, field, field))
def validate_raw_id_fields(self, cls, model):
" Validate that raw_id_fields only contains field names that are listed on the model. "
if hasattr(cls, 'raw_id_fields'):
check_isseq(cls, 'raw_id_fields', cls.raw_id_fields)
for idx, field in enumerate(cls.raw_id_fields):
f = get_field(cls, model, 'raw_id_fields', field)
if not isinstance(f, (models.ForeignKey, models.ManyToManyField)):
raise ImproperlyConfigured("'%s.raw_id_fields[%d]', '%s' must "
"be either a ForeignKey or ManyToManyField."
% (cls.__name__, idx, field))
def validate_fields(self, cls, model):
" Validate that fields only refer to existing fields, doesn't contain duplicates. "
# fields
if cls.fields: # default value is None
check_isseq(cls, 'fields', cls.fields)
self.check_field_spec(cls, model, cls.fields, 'fields')
if cls.fieldsets:
raise ImproperlyConfigured('Both fieldsets and fields are specified in %s.' % cls.__name__)
if len(cls.fields) > len(set(cls.fields)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.fields' % cls.__name__)
def validate_fieldsets(self, cls, model):
" Validate that fieldsets is properly formatted and doesn't contain duplicates. "
from django.contrib.admin.options import flatten_fieldsets
if cls.fieldsets: # default value is None
check_isseq(cls, 'fieldsets', cls.fieldsets)
for idx, fieldset in enumerate(cls.fieldsets):
check_isseq(cls, 'fieldsets[%d]' % idx, fieldset)
if len(fieldset) != 2:
raise ImproperlyConfigured("'%s.fieldsets[%d]' does not "
"have exactly two elements." % (cls.__name__, idx))
check_isdict(cls, 'fieldsets[%d][1]' % idx, fieldset[1])
if 'fields' not in fieldset[1]:
raise ImproperlyConfigured("'fields' key is required in "
"%s.fieldsets[%d][1] field options dict."
% (cls.__name__, idx))
self.check_field_spec(cls, model, fieldset[1]['fields'], "fieldsets[%d][1]['fields']" % idx)
flattened_fieldsets = flatten_fieldsets(cls.fieldsets)
if len(flattened_fieldsets) > len(set(flattened_fieldsets)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.fieldsets' % cls.__name__)
def validate_exclude(self, cls, model):
" Validate that exclude is a sequence without duplicates. "
if cls.exclude: # default value is None
check_isseq(cls, 'exclude', cls.exclude)
if len(cls.exclude) > len(set(cls.exclude)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.exclude' % cls.__name__)
def validate_form(self, cls, model):
" Validate that form subclasses BaseModelForm. "
if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm):
raise ImproperlyConfigured("%s.form does not inherit from "
"BaseModelForm." % cls.__name__)
def validate_filter_vertical(self, cls, model):
" Validate that filter_vertical is a sequence of field names. "
if hasattr(cls, 'filter_vertical'):
check_isseq(cls, 'filter_vertical', cls.filter_vertical)
for idx, field in enumerate(cls.filter_vertical):
f = get_field(cls, model, 'filter_vertical', field)
if not isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.filter_vertical[%d]' must be "
"a ManyToManyField." % (cls.__name__, idx))
def validate_filter_horizontal(self, cls, model):
" Validate that filter_horizontal is a sequence of field names. "
if hasattr(cls, 'filter_horizontal'):
check_isseq(cls, 'filter_horizontal', cls.filter_horizontal)
for idx, field in enumerate(cls.filter_horizontal):
f = get_field(cls, model, 'filter_horizontal', field)
if not isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.filter_horizontal[%d]' must be "
"a ManyToManyField." % (cls.__name__, idx))
def validate_radio_fields(self, cls, model):
" Validate that radio_fields is a dictionary of choice or foreign key fields. "
from django.contrib.admin.options import HORIZONTAL, VERTICAL
if hasattr(cls, 'radio_fields'):
check_isdict(cls, 'radio_fields', cls.radio_fields)
for field, val in cls.radio_fields.items():
f = get_field(cls, model, 'radio_fields', field)
if not (isinstance(f, models.ForeignKey) or f.choices):
raise ImproperlyConfigured("'%s.radio_fields['%s']' "
"is neither an instance of ForeignKey nor does "
"have choices set." % (cls.__name__, field))
if val not in (HORIZONTAL, VERTICAL):
raise ImproperlyConfigured("'%s.radio_fields['%s']' "
"is neither admin.HORIZONTAL nor admin.VERTICAL."
% (cls.__name__, field))
def validate_prepopulated_fields(self, cls, model):
" Validate that prepopulated_fields if a dictionary containing allowed field types. "
# prepopulated_fields
if hasattr(cls, 'prepopulated_fields'):
check_isdict(cls, 'prepopulated_fields', cls.prepopulated_fields)
for field, val in cls.prepopulated_fields.items():
f = get_field(cls, model, 'prepopulated_fields', field)
if isinstance(f, (models.DateTimeField, models.ForeignKey,
models.ManyToManyField)):
raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' "
"is either a DateTimeField, ForeignKey or "
"ManyToManyField. This isn't allowed."
% (cls.__name__, field))
check_isseq(cls, "prepopulated_fields['%s']" % field, val)
for idx, f in enumerate(val):
get_field(cls, model, "prepopulated_fields['%s'][%d]" % (field, idx), f)
def validate_view_on_site_url(self, cls, model):
if hasattr(cls, 'view_on_site'):
if not callable(cls.view_on_site) and not isinstance(cls.view_on_site, bool):
raise ImproperlyConfigured("%s.view_on_site is not a callable or a boolean value." % cls.__name__)
def validate_ordering(self, cls, model):
" Validate that ordering refers to existing fields or is random. "
# ordering = None
if cls.ordering:
check_isseq(cls, 'ordering', cls.ordering)
for idx, field in enumerate(cls.ordering):
if field == '?' and len(cls.ordering) != 1:
raise ImproperlyConfigured("'%s.ordering' has the random "
"ordering marker '?', but contains other fields as "
"well. Please either remove '?' or the other fields."
% cls.__name__)
if field == '?':
continue
if field.startswith('-'):
field = field[1:]
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
if '__' in field:
continue
get_field(cls, model, 'ordering[%d]' % idx, field)
def validate_readonly_fields(self, cls, model):
" Validate that readonly_fields refers to proper attribute or field. "
if hasattr(cls, "readonly_fields"):
check_isseq(cls, "readonly_fields", cls.readonly_fields)
for idx, field in enumerate(cls.readonly_fields):
if not callable(field):
if not hasattr(cls, field):
if not hasattr(model, field):
try:
model._meta.get_field(field)
except FieldDoesNotExist:
raise ImproperlyConfigured(
"%s.readonly_fields[%d], %r is not a callable or "
"an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name)
)
class ModelAdminValidator(BaseValidator):
def validate_save_as(self, cls, model):
" Validate save_as is a boolean. "
check_type(cls, 'save_as', bool)
def validate_save_on_top(self, cls, model):
" Validate save_on_top is a boolean. "
check_type(cls, 'save_on_top', bool)
def validate_inlines(self, cls, model):
" Validate inline model admin classes. "
from django.contrib.admin.options import BaseModelAdmin
if hasattr(cls, 'inlines'):
check_isseq(cls, 'inlines', cls.inlines)
for idx, inline in enumerate(cls.inlines):
if not issubclass(inline, BaseModelAdmin):
raise ImproperlyConfigured("'%s.inlines[%d]' does not inherit "
"from BaseModelAdmin." % (cls.__name__, idx))
if not inline.model:
raise ImproperlyConfigured("'model' is a required attribute "
"of '%s.inlines[%d]'." % (cls.__name__, idx))
if not issubclass(inline.model, models.Model):
raise ImproperlyConfigured("'%s.inlines[%d].model' does not "
"inherit from models.Model." % (cls.__name__, idx))
inline.validate(inline.model)
self.check_inline(inline, model)
def check_inline(self, cls, parent_model):
" Validate inline class's fk field is not excluded. "
fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name, can_fail=True)
if hasattr(cls, 'exclude') and cls.exclude:
if fk and fk.name in cls.exclude:
raise ImproperlyConfigured("%s cannot exclude the field "
"'%s' - this is the foreign key to the parent model "
"%s.%s." % (cls.__name__, fk.name, parent_model._meta.app_label, parent_model.__name__))
def validate_list_display(self, cls, model):
" Validate that list_display only contains fields or usable attributes. "
if hasattr(cls, 'list_display'):
check_isseq(cls, 'list_display', cls.list_display)
for idx, field in enumerate(cls.list_display):
if not callable(field):
if not hasattr(cls, field):
if not hasattr(model, field):
try:
model._meta.get_field(field)
except FieldDoesNotExist:
raise ImproperlyConfigured(
"%s.list_display[%d], %r is not a callable or "
"an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name)
)
else:
# getattr(model, field) could be an X_RelatedObjectsDescriptor
f = fetch_attr(cls, model, "list_display[%d]" % idx, field)
if isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured(
"'%s.list_display[%d]', '%s' is a ManyToManyField "
"which is not supported."
% (cls.__name__, idx, field)
)
def validate_list_display_links(self, cls, model):
" Validate that list_display_links either is None or a unique subset of list_display."
if hasattr(cls, 'list_display_links'):
if cls.list_display_links is None:
return
check_isseq(cls, 'list_display_links', cls.list_display_links)
for idx, field in enumerate(cls.list_display_links):
if field not in cls.list_display:
raise ImproperlyConfigured("'%s.list_display_links[%d]' "
"refers to '%s' which is not defined in 'list_display'."
% (cls.__name__, idx, field))
def validate_list_filter(self, cls, model):
"""
Validate that list_filter is a sequence of one of three options:
1: 'field' - a basic field filter, possibly w/ relationships (eg, 'field__rel')
2: ('field', SomeFieldListFilter) - a field-based list filter class
3: SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import ListFilter, FieldListFilter
if hasattr(cls, 'list_filter'):
check_isseq(cls, 'list_filter', cls.list_filter)
for idx, item in enumerate(cls.list_filter):
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not issubclass(item, ListFilter):
raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
" which is not a descendant of ListFilter."
% (cls.__name__, idx, item.__name__))
# ... but not a FieldListFilter.
if issubclass(item, FieldListFilter):
raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
" which is of type FieldListFilter but is not"
" associated with a field name."
% (cls.__name__, idx, item.__name__))
else:
if isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not issubclass(list_filter_class, FieldListFilter):
raise ImproperlyConfigured("'%s.list_filter[%d][1]'"
" is '%s' which is not of type FieldListFilter."
% (cls.__name__, idx, list_filter_class.__name__))
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(model, field)
except (NotRelationField, FieldDoesNotExist):
raise ImproperlyConfigured("'%s.list_filter[%d]' refers to '%s'"
" which does not refer to a Field."
% (cls.__name__, idx, field))
def validate_list_select_related(self, cls, model):
" Validate that list_select_related is a boolean, a list or a tuple. "
list_select_related = getattr(cls, 'list_select_related', None)
if list_select_related:
types = (bool, tuple, list)
if not isinstance(list_select_related, types):
raise ImproperlyConfigured("'%s.list_select_related' should be "
"either a bool, a tuple or a list" %
cls.__name__)
def validate_list_per_page(self, cls, model):
" Validate that list_per_page is an integer. "
check_type(cls, 'list_per_page', int)
def validate_list_max_show_all(self, cls, model):
" Validate that list_max_show_all is an integer. "
check_type(cls, 'list_max_show_all', int)
def validate_list_editable(self, cls, model):
"""
Validate that list_editable is a sequence of editable fields from
list_display without first element.
"""
if hasattr(cls, 'list_editable') and cls.list_editable:
check_isseq(cls, 'list_editable', cls.list_editable)
for idx, field_name in enumerate(cls.list_editable):
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
"field, '%s', not defined on %s.%s."
% (cls.__name__, idx, field_name, model._meta.app_label, model.__name__))
if field_name not in cls.list_display:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to "
"'%s' which is not defined in 'list_display'."
% (cls.__name__, idx, field_name))
if cls.list_display_links is not None:
if field_name in cls.list_display_links:
raise ImproperlyConfigured("'%s' cannot be in both '%s.list_editable'"
" and '%s.list_display_links'"
% (field_name, cls.__name__, cls.__name__))
if not cls.list_display_links and cls.list_display[0] in cls.list_editable:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to"
" the first field in list_display, '%s', which can't be"
" used unless list_display_links is set."
% (cls.__name__, idx, cls.list_display[0]))
if not field.editable:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
"field, '%s', which isn't editable through the admin."
% (cls.__name__, idx, field_name))
def validate_search_fields(self, cls, model):
" Validate search_fields is a sequence. "
if hasattr(cls, 'search_fields'):
check_isseq(cls, 'search_fields', cls.search_fields)
def validate_date_hierarchy(self, cls, model):
" Validate that date_hierarchy refers to DateField or DateTimeField. "
if cls.date_hierarchy:
f = get_field(cls, model, 'date_hierarchy', cls.date_hierarchy)
if not isinstance(f, (models.DateField, models.DateTimeField)):
raise ImproperlyConfigured("'%s.date_hierarchy is "
"neither an instance of DateField nor DateTimeField."
% cls.__name__)
class InlineValidator(BaseValidator):
def validate_fk_name(self, cls, model):
" Validate that fk_name refers to a ForeignKey. "
if cls.fk_name: # default value is None
f = get_field(cls, model, 'fk_name', cls.fk_name)
if not isinstance(f, models.ForeignKey):
raise ImproperlyConfigured("'%s.fk_name is not an instance of "
"models.ForeignKey." % cls.__name__)
def validate_extra(self, cls, model):
" Validate that extra is an integer. "
check_type(cls, 'extra', int)
def validate_max_num(self, cls, model):
" Validate that max_num is an integer. "
check_type(cls, 'max_num', int)
def validate_formset(self, cls, model):
" Validate formset is a subclass of BaseModelFormSet. "
if hasattr(cls, 'formset') and not issubclass(cls.formset, BaseModelFormSet):
raise ImproperlyConfigured("'%s.formset' does not inherit from "
"BaseModelFormSet." % cls.__name__)
def check_type(cls, attr, type_):
if getattr(cls, attr, None) is not None and not isinstance(getattr(cls, attr), type_):
raise ImproperlyConfigured("'%s.%s' should be a %s."
% (cls.__name__, attr, type_.__name__))
def check_isseq(cls, label, obj):
if not isinstance(obj, (list, tuple)):
raise ImproperlyConfigured("'%s.%s' must be a list or tuple." % (cls.__name__, label))
def check_isdict(cls, label, obj):
if not isinstance(obj, dict):
raise ImproperlyConfigured("'%s.%s' must be a dictionary." % (cls.__name__, label))
def get_field(cls, model, label, field):
try:
return model._meta.get_field(field)
except FieldDoesNotExist:
raise ImproperlyConfigured("'%s.%s' refers to field '%s' that is missing from model '%s.%s'."
% (cls.__name__, label, field, model._meta.app_label, model.__name__))
def fetch_attr(cls, model, label, field):
try:
return model._meta.get_field(field)
except FieldDoesNotExist:
pass
try:
return getattr(model, field)
except AttributeError:
raise ImproperlyConfigured(
"'%s.%s' refers to '%s' that is neither a field, method or "
"property of model '%s.%s'."
% (cls.__name__, label, field, model._meta.app_label, model.__name__)
)
| bsd-3-clause | 6,608,919,516,173,174,000 | 51.911111 | 114 | 0.542713 | false |
TrevorLowing/PyGames | pysollib/tk/playeroptionsdialog.py | 2 | 6800 | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
##---------------------------------------------------------------------------##
##
## Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 2003 Mt. Hood Playing Card Co.
## Copyright (C) 2005-2009 Skomoroh
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
##---------------------------------------------------------------------------##
__all__ = ['PlayerOptionsDialog']
# imports
import Tkinter
# PySol imports
from pysollib.mfxutil import KwStruct, Struct
# Toolkit imports
from tkwidget import MfxDialog
from tkutil import bind
# ************************************************************************
# *
# ************************************************************************
class SelectUserNameDialog(MfxDialog):
def __init__(self, parent, title, usernames=[], **kw):
kw = self.initKw(kw)
MfxDialog.__init__(self, parent, title, kw.resizable, kw.default)
top_frame, bottom_frame = self.createFrames(kw)
self.createBitmaps(top_frame, kw)
#
listbox = Tkinter.Listbox(top_frame)
listbox.pack(side='left', fill='both', expand=True)
scrollbar = Tkinter.Scrollbar(top_frame)
scrollbar.pack(side='right', fill='y')
listbox.configure(yscrollcommand=scrollbar.set)
scrollbar.configure(command=listbox.yview)
self.username = None
self.listbox = listbox
bind(listbox, '<<ListboxSelect>>', self.updateUserName)
#
for un in usernames:
listbox.insert('end', un)
focus = self.createButtons(bottom_frame, kw)
self.mainloop(focus, kw.timeout)
#if listbox.curselection():
# self.username = listbox.get(listbox.curselection())
def updateUserName(self, *args):
self.username = self.listbox.get(self.listbox.curselection())
def initKw(self, kw):
kw = KwStruct(kw,
strings=(_("&OK"), _("&Cancel")), default=0,
separator=False,
resizable=False,
padx=10, pady=10,
buttonpadx=10, buttonpady=5,
)
return MfxDialog.initKw(self, kw)
class PlayerOptionsDialog(MfxDialog):
def __init__(self, parent, title, app, **kw):
kw = self.initKw(kw)
MfxDialog.__init__(self, parent, title, kw.resizable, kw.default)
top_frame, bottom_frame = self.createFrames(kw)
self.createBitmaps(top_frame, kw)
self.app = app
#
self.update_stats_var = Tkinter.BooleanVar()
self.update_stats_var.set(app.opt.update_player_stats != 0)
self.confirm_var = Tkinter.BooleanVar()
self.confirm_var.set(app.opt.confirm != 0)
self.win_animation_var = Tkinter.BooleanVar()
self.win_animation_var.set(app.opt.win_animation != 0)
#
frame = Tkinter.Frame(top_frame)
frame.pack(expand=True, fill='both', padx=5, pady=10)
widget = Tkinter.Label(frame, text=_("\nPlease enter your name"),
#justify='left', anchor='w',
takefocus=0)
widget.grid(row=0, column=0, columnspan=2, sticky='ew', padx=0, pady=5)
w = kw.get("e_width", 30) # width in characters
self.player_var = Tkinter.Entry(frame, exportselection=1, width=w)
self.player_var.insert(0, app.opt.player)
self.player_var.grid(row=1, column=0, sticky='ew', padx=0, pady=5)
widget = Tkinter.Button(frame, text=_('Choose...'),
command=self.selectUserName)
widget.grid(row=1, column=1, padx=5, pady=5)
widget = Tkinter.Checkbutton(frame, variable=self.confirm_var,
anchor='w', text=_("Confirm quit"))
widget.grid(row=2, column=0, columnspan=2, sticky='ew', padx=0, pady=5)
widget = Tkinter.Checkbutton(frame, variable=self.update_stats_var,
anchor='w',
text=_("Update statistics and logs"))
widget.grid(row=3, column=0, columnspan=2, sticky='ew', padx=0, pady=5)
### widget = Tkinter.Checkbutton(frame, variable=self.win_animation_var,
### text="Win animation")
### widget.pack(side='top', padx=kw.padx, pady=kw.pady)
frame.columnconfigure(0, weight=1)
#
self.player = self.player_var.get()
self.confirm = self.confirm_var.get()
self.update_stats = self.update_stats_var.get()
self.win_animation = self.win_animation_var.get()
#
focus = self.createButtons(bottom_frame, kw)
self.mainloop(focus, kw.timeout)
def selectUserName(self, *args):
names = self.app.getAllUserNames()
d = SelectUserNameDialog(self.top, _("Select name"), names)
if d.status == 0 and d.button == 0 and d.username:
self.player_var.delete(0, 'end')
self.player_var.insert(0, d.username)
def mDone(self, button):
self.button = button
self.player = self.player_var.get()
self.confirm = self.confirm_var.get()
self.update_stats = self.update_stats_var.get()
self.win_animation = self.win_animation_var.get()
raise SystemExit
def initKw(self, kw):
kw = KwStruct(kw,
strings=(_("&OK"), _("&Cancel")), default=0,
padx=10, pady=10,
)
return MfxDialog.initKw(self, kw)
# ************************************************************************
# *
# ************************************************************************
def playeroptionsdialog_main(args):
from tkutil import wm_withdraw
opt = Struct(player="Test", update_player_stats=1)
app = Struct(opt=opt)
tk = Tkinter.Tk()
wm_withdraw(tk)
tk.update()
d = PlayerOptionsDialog(tk, "Player options", app)
print d.status, d.button, ":", d.player, d.update_stats
return 0
if __name__ == "__main__":
import sys
sys.exit(playeroptionsdialog_main(sys.argv))
| gpl-2.0 | -7,864,327,510,845,503,000 | 38.534884 | 79 | 0.558235 | false |
matheus2740/alpha_empire | dgvm/ipc/protocol.py | 1 | 2779 | __author__ = 'salvia'
import pickle
class IPCProtocolException(Exception):
    pass
class BaseIPCProtocol(object):
"""
Class which handles how the data is passed through the socket.
This base class implements a very simple mechanism of pickling objects and
prefixing the message with `HEADER_SIZE` hexadecimal digits meaning the lenght of the message.
If your server is to communicate objects which cannot be serialized by pickle, you must
define your own protocol.
If your server is to communicate messages whose lenght cannot be expressed within `HEADER_SIZE` hexadecimal
digits, you should subclass this protocol and increase the `HEADER_SIZE` class variable.
Any protocol must define the static methods: `pack_message`, `send_message` and `recover_message`.
Note: the default `HEADER_SIZE` is 8 hexadecimal digits, which can describe a message of size up to 4GB.
"""
HEADER_SIZE = 8
@staticmethod
def pack_message(data):
"""
This method receives any object that is meant to be communicated from or to the server.
This method should return a string, which will be passed through the socket.
The BaseIPCProtocol will pickle and prefix this data with lenght.
:param data: any object
:return: a string which will be messaged through the socket.
"""
data = pickle.dumps(data)
        # hex length header, always as bytes so it concatenates with the
        # pickled payload even when it is exactly HEADER_SIZE digits long
        header = hex(len(data))[2:].encode('utf-8')
        if len(header) > BaseIPCProtocol.HEADER_SIZE:
            raise IPCProtocolException('Attempted sending message too large for protocol: BaseIPCProtocol')
        # zero-pad to exactly HEADER_SIZE hexadecimal digits
        header = header.rjust(BaseIPCProtocol.HEADER_SIZE, b'0')
        packet = header + data
return packet
@staticmethod
def send_message(sock, data):
"""
This method receives an socket object and raw data to be communicated.
The BaseIPCProtocol will pickle and prefix this data with lenght.
:param sock: a socket object
:param data: any object
"""
packet = BaseIPCProtocol.pack_message(data)
        # sendall keeps writing until the whole packet has been sent
        sock.sendall(packet)
@staticmethod
def recover_message(sock):
"""
This method receives a socket object and must receive and parse the message from it.
:param sock: a socket object
:return: the parsed messagem into the original object
"""
        try:
            header = sock.recv(BaseIPCProtocol.HEADER_SIZE)
            length = int('0x' + header.decode('utf-8'), 16)
            # recv may deliver fewer bytes than asked for; keep reading
            # until the full payload has arrived
            payload = b''
            while len(payload) < length:
                chunk = sock.recv(length - len(payload))
                if not chunk:
                    return None
                payload += chunk
            return pickle.loads(payload)
        except ValueError:
            return None | gpl-3.0 | -7,973,246,357,053,907,000 | 35.578947 | 111 | 0.65995 | false |
bopo/tablib | tablib/packages/openpyxl/writer/theme.py | 116 | 10933 | # -*- coding: utf-8 -*-
# file openpyxl/writer/theme.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Write the theme xml based on a fixed string."""
# package imports
from ..shared.xmltools import fromstring, get_document_content
def write_theme():
"""Write the theme xml."""
xml_node = fromstring(
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
'<a:theme xmlns:a="http://schemas.openxmlformats.org/'
'drawingml/2006/main" name="Office Theme">'
'<a:themeElements>'
'<a:clrScheme name="Office">'
'<a:dk1><a:sysClr val="windowText" lastClr="000000"/></a:dk1>'
'<a:lt1><a:sysClr val="window" lastClr="FFFFFF"/></a:lt1>'
'<a:dk2><a:srgbClr val="1F497D"/></a:dk2>'
'<a:lt2><a:srgbClr val="EEECE1"/></a:lt2>'
'<a:accent1><a:srgbClr val="4F81BD"/></a:accent1>'
'<a:accent2><a:srgbClr val="C0504D"/></a:accent2>'
'<a:accent3><a:srgbClr val="9BBB59"/></a:accent3>'
'<a:accent4><a:srgbClr val="8064A2"/></a:accent4>'
'<a:accent5><a:srgbClr val="4BACC6"/></a:accent5>'
'<a:accent6><a:srgbClr val="F79646"/></a:accent6>'
'<a:hlink><a:srgbClr val="0000FF"/></a:hlink>'
'<a:folHlink><a:srgbClr val="800080"/></a:folHlink>'
'</a:clrScheme>'
'<a:fontScheme name="Office">'
'<a:majorFont>'
'<a:latin typeface="Cambria"/>'
'<a:ea typeface=""/>'
'<a:cs typeface=""/>'
'<a:font script="Jpan" typeface="MS Pゴシック"/>'
'<a:font script="Hang" typeface="맑은 고딕"/>'
'<a:font script="Hans" typeface="宋体"/>'
'<a:font script="Hant" typeface="新細明體"/>'
'<a:font script="Arab" typeface="Times New Roman"/>'
'<a:font script="Hebr" typeface="Times New Roman"/>'
'<a:font script="Thai" typeface="Tahoma"/>'
'<a:font script="Ethi" typeface="Nyala"/>'
'<a:font script="Beng" typeface="Vrinda"/>'
'<a:font script="Gujr" typeface="Shruti"/>'
'<a:font script="Khmr" typeface="MoolBoran"/>'
'<a:font script="Knda" typeface="Tunga"/>'
'<a:font script="Guru" typeface="Raavi"/>'
'<a:font script="Cans" typeface="Euphemia"/>'
'<a:font script="Cher" typeface="Plantagenet Cherokee"/>'
'<a:font script="Yiii" typeface="Microsoft Yi Baiti"/>'
'<a:font script="Tibt" typeface="Microsoft Himalaya"/>'
'<a:font script="Thaa" typeface="MV Boli"/>'
'<a:font script="Deva" typeface="Mangal"/>'
'<a:font script="Telu" typeface="Gautami"/>'
'<a:font script="Taml" typeface="Latha"/>'
'<a:font script="Syrc" typeface="Estrangelo Edessa"/>'
'<a:font script="Orya" typeface="Kalinga"/>'
'<a:font script="Mlym" typeface="Kartika"/>'
'<a:font script="Laoo" typeface="DokChampa"/>'
'<a:font script="Sinh" typeface="Iskoola Pota"/>'
'<a:font script="Mong" typeface="Mongolian Baiti"/>'
'<a:font script="Viet" typeface="Times New Roman"/>'
'<a:font script="Uigh" typeface="Microsoft Uighur"/>'
'</a:majorFont>'
'<a:minorFont>'
'<a:latin typeface="Calibri"/>'
'<a:ea typeface=""/>'
'<a:cs typeface=""/>'
'<a:font script="Jpan" typeface="MS Pゴシック"/>'
'<a:font script="Hang" typeface="맑은 고딕"/>'
'<a:font script="Hans" typeface="宋体"/>'
'<a:font script="Hant" typeface="新細明體"/>'
'<a:font script="Arab" typeface="Arial"/>'
'<a:font script="Hebr" typeface="Arial"/>'
'<a:font script="Thai" typeface="Tahoma"/>'
'<a:font script="Ethi" typeface="Nyala"/>'
'<a:font script="Beng" typeface="Vrinda"/>'
'<a:font script="Gujr" typeface="Shruti"/>'
'<a:font script="Khmr" typeface="DaunPenh"/>'
'<a:font script="Knda" typeface="Tunga"/>'
'<a:font script="Guru" typeface="Raavi"/>'
'<a:font script="Cans" typeface="Euphemia"/>'
'<a:font script="Cher" typeface="Plantagenet Cherokee"/>'
'<a:font script="Yiii" typeface="Microsoft Yi Baiti"/>'
'<a:font script="Tibt" typeface="Microsoft Himalaya"/>'
'<a:font script="Thaa" typeface="MV Boli"/>'
'<a:font script="Deva" typeface="Mangal"/>'
'<a:font script="Telu" typeface="Gautami"/>'
'<a:font script="Taml" typeface="Latha"/>'
'<a:font script="Syrc" typeface="Estrangelo Edessa"/>'
'<a:font script="Orya" typeface="Kalinga"/>'
'<a:font script="Mlym" typeface="Kartika"/>'
'<a:font script="Laoo" typeface="DokChampa"/>'
'<a:font script="Sinh" typeface="Iskoola Pota"/>'
'<a:font script="Mong" typeface="Mongolian Baiti"/>'
'<a:font script="Viet" typeface="Arial"/>'
'<a:font script="Uigh" typeface="Microsoft Uighur"/>'
'</a:minorFont>'
'</a:fontScheme>'
'<a:fmtScheme name="Office">'
'<a:fillStyleLst>'
'<a:solidFill><a:schemeClr val="phClr"/></a:solidFill>'
'<a:gradFill rotWithShape="1"><a:gsLst>'
'<a:gs pos="0"><a:schemeClr val="phClr"><a:tint val="50000"/>'
'<a:satMod val="300000"/></a:schemeClr></a:gs>'
'<a:gs pos="35000"><a:schemeClr val="phClr"><a:tint val="37000"/>'
'<a:satMod val="300000"/></a:schemeClr></a:gs>'
'<a:gs pos="100000"><a:schemeClr val="phClr"><a:tint val="15000"/>'
'<a:satMod val="350000"/></a:schemeClr></a:gs></a:gsLst>'
'<a:lin ang="16200000" scaled="1"/></a:gradFill>'
'<a:gradFill rotWithShape="1"><a:gsLst>'
'<a:gs pos="0"><a:schemeClr val="phClr"><a:shade val="51000"/>'
'<a:satMod val="130000"/></a:schemeClr></a:gs>'
'<a:gs pos="80000"><a:schemeClr val="phClr"><a:shade val="93000"/>'
'<a:satMod val="130000"/></a:schemeClr></a:gs>'
'<a:gs pos="100000"><a:schemeClr val="phClr">'
'<a:shade val="94000"/>'
'<a:satMod val="135000"/></a:schemeClr></a:gs></a:gsLst>'
'<a:lin ang="16200000" scaled="0"/></a:gradFill></a:fillStyleLst>'
'<a:lnStyleLst>'
'<a:ln w="9525" cap="flat" cmpd="sng" algn="ctr">'
'<a:solidFill><a:schemeClr val="phClr"><a:shade val="95000"/>'
'<a:satMod val="105000"/></a:schemeClr></a:solidFill>'
'<a:prstDash val="solid"/></a:ln>'
'<a:ln w="25400" cap="flat" cmpd="sng" algn="ctr"><a:solidFill>'
'<a:schemeClr val="phClr"/></a:solidFill>'
'<a:prstDash val="solid"/></a:ln>'
'<a:ln w="38100" cap="flat" cmpd="sng" algn="ctr"><a:solidFill>'
'<a:schemeClr val="phClr"/></a:solidFill>'
'<a:prstDash val="solid"/></a:ln></a:lnStyleLst>'
'<a:effectStyleLst><a:effectStyle><a:effectLst>'
'<a:outerShdw blurRad="40000" dist="20000" dir="5400000" '
'rotWithShape="0"><a:srgbClr val="000000">'
'<a:alpha val="38000"/></a:srgbClr></a:outerShdw></a:effectLst>'
'</a:effectStyle><a:effectStyle><a:effectLst>'
'<a:outerShdw blurRad="40000" dist="23000" dir="5400000" '
'rotWithShape="0"><a:srgbClr val="000000">'
'<a:alpha val="35000"/></a:srgbClr></a:outerShdw></a:effectLst>'
'</a:effectStyle><a:effectStyle><a:effectLst>'
'<a:outerShdw blurRad="40000" dist="23000" dir="5400000" '
'rotWithShape="0"><a:srgbClr val="000000">'
'<a:alpha val="35000"/></a:srgbClr></a:outerShdw></a:effectLst>'
'<a:scene3d><a:camera prst="orthographicFront">'
'<a:rot lat="0" lon="0" rev="0"/></a:camera>'
'<a:lightRig rig="threePt" dir="t">'
'<a:rot lat="0" lon="0" rev="1200000"/></a:lightRig>'
'</a:scene3d><a:sp3d><a:bevelT w="63500" h="25400"/>'
'</a:sp3d></a:effectStyle></a:effectStyleLst>'
'<a:bgFillStyleLst><a:solidFill><a:schemeClr val="phClr"/>'
'</a:solidFill><a:gradFill rotWithShape="1"><a:gsLst>'
'<a:gs pos="0"><a:schemeClr val="phClr"><a:tint val="40000"/>'
'<a:satMod val="350000"/></a:schemeClr></a:gs>'
'<a:gs pos="40000"><a:schemeClr val="phClr"><a:tint val="45000"/>'
'<a:shade val="99000"/><a:satMod val="350000"/>'
'</a:schemeClr></a:gs>'
'<a:gs pos="100000"><a:schemeClr val="phClr">'
'<a:shade val="20000"/><a:satMod val="255000"/>'
'</a:schemeClr></a:gs></a:gsLst>'
'<a:path path="circle">'
'<a:fillToRect l="50000" t="-80000" r="50000" b="180000"/>'
'</a:path>'
'</a:gradFill><a:gradFill rotWithShape="1"><a:gsLst>'
'<a:gs pos="0"><a:schemeClr val="phClr"><a:tint val="80000"/>'
'<a:satMod val="300000"/></a:schemeClr></a:gs>'
'<a:gs pos="100000"><a:schemeClr val="phClr">'
'<a:shade val="30000"/><a:satMod val="200000"/>'
'</a:schemeClr></a:gs></a:gsLst>'
'<a:path path="circle">'
'<a:fillToRect l="50000" t="50000" r="50000" b="50000"/></a:path>'
'</a:gradFill></a:bgFillStyleLst></a:fmtScheme>'
'</a:themeElements>'
'<a:objectDefaults/><a:extraClrSchemeLst/>'
'</a:theme>')
return get_document_content(xml_node)
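# Illustrative use (archive path assumed from the OOXML package layout):
#   zip_handle.writestr('xl/theme/theme1.xml', write_theme())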
| mit | 8,876,829,156,971,683,000 | 52.787129 | 79 | 0.546157 | false |
mxOBS/deb-pkg_trusty_chromium-browser | third_party/webpagereplay/sslproxy.py | 2 | 3073 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extends BaseHTTPRequestHandler with SSL certificate generation."""
import logging
import socket
import certutils
class SslHandshakeHandler:
"""Handles Server Name Indication (SNI) using dummy certs."""
def setup(self):
"""Sets up connection providing the certificate to the client."""
    # One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, or TLSv1_METHOD
context = certutils.get_ssl_context()
def handle_servername(connection):
"""A SNI callback that happens during do_handshake()."""
try:
host = connection.get_servername()
if host:
cert_str = (
self.server.http_archive_fetch.http_archive.get_certificate(host))
new_context = certutils.get_ssl_context()
cert = certutils.load_cert(cert_str)
new_context.use_certificate(cert)
new_context.use_privatekey_file(self.server.ca_cert_path)
connection.set_context(new_context)
return new_context
# else: fail with 'no shared cipher'
except Exception, e:
# Do not leak any exceptions or else openssl crashes.
        logging.error('Exception in SNI handler: %s', e)
context.set_tlsext_servername_callback(handle_servername)
self.connection = certutils.get_ssl_connection(context, self.connection)
self.connection.set_accept_state()
try:
self.connection.do_handshake()
except certutils.Error, v:
host = self.connection.get_servername()
if not host:
logging.error('Dropping request without SNI')
return ''
raise certutils.Error('SSL handshake error %s: %s' % (host, str(v)))
# Re-wrap the read/write streams with our new connection.
self.rfile = socket._fileobject(self.connection, 'rb', self.rbufsize,
close=False)
self.wfile = socket._fileobject(self.connection, 'wb', self.wbufsize,
close=False)
def finish(self):
self.connection.shutdown()
self.connection.close()
def wrap_handler(handler_class):
"""Wraps a BaseHTTPHandler wtih SSL MITM certificates."""
if certutils.openssl_import_error:
raise certutils.openssl_import_error
class WrappedHandler(SslHandshakeHandler, handler_class):
def setup(self):
handler_class.setup(self)
SslHandshakeHandler.setup(self)
def finish(self):
handler_class.finish(self)
SslHandshakeHandler.finish(self)
return WrappedHandler
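# Illustrative usage (handler class hypothetical): pass the wrapped class to
# an HTTPServer in place of the original request handler, e.g.
#   HandlerWithTLS = wrap_handler(MyArchiveRequestHandler)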
| bsd-3-clause | 3,476,511,761,792,407,600 | 35.152941 | 80 | 0.684022 | false |
bgris/ODL_bgris | lib/python3.5/site-packages/pygments/lexers/data.py | 25 | 18771 | # -*- coding: utf-8 -*-
"""
pygments.lexers.data
~~~~~~~~~~~~~~~~~~~~
Lexers for data file format.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
include, bygroups, inherit
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Punctuation, Literal, Error
__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
class YamlLexerContext(LexerContext):
"""Indentation context for the YAML lexer."""
def __init__(self, *args, **kwds):
super(YamlLexerContext, self).__init__(*args, **kwds)
self.indent_stack = []
self.indent = -1
self.next_indent = 0
self.block_scalar_indent = None
class YamlLexer(ExtendedRegexLexer):
"""
Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
language.
.. versionadded:: 0.11
"""
name = 'YAML'
aliases = ['yaml']
filenames = ['*.yaml', '*.yml']
mimetypes = ['text/x-yaml']
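    # Illustrative usage (formatter choice hypothetical):
    #   highlight(yaml_source, YamlLexer(), HtmlFormatter())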
def something(token_class):
"""Do not produce empty tokens."""
def callback(lexer, match, context):
text = match.group()
if not text:
return
yield match.start(), token_class, text
context.pos = match.end()
return callback
def reset_indent(token_class):
"""Reset the indentation levels."""
def callback(lexer, match, context):
text = match.group()
context.indent_stack = []
context.indent = -1
context.next_indent = 0
context.block_scalar_indent = None
yield match.start(), token_class, text
context.pos = match.end()
return callback
def save_indent(token_class, start=False):
"""Save a possible indentation level."""
def callback(lexer, match, context):
text = match.group()
extra = ''
if start:
context.next_indent = len(text)
if context.next_indent < context.indent:
while context.next_indent < context.indent:
context.indent = context.indent_stack.pop()
if context.next_indent > context.indent:
extra = text[context.indent:]
text = text[:context.indent]
else:
context.next_indent += len(text)
if text:
yield match.start(), token_class, text
if extra:
yield match.start()+len(text), token_class.Error, extra
context.pos = match.end()
return callback
def set_indent(token_class, implicit=False):
"""Set the previously saved indentation level."""
def callback(lexer, match, context):
text = match.group()
if context.indent < context.next_indent:
context.indent_stack.append(context.indent)
context.indent = context.next_indent
if not implicit:
context.next_indent += len(text)
yield match.start(), token_class, text
context.pos = match.end()
return callback
def set_block_scalar_indent(token_class):
"""Set an explicit indentation level for a block scalar."""
def callback(lexer, match, context):
text = match.group()
context.block_scalar_indent = None
if not text:
return
increment = match.group(1)
if increment:
current_indent = max(context.indent, 0)
increment = int(increment)
context.block_scalar_indent = current_indent + increment
if text:
yield match.start(), token_class, text
context.pos = match.end()
return callback
def parse_block_scalar_empty_line(indent_token_class, content_token_class):
"""Process an empty line in a block scalar."""
def callback(lexer, match, context):
text = match.group()
if (context.block_scalar_indent is None or
len(text) <= context.block_scalar_indent):
if text:
yield match.start(), indent_token_class, text
else:
indentation = text[:context.block_scalar_indent]
content = text[context.block_scalar_indent:]
yield match.start(), indent_token_class, indentation
yield (match.start()+context.block_scalar_indent,
content_token_class, content)
context.pos = match.end()
return callback
def parse_block_scalar_indent(token_class):
"""Process indentation spaces in a block scalar."""
def callback(lexer, match, context):
text = match.group()
if context.block_scalar_indent is None:
if len(text) <= max(context.indent, 0):
context.stack.pop()
context.stack.pop()
return
context.block_scalar_indent = len(text)
else:
if len(text) < context.block_scalar_indent:
context.stack.pop()
context.stack.pop()
return
if text:
yield match.start(), token_class, text
context.pos = match.end()
return callback
def parse_plain_scalar_indent(token_class):
"""Process indentation spaces in a plain scalar."""
def callback(lexer, match, context):
text = match.group()
if len(text) <= context.indent:
context.stack.pop()
context.stack.pop()
return
if text:
yield match.start(), token_class, text
context.pos = match.end()
return callback
tokens = {
# the root rules
'root': [
# ignored whitespaces
(r'[ ]+(?=#|$)', Text),
# line breaks
(r'\n+', Text),
# a comment
(r'#[^\n]*', Comment.Single),
# the '%YAML' directive
(r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
# the %TAG directive
(r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
# document start and document end indicators
(r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
'block-line'),
# indentation spaces
(r'[ ]*(?!\s|$)', save_indent(Text, start=True),
('block-line', 'indentation')),
],
# trailing whitespaces after directives or a block scalar indicator
'ignored-line': [
# ignored whitespaces
(r'[ ]+(?=#|$)', Text),
# a comment
(r'#[^\n]*', Comment.Single),
# line break
(r'\n', Text, '#pop:2'),
],
# the %YAML directive
'yaml-directive': [
# the version number
(r'([ ]+)([0-9]+\.[0-9]+)',
bygroups(Text, Number), 'ignored-line'),
],
        # the %TAG directive
'tag-directive': [
# a tag handle and the corresponding prefix
(r'([ ]+)(!|![\w-]*!)'
r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
bygroups(Text, Keyword.Type, Text, Keyword.Type),
'ignored-line'),
],
# block scalar indicators and indentation spaces
'indentation': [
# trailing whitespaces are ignored
(r'[ ]*$', something(Text), '#pop:2'),
            # whitespaces preceding block collection indicators
(r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
# block collection indicators
(r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
            # the beginning of a block line
(r'[ ]*', save_indent(Text), '#pop'),
],
# an indented line in the block context
'block-line': [
# the line end
(r'[ ]*(?=#|$)', something(Text), '#pop'),
# whitespaces separating tokens
(r'[ ]+', Text),
            # tags, anchors and aliases
include('descriptors'),
# block collections and scalars
include('block-nodes'),
# flow collections and quoted scalars
include('flow-nodes'),
# a plain scalar
(r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
something(Name.Variable),
'plain-scalar-in-block-context'),
],
# tags, anchors, aliases
'descriptors': [
# a full-form tag
(r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
# a tag in the form '!', '!suffix' or '!handle!suffix'
(r'!(?:[\w-]+!)?'
r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type),
# an anchor
(r'&[\w-]+', Name.Label),
# an alias
(r'\*[\w-]+', Name.Variable),
],
# block collections and scalars
'block-nodes': [
# implicit key
(r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
# literal and folded scalars
(r'[|>]', Punctuation.Indicator,
('block-scalar-content', 'block-scalar-header')),
],
# flow collections and quoted scalars
'flow-nodes': [
# a flow sequence
(r'\[', Punctuation.Indicator, 'flow-sequence'),
# a flow mapping
(r'\{', Punctuation.Indicator, 'flow-mapping'),
# a single-quoted scalar
(r'\'', String, 'single-quoted-scalar'),
# a double-quoted scalar
(r'\"', String, 'double-quoted-scalar'),
],
# the content of a flow collection
'flow-collection': [
# whitespaces
(r'[ ]+', Text),
# line breaks
(r'\n+', Text),
# a comment
(r'#[^\n]*', Comment.Single),
# simple indicators
(r'[?:,]', Punctuation.Indicator),
# tags, anchors and aliases
include('descriptors'),
# nested collections and quoted scalars
include('flow-nodes'),
# a plain scalar
(r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
something(Name.Variable),
'plain-scalar-in-flow-context'),
],
# a flow sequence indicated by '[' and ']'
'flow-sequence': [
# include flow collection rules
include('flow-collection'),
# the closing indicator
(r'\]', Punctuation.Indicator, '#pop'),
],
# a flow mapping indicated by '{' and '}'
'flow-mapping': [
# include flow collection rules
include('flow-collection'),
# the closing indicator
(r'\}', Punctuation.Indicator, '#pop'),
],
# block scalar lines
'block-scalar-content': [
# line break
(r'\n', Text),
# empty line
(r'^[ ]+$',
parse_block_scalar_empty_line(Text, Name.Constant)),
# indentation spaces (we may leave the state here)
(r'^[ ]*', parse_block_scalar_indent(Text)),
# line content
(r'[\S\t ]+', Name.Constant),
],
        # the header of a literal or folded scalar
'block-scalar-header': [
# indentation indicator followed by chomping flag
(r'([1-9])?[+-]?(?=[ ]|$)',
set_block_scalar_indent(Punctuation.Indicator),
'ignored-line'),
# chomping flag followed by indentation indicator
(r'[+-]?([1-9])?(?=[ ]|$)',
set_block_scalar_indent(Punctuation.Indicator),
'ignored-line'),
],
# ignored and regular whitespaces in quoted scalars
'quoted-scalar-whitespaces': [
# leading and trailing whitespaces are ignored
(r'^[ ]+', Text),
(r'[ ]+$', Text),
# line breaks are ignored
(r'\n+', Text),
# other whitespaces are a part of the value
(r'[ ]+', Name.Variable),
],
# single-quoted scalars
'single-quoted-scalar': [
# include whitespace and line break rules
include('quoted-scalar-whitespaces'),
# escaping of the quote character
(r'\'\'', String.Escape),
# regular non-whitespace characters
(r'[^\s\']+', String),
# the closing quote
(r'\'', String, '#pop'),
],
# double-quoted scalars
'double-quoted-scalar': [
# include whitespace and line break rules
include('quoted-scalar-whitespaces'),
# escaping of special characters
(r'\\[0abt\tn\nvfre "\\N_LP]', String),
# escape codes
(r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
String.Escape),
# regular non-whitespace characters
(r'[^\s"\\]+', String),
# the closing quote
(r'"', String, '#pop'),
],
# the beginning of a new line while scanning a plain scalar
'plain-scalar-in-block-context-new-line': [
# empty lines
(r'^[ ]+$', Text),
# line breaks
(r'\n+', Text),
# document start and document end indicators
(r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
# indentation spaces (we may leave the block line state here)
(r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
],
# a plain scalar in the block context
'plain-scalar-in-block-context': [
# the scalar ends with the ':' indicator
(r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
# the scalar ends with whitespaces followed by a comment
(r'[ ]+(?=#)', Text, '#pop'),
# trailing whitespaces are ignored
(r'[ ]+$', Text),
# line breaks are ignored
(r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
# other whitespaces are a part of the value
(r'[ ]+', Literal.Scalar.Plain),
# regular non-whitespace characters
(r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
],
# a plain scalar is the flow context
'plain-scalar-in-flow-context': [
# the scalar ends with an indicator character
(r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
# the scalar ends with a comment
(r'[ ]+(?=#)', Text, '#pop'),
# leading and trailing whitespaces are ignored
(r'^[ ]+', Text),
(r'[ ]+$', Text),
# line breaks are ignored
(r'\n+', Text),
# other whitespaces are a part of the value
(r'[ ]+', Name.Variable),
# regular non-whitespace characters
(r'[^\s,:?\[\]{}]+', Name.Variable),
],
}
def get_tokens_unprocessed(self, text=None, context=None):
if context is None:
context = YamlLexerContext(text, 0)
return super(YamlLexer, self).get_tokens_unprocessed(text, context)
class JsonLexer(RegexLexer):
"""
For JSON data structures.
.. versionadded:: 1.5
"""
name = 'JSON'
aliases = ['json']
filenames = ['*.json']
mimetypes = ['application/json']
flags = re.DOTALL
# integer part of a number
int_part = r'-?(0|[1-9]\d*)'
# fractional part of a number
frac_part = r'\.\d+'
# exponential part of a number
exp_part = r'[eE](\+|-)?\d+'
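    # Illustrative matches (not part of the original source): int_part
    # matches '0' and '-42', frac_part matches '.25', and exp_part
    # matches 'e+10'.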
tokens = {
'whitespace': [
(r'\s+', Text),
],
# represents a simple terminal value
'simplevalue': [
(r'(true|false|null)\b', Keyword.Constant),
(('%(int_part)s(%(frac_part)s%(exp_part)s|'
'%(exp_part)s|%(frac_part)s)') % vars(),
Number.Float),
(int_part, Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
],
# the right hand side of an object, after the attribute name
'objectattribute': [
include('value'),
(r':', Punctuation),
# comma terminates the attribute but expects more
(r',', Punctuation, '#pop'),
# a closing bracket terminates the entire object, so pop twice
(r'\}', Punctuation, '#pop:2'),
],
# a json object - { attr, attr, ... }
'objectvalue': [
include('whitespace'),
(r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
(r'\}', Punctuation, '#pop'),
],
        # json array - [ value, value, ... ]
'arrayvalue': [
include('whitespace'),
include('value'),
(r',', Punctuation),
(r'\]', Punctuation, '#pop'),
],
# a json value - either a simple value or a complex value (object or array)
'value': [
include('whitespace'),
include('simplevalue'),
(r'\{', Punctuation, 'objectvalue'),
(r'\[', Punctuation, 'arrayvalue'),
],
        # the root of a json document should be a value
'root': [
include('value'),
],
}
class JsonBareObjectLexer(JsonLexer):
"""
For JSON data structures (with missing object curly braces).
.. versionadded:: 2.2
"""
name = 'JSONBareObject'
aliases = ['json-object']
filenames = []
mimetypes = ['application/json-object']
tokens = {
'root': [
(r'\}', Error),
include('objectvalue'),
],
'objectattribute': [
(r'\}', Error),
inherit,
],
}
class JsonLdLexer(JsonLexer):
"""
For `JSON-LD <http://json-ld.org/>`_ linked data.
.. versionadded:: 2.0
"""
name = 'JSON-LD'
aliases = ['jsonld', 'json-ld']
filenames = ['*.jsonld']
mimetypes = ['application/ld+json']
tokens = {
'objectvalue': [
(r'"@(context|id|value|language|type|container|list|set|'
r'reverse|index|base|vocab|graph)"', Name.Decorator,
'objectattribute'),
inherit,
],
}
| gpl-3.0 | 5,415,311,125,145,537,000 | 32.821622 | 83 | 0.488679 | false |
Shiroy/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/example/origin_check_wsh.py | 516 | 1992 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This example is derived from test/testdata/handlers/origin_check_wsh.py.
def web_socket_do_extra_handshake(request):
if request.ws_origin == 'http://example.com':
return
raise ValueError('Unacceptable origin: %r' % request.ws_origin)
def web_socket_transfer_data(request):
request.connection.write('origin_check_wsh.py is called for %s, %s' %
(request.ws_resource, request.ws_protocol))
# vi:sts=4 sw=4 et
| mpl-2.0 | 9,109,443,037,294,291,000 | 44.272727 | 74 | 0.75502 | false |
ujjwalwahi/odoo | openerp/report/printscreen/__init__.py | 381 | 1203 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ps_list
import ps_form
""" A special report, that is automatically formatted to look like the
screen contents of Form/List Views.
"""
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -700,976,567,683,866,500 | 37.806452 | 79 | 0.620948 | false |
painnick/linkToEver | slackutil.py | 1 | 1426 | import logging
import time
import slackweb
WEBHOOK = None
logging.config.fileConfig('log.config')
logger = logging.getLogger('slackLogger')
def message(title, link):
attachments = [
{
'fallback': 'Save links to Evernote.',
'color': 'good',
'title': title,
'title_link': link,
'text': link
}
]
_hook(attachments)
def warning(msg):
attachments = [
{
'fallback': msg,
'color': 'warning',
'text': msg,
'ts': time.time()
}
]
_hook(attachments)
def danger(msg, e=None):
attachments = [
{
'fallback': msg,
'color': 'danger',
'text': msg,
'ts': time.time(),
'fields': [
{
'title': 'Exception',
'value': e,
'short': False
}
]
}
]
_hook(attachments)
_slack = None
def _hook(attachments):
    # Lazily create and cache the Slack client, then post the attachments.
    global _slack
    if WEBHOOK is not None and WEBHOOK != '':
        if _slack is None:
            _slack = slackweb.Slack(url=WEBHOOK)
        _slack.notify(attachments=attachments)
        logger.debug('Sent message to Slack.')
else:
logger.info('Set Slack Web incomming webhook link to config.ini!')
if __name__ == '__main__':
WEBHOOK = None
danger('Cannot read links')
| gpl-3.0 | 632,782,515,103,001,700 | 19.084507 | 74 | 0.475456 | false |
pabulumm/neighbors | lib/python3.4/site-packages/django/utils/termcolors.py | 76 | 7612 | """
termcolors.py
"""
from django.utils import six
color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
foreground = {color_names[x]: '3%s' % x for x in range(8)}
background = {color_names[x]: '4%s' % x for x in range(8)}
RESET = '0'
opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
def colorize(text='', opts=(), **kwargs):
"""
Returns your text, enclosed in ANSI graphics codes.
Depends on the keyword arguments 'fg' and 'bg', and the contents of
the opts tuple/list.
Returns the RESET code if no parameters are given.
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold'
'underscore'
'blink'
'reverse'
'conceal'
'noreset' - string will not be auto-terminated with the RESET code
Examples:
colorize('hello', fg='red', bg='blue', opts=('blink',))
colorize()
colorize('goodbye', opts=('underscore',))
print(colorize('first line', fg='red', opts=('noreset',)))
print('this should be red too')
print(colorize('and so should this'))
print('this should not be red')
"""
code_list = []
if text == '' and len(opts) == 1 and opts[0] == 'reset':
return '\x1b[%sm' % RESET
for k, v in six.iteritems(kwargs):
if k == 'fg':
code_list.append(foreground[v])
elif k == 'bg':
code_list.append(background[v])
for o in opts:
if o in opt_dict:
code_list.append(opt_dict[o])
if 'noreset' not in opts:
text = '%s\x1b[%sm' % (text or '', RESET)
return '%s%s' % (('\x1b[%sm' % ';'.join(code_list)), text or '')
def make_style(opts=(), **kwargs):
"""
Returns a function with default parameters for colorize()
Example:
bold_red = make_style(opts=('bold',), fg='red')
print(bold_red('hello'))
KEYWORD = make_style(fg='yellow')
COMMENT = make_style(fg='blue', opts=('bold',))
"""
return lambda text: colorize(text, opts, **kwargs)
NOCOLOR_PALETTE = 'nocolor'
DARK_PALETTE = 'dark'
LIGHT_PALETTE = 'light'
PALETTES = {
NOCOLOR_PALETTE: {
'ERROR': {},
'SUCCESS': {},
'WARNING': {},
'NOTICE': {},
'SQL_FIELD': {},
'SQL_COLTYPE': {},
'SQL_KEYWORD': {},
'SQL_TABLE': {},
'HTTP_INFO': {},
'HTTP_SUCCESS': {},
'HTTP_REDIRECT': {},
'HTTP_NOT_MODIFIED': {},
'HTTP_BAD_REQUEST': {},
'HTTP_NOT_FOUND': {},
'HTTP_SERVER_ERROR': {},
'MIGRATE_HEADING': {},
'MIGRATE_LABEL': {},
'MIGRATE_SUCCESS': {},
'MIGRATE_FAILURE': {},
},
DARK_PALETTE: {
'ERROR': {'fg': 'red', 'opts': ('bold',)},
'SUCCESS': {'fg': 'green', 'opts': ('bold',)},
'WARNING': {'fg': 'yellow', 'opts': ('bold',)},
'NOTICE': {'fg': 'red'},
'SQL_FIELD': {'fg': 'green', 'opts': ('bold',)},
'SQL_COLTYPE': {'fg': 'green'},
'SQL_KEYWORD': {'fg': 'yellow'},
'SQL_TABLE': {'opts': ('bold',)},
'HTTP_INFO': {'opts': ('bold',)},
'HTTP_SUCCESS': {},
'HTTP_REDIRECT': {'fg': 'green'},
'HTTP_NOT_MODIFIED': {'fg': 'cyan'},
'HTTP_BAD_REQUEST': {'fg': 'red', 'opts': ('bold',)},
'HTTP_NOT_FOUND': {'fg': 'yellow'},
'HTTP_SERVER_ERROR': {'fg': 'magenta', 'opts': ('bold',)},
'MIGRATE_HEADING': {'fg': 'cyan', 'opts': ('bold',)},
'MIGRATE_LABEL': {'opts': ('bold',)},
'MIGRATE_SUCCESS': {'fg': 'green', 'opts': ('bold',)},
'MIGRATE_FAILURE': {'fg': 'red', 'opts': ('bold',)},
},
LIGHT_PALETTE: {
'ERROR': {'fg': 'red', 'opts': ('bold',)},
'SUCCESS': {'fg': 'green', 'opts': ('bold',)},
'WARNING': {'fg': 'yellow', 'opts': ('bold',)},
'NOTICE': {'fg': 'red'},
'SQL_FIELD': {'fg': 'green', 'opts': ('bold',)},
'SQL_COLTYPE': {'fg': 'green'},
'SQL_KEYWORD': {'fg': 'blue'},
'SQL_TABLE': {'opts': ('bold',)},
'HTTP_INFO': {'opts': ('bold',)},
'HTTP_SUCCESS': {},
'HTTP_REDIRECT': {'fg': 'green', 'opts': ('bold',)},
'HTTP_NOT_MODIFIED': {'fg': 'green'},
'HTTP_BAD_REQUEST': {'fg': 'red', 'opts': ('bold',)},
'HTTP_NOT_FOUND': {'fg': 'red'},
'HTTP_SERVER_ERROR': {'fg': 'magenta', 'opts': ('bold',)},
'MIGRATE_HEADING': {'fg': 'cyan', 'opts': ('bold',)},
'MIGRATE_LABEL': {'opts': ('bold',)},
'MIGRATE_SUCCESS': {'fg': 'green', 'opts': ('bold',)},
'MIGRATE_FAILURE': {'fg': 'red', 'opts': ('bold',)},
}
}
DEFAULT_PALETTE = DARK_PALETTE
def parse_color_setting(config_string):
"""Parse a DJANGO_COLORS environment variable to produce the system palette
The general form of a palette definition is:
"palette;role=fg;role=fg/bg;role=fg,option,option;role=fg/bg,option,option"
where:
palette is a named palette; one of 'light', 'dark', or 'nocolor'.
role is a named style used by Django
        fg is a foreground color.
        bg is a background color.
        option is a display option.
Specifying a named palette is the same as manually specifying the individual
definitions for each role. Any individual definitions following the palette
definition will augment the base palette definition.
Valid roles:
'error', 'notice', 'sql_field', 'sql_coltype', 'sql_keyword', 'sql_table',
'http_info', 'http_success', 'http_redirect', 'http_bad_request',
'http_not_found', 'http_server_error'
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold', 'underscore', 'blink', 'reverse', 'conceal'
"""
if not config_string:
return PALETTES[DEFAULT_PALETTE]
# Split the color configuration into parts
parts = config_string.lower().split(';')
palette = PALETTES[NOCOLOR_PALETTE].copy()
for part in parts:
if part in PALETTES:
# A default palette has been specified
palette.update(PALETTES[part])
elif '=' in part:
# Process a palette defining string
definition = {}
# Break the definition into the role,
# plus the list of specific instructions.
# The role must be in upper case
role, instructions = part.split('=')
role = role.upper()
styles = instructions.split(',')
styles.reverse()
# The first instruction can contain a slash
# to break apart fg/bg.
colors = styles.pop().split('/')
colors.reverse()
fg = colors.pop()
if fg in color_names:
definition['fg'] = fg
if colors and colors[-1] in color_names:
definition['bg'] = colors[-1]
# All remaining instructions are options
opts = tuple(s for s in styles if s in opt_dict.keys())
if opts:
definition['opts'] = opts
# The nocolor palette has all available roles.
# Use that palette as the basis for determining
# if the role is valid.
if role in PALETTES[NOCOLOR_PALETTE] and definition:
palette[role] = definition
# If there are no colors specified, return the empty palette.
if palette == PALETTES[NOCOLOR_PALETTE]:
return None
return palette
| bsd-3-clause | -5,674,969,603,637,028,000 | 33.6 | 89 | 0.526406 | false |
spiegela/elasticsearch | dev-tools/create_bwc_index_with_conficting_mappings.py | 217 | 2963 | import create_bwc_index
import logging
import os
import random
import shutil
import subprocess
import sys
import tempfile
def fetch_version(version):
logging.info('fetching ES version %s' % version)
if subprocess.call([sys.executable, os.path.join(os.path.split(sys.argv[0])[0], 'get-bwc-version.py'), version]) != 0:
raise RuntimeError('failed to download ES version %s' % version)
def main():
'''
Creates a static back compat index (.zip) with conflicting mappings.
'''
logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO,
datefmt='%Y-%m-%d %I:%M:%S %p')
logging.getLogger('elasticsearch').setLevel(logging.ERROR)
logging.getLogger('urllib3').setLevel(logging.WARN)
tmp_dir = tempfile.mkdtemp()
try:
data_dir = os.path.join(tmp_dir, 'data')
repo_dir = os.path.join(tmp_dir, 'repo')
logging.info('Temp data dir: %s' % data_dir)
logging.info('Temp repo dir: %s' % repo_dir)
version = '1.7.0'
classifier = 'conflicting-mappings-%s' % version
index_name = 'index-%s' % classifier
# Download old ES releases if necessary:
release_dir = os.path.join('backwards', 'elasticsearch-%s' % version)
if not os.path.exists(release_dir):
fetch_version(version)
node = create_bwc_index.start_node(version, release_dir, data_dir, repo_dir, cluster_name=index_name)
client = create_bwc_index.create_client()
put_conflicting_mappings(client, index_name)
create_bwc_index.shutdown_node(node)
print('%s server output:\n%s' % (version, node.stdout.read().decode('utf-8')))
node = None
create_bwc_index.compress_index(classifier, tmp_dir, 'core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade')
finally:
if node is not None:
create_bwc_index.shutdown_node(node)
shutil.rmtree(tmp_dir)
def put_conflicting_mappings(client, index_name):
client.indices.delete(index=index_name, ignore=404)
logging.info('Create single shard test index')
mappings = {}
# backwardcompat test for conflicting mappings, see #11857
mappings['x'] = {
'analyzer': 'standard',
"properties": {
"foo": {
"type": "string"
}
}
}
mappings['y'] = {
'analyzer': 'standard',
"properties": {
"foo": {
"type": "date"
}
}
}
client.indices.create(index=index_name, body={
'settings': {
'number_of_shards': 1,
'number_of_replicas': 0
},
'mappings': mappings
})
health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
assert health['timed_out'] == False, 'cluster health timed out %s' % health
num_docs = random.randint(2000, 3000)
create_bwc_index.index_documents(client, index_name, 'doc', num_docs)
logging.info('Running basic asserts on the data added')
create_bwc_index.run_basic_asserts(client, index_name, 'doc', num_docs)
if __name__ == '__main__':
main()
| apache-2.0 | -425,800,938,237,197,000 | 30.860215 | 130 | 0.658117 | false |
sideeffects/stats_houdini | houdini_stats/migrations/0001_initial.py | 1 | 7835 | # encoding: utf-8
import datetime
import south.db
from south.db import dbs
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'MachineConfig'
db = dbs['stats']
db.dry_run = south.db.db.dry_run
db.create_table('houdini_stats_machineconfig', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('hardware_id', self.gf('django.db.models.fields.IntegerField')()),
('ip_address', self.gf('django.db.models.fields.CharField')(max_length=25)),
('last_active_date', self.gf('django.db.models.fields.DateTimeField')()),
('config_hash', self.gf('django.db.models.fields.CharField')(max_length=5)),
('houdini_major_version', self.gf('django.db.models.fields.IntegerField')(default=0)),
('houdini_minor_version', self.gf('django.db.models.fields.IntegerField')(default=0)),
('houdini_build_number', self.gf('django.db.models.fields.CharField')(max_length=10)),
('product', self.gf('django.db.models.fields.CharField')(max_length=20)),
('graphics_card', self.gf('django.db.models.fields.CharField')(max_length=20)),
('graphics_card_version', self.gf('django.db.models.fields.CharField')(max_length=20)),
('operating_system', self.gf('django.db.models.fields.CharField')(max_length=20)),
('system_memory', self.gf('django.db.models.fields.FloatField')(default=0)),
('system_resolution', self.gf('django.db.models.fields.CharField')(max_length=10)),
('number_of_processors', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('cpu_info', self.gf('django.db.models.fields.CharField')(max_length=10)),
))
db.send_create_signal('houdini_stats', ['MachineConfig'])
# Adding model 'HoudiniCrash'
db.create_table('houdini_stats_houdinicrash', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('machine_config', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['houdini_stats.MachineConfig'])),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('stack_trace', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
))
db.send_create_signal('houdini_stats', ['HoudiniCrash'])
# Adding model 'NodeTypeUsage'
db.create_table('houdini_stats_nodetypeusage', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('machine_config', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['houdini_stats.MachineConfig'])),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('node_type', self.gf('django.db.models.fields.CharField')(max_length=20)),
('count', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('is_builtin', self.gf('django.db.models.fields.BooleanField')(default=True)),
('is_asset', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('houdini_stats', ['NodeTypeUsage'])
# Adding model 'Uptime'
db.create_table('houdini_stats_uptime', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('machine_config', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['houdini_stats.MachineConfig'])),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('number_of_seconds', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
))
db.send_create_signal('houdini_stats', ['Uptime'])
def backwards(self, orm):
db = dbs['stats']
db.dry_run = south.db.db.dry_run
# Deleting model 'MachineConfig'
db.delete_table('houdini_stats_machineconfig')
# Deleting model 'HoudiniCrash'
db.delete_table('houdini_stats_houdinicrash')
# Deleting model 'NodeTypeUsage'
db.delete_table('houdini_stats_nodetypeusage')
# Deleting model 'Uptime'
db.delete_table('houdini_stats_uptime')
models = {
'houdini_stats.houdinicrash': {
'Meta': {'ordering': "('date',)", 'object_name': 'HoudiniCrash'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'machine_config': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['houdini_stats.MachineConfig']"}),
'stack_trace': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'houdini_stats.machineconfig': {
'Meta': {'ordering': "('last_active_date',)", 'object_name': 'MachineConfig'},
'config_hash': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'cpu_info': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'graphics_card': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'graphics_card_version': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'hardware_id': ('django.db.models.fields.IntegerField', [], {}),
'houdini_build_number': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'houdini_major_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'houdini_minor_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'last_active_date': ('django.db.models.fields.DateTimeField', [], {}),
'number_of_processors': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'operating_system': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'product': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'system_memory': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'system_resolution': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'houdini_stats.nodetypeusage': {
'Meta': {'ordering': "('date',)", 'object_name': 'NodeTypeUsage'},
'count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_asset': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_builtin': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'machine_config': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['houdini_stats.MachineConfig']"}),
'node_type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'houdini_stats.uptime': {
'Meta': {'ordering': "('date', 'number_of_seconds')", 'object_name': 'Uptime'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'machine_config': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['houdini_stats.MachineConfig']"}),
'number_of_seconds': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
}
}
complete_apps = ['houdini_stats']
| mit | -5,524,620,849,634,745,000 | 59.736434 | 127 | 0.594767 | false |
chriskmanx/qmole | QMOLEDEV/boost_1_49_0/tools/build/v2/tools/notfile.py | 71 | 1728 | # Status: ported.
# Base revision: 64429.
#
# Copyright (c) 2005-2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import b2.build.type as type
import b2.build.generators as generators
import b2.build.virtual_target as virtual_target
import b2.build.toolset as toolset
import b2.build.targets as targets
from b2.manager import get_manager
from b2.util import bjam_signature
type.register("NOTFILE_MAIN")
class NotfileGenerator(generators.Generator):
def run(self, project, name, ps, sources):
action_name = ps.get('action')[0]
if action_name[0] == '@':
action = virtual_target.Action(get_manager(), sources, action_name[1:], ps)
else:
action = virtual_target.Action(get_manager(), sources, "notfile.run", ps)
return [get_manager().virtual_targets().register(
virtual_target.NotFileTarget(name, project, action))]
generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"]))
toolset.flags("notfile.run", "ACTION", [], ["<action>"])
get_manager().engine().register_action("notfile.run", "$(ACTION)")
@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"],
["default_build", "*"]))
def notfile(target_name, action, sources, requirements, default_build):
requirements.append("<action>" + action)
return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements,
default_build, [])
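# Illustrative Jamfile usage (hypothetical target/action names, not from the
# original source):
#   notfile say-hello : @greet ;
#   actions greet { echo "hello" }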
get_manager().projects().add_rule("notfile", notfile)
| gpl-3.0 | 1,317,226,299,949,840,400 | 32.882353 | 94 | 0.663194 | false |
rwl/muntjac | muntjac/demo/sampler/features/commons/BrowserInformation.py | 1 | 1077 |
from muntjac.demo.sampler.APIResource import APIResource
from muntjac.demo.sampler.Feature import Feature, Version
from muntjac.terminal.gwt.server.web_browser import WebBrowser
class BrowserInformation(Feature):
def getDescription(self):
return ('Browser differences are mostly hidden by Muntjac but in some '
'cases it is valuable to get information on the browser the user '
'is using. In themes special CSS rules are used but it is also '
'possible to get information about the browser in the application '
'code. This sample displays the browser name, ip address and the '
'screen size you are using, and your TimeZone offset. The '
'information is available on server side.')
def getName(self):
return 'Browser information'
def getSinceVersion(self):
return Version.V63
def getRelatedAPI(self):
return [APIResource(WebBrowser)]
def getRelatedFeatures(self):
return None
def getRelatedResources(self):
return None
| apache-2.0 | -7,445,853,048,497,281,000 | 28.916667 | 79 | 0.687094 | false |
BrotherPhil/django | django/conf/locale/eo/formats.py | 504 | 2335 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j\-\a \d\e F Y' # '26-a de julio 1887'
TIME_FORMAT = 'H:i' # '18:59'
DATETIME_FORMAT = r'j\-\a \d\e F Y\, \j\e H:i' # '26-a de julio 1887, je 18:59'
YEAR_MONTH_FORMAT = r'F \d\e Y' # 'julio de 1887'
MONTH_DAY_FORMAT = r'j\-\a \d\e F' # '26-a de julio'
SHORT_DATE_FORMAT = 'Y-m-d' # '1887-07-26'
SHORT_DATETIME_FORMAT = 'Y-m-d H:i' # '1887-07-26 18:59'
FIRST_DAY_OF_WEEK = 1 # Monday (lundo)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%Y-%m-%d', # '1887-07-26'
'%y-%m-%d', # '87-07-26'
'%Y %m %d', # '1887 07 26'
'%d-a de %b %Y', # '26-a de jul 1887'
'%d %b %Y', # '26 jul 1887'
'%d-a de %B %Y', # '26-a de julio 1887'
'%d %B %Y', # '26 julio 1887'
'%d %m %Y', # '26 07 1887'
]
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '18:59:00'
'%H:%M', # '18:59'
]
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '1887-07-26 18:59:00'
'%Y-%m-%d %H:%M', # '1887-07-26 18:59'
'%Y-%m-%d', # '1887-07-26'
'%Y.%m.%d %H:%M:%S', # '1887.07.26 18:59:00'
'%Y.%m.%d %H:%M', # '1887.07.26 18:59'
'%Y.%m.%d', # '1887.07.26'
'%d/%m/%Y %H:%M:%S', # '26/07/1887 18:59:00'
'%d/%m/%Y %H:%M', # '26/07/1887 18:59'
'%d/%m/%Y', # '26/07/1887'
'%y-%m-%d %H:%M:%S', # '87-07-26 18:59:00'
'%y-%m-%d %H:%M', # '87-07-26 18:59'
'%y-%m-%d', # '87-07-26'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause | 6,006,063,733,810,354,000 | 43.903846 | 80 | 0.421842 | false |
Anonymous-X6/django | tests/introspection/models.py | 216 | 1112 | from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
facebook_user_id = models.BigIntegerField(null=True)
raw_data = models.BinaryField(null=True)
small_int = models.SmallIntegerField()
class Meta:
unique_together = ('first_name', 'last_name')
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
body = models.TextField(default='')
reporter = models.ForeignKey(Reporter, models.CASCADE)
response_to = models.ForeignKey('self', models.SET_NULL, null=True)
def __str__(self):
return self.headline
class Meta:
ordering = ('headline',)
index_together = [
["headline", "pub_date"],
]
| bsd-3-clause | -3,924,223,177,433,305,000 | 28.263158 | 71 | 0.667266 | false |
barrachri/epcon | p3/management/commands/attendify_schedule_xlsx.py | 2 | 9581 | # -*- coding: utf-8 -*-
""" Update an Attendify schedule XLSX file with the currently accepted
talks.
Usage: manage.py attendify_schedule_xlsx ep2016 schedule.xlsx
Note that for Attendify you have to download the schedule before
running this script, since they add meta data to the downloaded
file which has to be kept around when uploading it again.
The script updates schedule.xlsx in place. Unfortunately, Attendify
currently has a bug in that it doesn't accept the file format
generated by openpyxl. Opening the file in LibreOffice and saving
it (without changes) fixes this as a work-around.
Attendify Worksheet "Schedule" format
-------------------------------------
Row A4: Session Title, Date (MM/DD/YYYY), Start Time (HH:MM), End
Time (HH:MM), Description (Optional), Location (Optional), Track
Title (Optional), UID (do not delete)
Row A6: Start of data
"""
from django.core.management.base import BaseCommand, CommandError
from django.core import urlresolvers
from django.utils.html import strip_tags
from conference import models
from conference import utils
import datetime
from collections import defaultdict
from optparse import make_option
import operator
import markdown2
import openpyxl
### Globals
# Debug output ?
_debug = 0
# These must match the talk .type or .admin_type
from accepted_talks import TYPE_NAMES
# Special handling of poster sessions
if 0:
# Poster sessions don't have events associated with them, so use
# these defaults
ADJUST_POSTER_SESSIONS = True
POSTER_START = datetime.datetime(2016,7,19,15,15) # TBD
POSTER_DURATION = datetime.timedelta(minutes=90)
POSTER_ROOM = u'Exhibition Hall'
else:
ADJUST_POSTER_SESSIONS = False
### Helpers
def profile_url(user):
return urlresolvers.reverse('conference-profile',
args=[user.attendeeprofile.slug])
def speaker_listing(talk):
return u', '.join(
u'<i>%s %s</i>' % (
speaker.user.first_name,
speaker.user.last_name)
for speaker in talk.get_all_speakers())
def format_text(text, remove_tags=False, output_html=True):
# Remove whitespace
text = text.strip()
if not text:
return text
# Remove links, tags, etc.
if remove_tags:
text = strip_tags(text)
# Remove quotes
if text[0] == '"' and text[-1] == '"':
text = text[1:-1]
# Convert markdown markup to HTML
if output_html:
text = markdown2.markdown(text)
return text
def talk_title(talk):
title = format_text(talk.title, remove_tags=True, output_html=False)
if not title:
return title
return title
def talk_abstract(talk):
return '<p>By %s</p>\n\n%s' % (
speaker_listing(talk),
format_text(talk.getAbstract().body))
def event_title(event):
title = format_text(event.custom, remove_tags=True, output_html=False)
if not title:
return title
return title
def event_abstract(event):
return format_text(event.abstract)
def add_event(data, talk=None, event=None, session_type='', talk_events=None):
# Determine title and abstract
title = ''
abstract = ''
if talk is None:
if event is None:
raise TypeError('need either talk or event given')
title = event_title(event)
abstract = event_abstract(event)
else:
title = talk_title(talk)
abstract = talk_abstract(talk)
if event is None:
event = talk.get_event()
# Determine time_range and room
if event is None:
if talk.type and talk.type[:1] == 'p' and ADJUST_POSTER_SESSIONS:
# Poster session
time_range = (POSTER_START,
POSTER_START + POSTER_DURATION)
room = POSTER_ROOM
else:
print ('Talk %r (type %r) does not have an event '
'associated with it; skipping' %
(title, talk.type))
return
else:
time_range = event.get_time_range()
tracks = event.tracks.all()
if tracks:
room = tracks[0].title
else:
room = u''
if talk_events is not None:
talk_events[event.pk] = event
# Don't add entries for events without title
if not title:
return
# Format time entries
date = time_range[0].strftime('%m/%d/%Y')
start_time = time_range[0].strftime('%H:%M')
stop_time = time_range[1].strftime('%H:%M')
# UID
uid = u''
data.append((
title,
date,
start_time,
stop_time,
abstract,
room,
session_type,
uid,
))
# Start row of data in spreadsheet (Python 0-based index)
SCHEDULE_WS_START_DATA = 5
# Column number of UID columns (Python 0-based index)
SCHEDULE_UID_COLUMN = 7
# Number of columns to make row unique (title, date, start, end)
SCHEDULE_UNIQUE_COLS = 4
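# Illustrative row key (hypothetical values, not from the original source):
# with the four unique columns, a schedule row is identified by a tuple like
#   (u'Opening Keynote', u'07/18/2016', u'09:00', u'09:45')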
def update_schedule(schedule_xlsx, new_data, updated_xlsx=None):
# Load workbook
wb = openpyxl.load_workbook(schedule_xlsx)
assert wb.sheetnames == [u'Instructions', u'Schedule', u'System']
ws = wb['Schedule']
# Extract data values
ws_data = list(ws.values)[SCHEDULE_WS_START_DATA:]
print ('read %i data lines' % len(ws_data))
print ('first line: %r' % ws_data[:1])
print ('last line: %r' % ws_data[-1:])
# Reconcile UIDs / talks
uids = {}
for line in ws_data:
uid = line[SCHEDULE_UID_COLUMN]
if not uid:
continue
uids[tuple(line[:SCHEDULE_UNIQUE_COLS])] = uid
# Add UID to new data
new_schedule = []
for line in new_data:
key = tuple(line[:SCHEDULE_UNIQUE_COLS])
if key not in uids:
print ('New or rescheduled talk %s found' % (key,))
uid = u''
else:
uid = uids[key]
line = tuple(line[:SCHEDULE_UID_COLUMN]) + (uid,)
new_schedule.append(line)
new_data = new_schedule
# Replace old data with new data
old_data_rows = len(ws_data)
new_data_rows = len(new_data)
print ('new data: %i data lines' % new_data_rows)
offset = SCHEDULE_WS_START_DATA + 1
print ('new_data = %i rows' % len(new_data))
for j, row in enumerate(ws[offset: offset + new_data_rows - 1]):
new_row = new_data[j]
if _debug:
print ('updating row %i with %r' % (j, new_row))
if len(row) > len(new_row):
row = row[:len(new_row)]
for i, cell in enumerate(row):
cell.value = new_row[i]
# Overwrite unused cells with None
if new_data_rows < old_data_rows:
for j, row in enumerate(ws[offset + new_data_rows + 1:
offset + old_data_rows + 1]):
if _debug:
print ('clearing row %i' % (j,))
for i, cell in enumerate(row):
cell.value = None
# Write updated data
if updated_xlsx is None:
updated_xlsx = schedule_xlsx
wb.save(updated_xlsx)
###
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
# make_option('--option',
# action='store',
# dest='option_attr',
# default=0,
# type='int',
# help='Help text',
# ),
)
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
try:
schedule_xlsx = args[1]
except IndexError:
raise CommandError('XLSX file not specified')
talks = (models.Talk.objects
.filter(conference=conference,
status='accepted'))
# Group by types
talk_types = {}
for talk in talks:
talk_type = talk.type[:1]
admin_type = talk.admin_type[:1]
if (admin_type == 'm' or
'EPS' in talk.title or
'EuroPython 20' in talk.title):
type = 'm'
elif (admin_type == 'k' or
talk.title.lower().startswith('keynote')):
#print ('found keynote: %r' % talk)
type = 'k'
else:
type = talk_type
if type in talk_types:
talk_types[type].append(talk)
else:
talk_types[type] = [talk]
# Build data for updating the spreadsheet
data = []
talk_events = {}
for type, type_name, description in TYPE_NAMES:
# Get bag with talks
bag = talk_types.get(type, [])
if not bag:
continue
# Sort by talk title using title case
bag.sort(key=lambda talk: talk_title(talk).title())
# Add talks from bag to data
for talk in bag:
add_event(data,
talk=talk,
talk_events=talk_events,
session_type=type_name)
# Add events which are not talks
for schedule in models.Schedule.objects.filter(conference=conference):
for event in models.Event.objects.filter(schedule=schedule):
if event.pk in talk_events:
continue
add_event(data, event=event)
# Update spreadsheet with new data
update_schedule(schedule_xlsx, data)
| bsd-2-clause | 3,415,934,658,834,493,000 | 28.48 | 78 | 0.569669 | false |
spulec/moto | tests/test_cloudformation/fixtures/route53_health_check.py | 2 | 1093 | from __future__ import unicode_literals
template = {
"Resources": {
"HostedZone": {
"Type": "AWS::Route53::HostedZone",
"Properties": {"Name": "my_zone"},
},
"my_health_check": {
"Type": "AWS::Route53::HealthCheck",
"Properties": {
"HealthCheckConfig": {
"FailureThreshold": 3,
"IPAddress": "10.0.0.4",
"Port": 80,
"RequestInterval": 10,
"ResourcePath": "/",
"Type": "HTTP",
}
},
},
"myDNSRecord": {
"Type": "AWS::Route53::RecordSet",
"Properties": {
"HostedZoneId": {"Ref": "HostedZone"},
"Comment": "DNS name for my instance.",
"Name": "my_record_set",
"Type": "A",
"TTL": "900",
"ResourceRecords": ["my.example.com"],
"HealthCheckId": {"Ref": "my_health_check"},
},
},
}
}
| apache-2.0 | 8,650,379,758,814,754,000 | 30.228571 | 60 | 0.383349 | false |
minhphung171093/GreenERP_V8 | openerp/workflow/__init__.py | 378 | 3793 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.workflow.service import WorkflowService
# The new API is in openerp.workflow.workflow_service
# OLD API of the Workflow
def clear_cache(cr, uid):
WorkflowService.clear_cache(cr.dbname)
def trg_write(uid, res_type, res_id, cr):
"""
Reevaluates the specified workflow instance. Thus if any condition for
    a transition has been changed in the backend, then running ``trg_write``
will move the workflow over that transition.
:param res_type: the model name
:param res_id: the model instance id the workflow belongs to
:param cr: a database cursor
"""
return WorkflowService.new(cr, uid, res_type, res_id).write()
def trg_trigger(uid, res_type, res_id, cr):
"""
Activate a trigger.
If a workflow instance is waiting for a trigger from another model, then this
trigger can be activated if its conditions are met.
:param res_type: the model name
:param res_id: the model instance id the workflow belongs to
:param cr: a database cursor
"""
return WorkflowService.new(cr, uid, res_type, res_id).trigger()
def trg_delete(uid, res_type, res_id, cr):
"""
Delete a workflow instance
:param res_type: the model name
:param res_id: the model instance id the workflow belongs to
:param cr: a database cursor
"""
return WorkflowService.new(cr, uid, res_type, res_id).delete()
def trg_create(uid, res_type, res_id, cr):
"""
Create a new workflow instance
:param res_type: the model name
    :param res_id: the model instance id to own the created workflow instance
:param cr: a database cursor
"""
return WorkflowService.new(cr, uid, res_type, res_id).create()
def trg_validate(uid, res_type, res_id, signal, cr):
"""
Fire a signal on a given workflow instance
:param res_type: the model name
:param res_id: the model instance id the workflow belongs to
    :param signal: the signal name to be fired
:param cr: a database cursor
"""
assert isinstance(signal, basestring)
return WorkflowService.new(cr, uid, res_type, res_id).validate(signal)
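# Illustrative call (hypothetical model and signal names, not from the
# original source):
#   trg_validate(uid, 'sale.order', order_id, 'order_confirm', cr)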
def trg_redirect(uid, res_type, res_id, new_rid, cr):
"""
Re-bind a workflow instance to another instance of the same model.
Make all workitems which are waiting for a (subflow) workflow instance
for the old resource point to the (first active) workflow instance for
the new resource.
:param res_type: the model name
:param res_id: the model instance id the workflow belongs to
    :param new_rid: the model instance id to own the workflow instance
:param cr: a database cursor
"""
assert isinstance(new_rid, (long, int))
return WorkflowService.new(cr, uid, res_type, res_id).redirect(new_rid)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,011,746,707,995,376,000 | 35.825243 | 81 | 0.671236 | false |
shankisg/twisted-intro | twisted/plugins/fastpoetry_plugin.py | 11 | 2309 | # This is the Twisted Fast Poetry Server, version 3.0
from zope.interface import implements
from twisted.python import usage, log
from twisted.plugin import IPlugin
from twisted.internet.protocol import ServerFactory, Protocol
from twisted.application import internet, service
# Normally we would import these classes from another module.
class PoetryProtocol(Protocol):
def connectionMade(self):
poem = self.factory.service.poem
log.msg('sending %d bytes of poetry to %s'
% (len(poem), self.transport.getPeer()))
self.transport.write(poem)
self.transport.loseConnection()
class PoetryFactory(ServerFactory):
protocol = PoetryProtocol
def __init__(self, service):
self.service = service
class PoetryService(service.Service):
def __init__(self, poetry_file):
self.poetry_file = poetry_file
def startService(self):
service.Service.startService(self)
self.poem = open(self.poetry_file).read()
log.msg('loaded a poem from: %s' % (self.poetry_file,))
# This is the main body of the plugin. First we define
# our command-line options.
class Options(usage.Options):
optParameters = [
['port', 'p', 10000, 'The port number to listen on.'],
['poem', None, None, 'The file containing the poem.'],
['iface', None, 'localhost', 'The interface to listen on.'],
]
# Now we define our 'service maker', an object which knows
# how to construct our service.
class PoetryServiceMaker(object):
implements(service.IServiceMaker, IPlugin)
tapname = "fastpoetry"
description = "A fast poetry service."
options = Options
def makeService(self, options):
top_service = service.MultiService()
poetry_service = PoetryService(options['poem'])
poetry_service.setServiceParent(top_service)
factory = PoetryFactory(poetry_service)
tcp_service = internet.TCPServer(int(options['port']), factory,
interface=options['iface'])
tcp_service.setServiceParent(top_service)
return top_service
# This variable name is irrelevant. What matters is that
# instances of PoetryServiceMaker implement IServiceMaker
# and IPlugin.
service_maker = PoetryServiceMaker()
| mit | 1,204,520,172,546,357,500 | 27.506173 | 71 | 0.677783 | false |
aviciimaxwell/odoo | addons/purchase_double_validation/purchase_double_validation_installer.py | 432 | 2315 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class purchase_config_settings(osv.osv_memory):
_inherit = 'purchase.config.settings'
_columns = {
        'limit_amount': fields.integer('limit to require a second approval', required=True,
help="Amount after which validation of purchase is required."),
}
_defaults = {
'limit_amount': 5000,
}
def get_default_limit_amount(self, cr, uid, fields, context=None):
ir_model_data = self.pool.get('ir.model.data')
transition = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_lt')
field, value = transition.condition.split('<', 1)
return {'limit_amount': int(value)}
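    # Illustrative behaviour (assumed transition text, not from the original
    # source): a condition such as 'amount_total < 5000' splits into
    # ('amount_total ', ' 5000'), so the default becomes {'limit_amount': 5000}.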
def set_limit_amount(self, cr, uid, ids, context=None):
ir_model_data = self.pool.get('ir.model.data')
config = self.browse(cr, uid, ids[0], context)
waiting = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_gt')
waiting.write({'condition': 'amount_total >= %s' % config.limit_amount})
confirm = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_lt')
confirm.write({'condition': 'amount_total < %s' % config.limit_amount})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,507,449,435,211,934,000 | 46.244898 | 113 | 0.633261 | false |
perryjohnson/biplaneblade | sandia_blade_lib/plot_MK.py | 1 | 6585 | """Plot the mass and stiffness data from Sandia and VABS.
First, data from mass and stiffness matrices for the Sandia blade are written
to the file 'sandia_blade/blade_props_from_VABS.csv'
Then, these data are plotted against published data from Griffith & Resor 2011.
Usage
-----
Open an IPython terminal and type:
|> %run plot_MK
Author: Perry Roth-Johnson
Last modified: April 22, 2014
"""
import lib.blade as bl
reload(bl)
import pandas as pd
import matplotlib.pyplot as plt
from numpy import average
from matplotlib import rc
rc('font', size=14.0)
def rel_diff(vabs_data, sandia_data):
"""Calculate the percent relative difference."""
return ((vabs_data-sandia_data)/average([vabs_data,sandia_data]))*100.0
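# Illustrative check (not in the original source): rel_diff(110.0, 90.0)
# evaluates to 20.0, i.e. a 20% spread about the mean of 100.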
def prep_rel_diff_plot(axis,ymin=-40,ymax=40):
"""Prepare a relative difference plot."""
axis2 = axis.twinx()
axis2.set_ylabel('difference from average [%]', color='m')
axis2.set_ylim([ymin,ymax])
for tl in axis2.get_yticklabels():
tl.set_color('m')
axis2.grid('on')
return axis2
# write all the mass and stiffness matrices from VABS to a csv file -----------
m = bl.MonoplaneBlade('Sandia blade SNL100-00', 'sandia_blade')
m.writecsv_mass_and_stiffness_props()
# plot VABS and Sandia datasets against one another ---------------------------
v=pd.DataFrame.from_csv('sandia_blade/blade_props_from_VABS.csv')
s=pd.DataFrame.from_csv('sandia_blade/blade_props_from_Sandia.csv')
plt.close('all')
# stiffness properties --------------------------------------------------------
f, axarr = plt.subplots(2,2, figsize=(10*1.5,6.5*1.5))
# ref for dual-axis plotting: http://matplotlib.org/examples/api/two_scales.html
# flapwise stiffness
twin_axis00 = prep_rel_diff_plot(axarr[0,0])
twin_axis00.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['K_55, EI_flap'],s['EI_flap']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr[0,0].plot(s['Blade Spanwise Coordinate'],s['EI_flap'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr[0,0].plot(v['Blade Spanwise Coordinate'],v['K_55, EI_flap'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr[0,0].set_xlabel('span [m]')
axarr[0,0].set_ylabel('flapwise stiffness [N*m^2]')
axarr[0,0].legend()
axarr[0,0].grid('on', axis='x')
# edgewise stiffness
twin_axis01 = prep_rel_diff_plot(axarr[0,1])
twin_axis01.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['K_66, EI_edge'],s['EI_edge']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr[0,1].plot(s['Blade Spanwise Coordinate'],s['EI_edge'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr[0,1].plot(v['Blade Spanwise Coordinate'],v['K_66, EI_edge'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr[0,1].set_xlabel('span [m]')
axarr[0,1].set_ylabel('edgewise stiffness [N*m^2]')
axarr[0,1].grid('on', axis='x')
axarr[0,1].legend()
# axial stiffness
twin_axis10 = prep_rel_diff_plot(axarr[1,0])
twin_axis10.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['K_11, EA_axial'],s['EA_axial']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr[1,0].plot(s['Blade Spanwise Coordinate'],s['EA_axial'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr[1,0].plot(v['Blade Spanwise Coordinate'],v['K_11, EA_axial'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr[1,0].set_xlabel('span [m]')
axarr[1,0].set_ylabel('axial stiffness [N]')
axarr[1,0].legend()
axarr[1,0].grid('on', axis='x')
# torsional stiffness
twin_axis11 = prep_rel_diff_plot(axarr[1,1])
twin_axis11.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['K_44, GJ_twist'],s['GJ_twist']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr[1,1].plot(s['Blade Spanwise Coordinate'],s['GJ_twist'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr[1,1].plot(v['Blade Spanwise Coordinate'],v['K_44, GJ_twist'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr[1,1].set_xlabel('span [m]')
axarr[1,1].set_ylabel('torsional stiffness [N*m^2]')
axarr[1,1].legend()
axarr[1,1].grid('on', axis='x')
plt.tight_layout()
plt.subplots_adjust(left=0.05, bottom=0.07, right=0.94, top=0.96, wspace=0.33, hspace=0.28)
plt.savefig('sandia_blade/Sandia_vs_VABS_stiffness_props.png')
plt.savefig('sandia_blade/Sandia_vs_VABS_stiffness_props.pdf')
# mass properties -------------------------------------------------------------
f2, axarr2 = plt.subplots(2,2, figsize=(10*1.5,6.5*1.5))
# mass density
twin_axis2_10 = prep_rel_diff_plot(axarr2[1,0])
twin_axis2_10.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['M_11, mu_mass'],s['mu_mass']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr2[1,0].plot(s['Blade Spanwise Coordinate'],s['mu_mass'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr2[1,0].plot(v['Blade Spanwise Coordinate'],v['M_11, mu_mass'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr2[1,0].set_xlabel('span [m]')
axarr2[1,0].set_ylabel('mass [kg/m]')
axarr2[1,0].legend()
axarr2[1,0].grid('on', axis='x')
# flapwise mass moment of inertia
twin_axis2_00 = prep_rel_diff_plot(axarr2[0,0])
twin_axis2_00.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['M_55, i22_flap'],s['i22_flap']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr2[0,0].plot(s['Blade Spanwise Coordinate'],s['i22_flap'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr2[0,0].plot(v['Blade Spanwise Coordinate'],v['M_55, i22_flap'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr2[0,0].set_xlabel('span [m]')
axarr2[0,0].set_ylabel('flapwise mass moment of inertia [kg*m]')
axarr2[0,0].legend()
axarr2[0,0].grid('on', axis='x')
# edgewise mass moment of inertia
twin_axis2_01 = prep_rel_diff_plot(axarr2[0,1])
twin_axis2_01.plot(
s['Blade Spanwise Coordinate'], rel_diff(v['M_66, i33_edge'],s['i33_edge']),
'm^:', mec='m', mfc='None', mew=1, label='difference')
axarr2[0,1].plot(s['Blade Spanwise Coordinate'],s['i33_edge'],'gx--',mfc='None',mew=1,label='Sandia (PreComp)')
axarr2[0,1].plot(v['Blade Spanwise Coordinate'],v['M_66, i33_edge'],'ko-',mfc='None',mew=1,label='UCLA (VABS)')
axarr2[0,1].set_xlabel('span [m]')
axarr2[0,1].set_ylabel('edgewise mass moment of inertia [kg*m]')
axarr2[0,1].legend()
axarr2[0,1].grid('on', axis='x')
plt.tight_layout()
plt.subplots_adjust(left=0.07, bottom=0.07, right=0.94, top=0.96, wspace=0.33, hspace=0.28)
plt.savefig('sandia_blade/Sandia_vs_VABS_mass_props.png')
plt.savefig('sandia_blade/Sandia_vs_VABS_mass_props.pdf')
plt.show()
| gpl-3.0 | -6,979,873,262,624,607,000 | 41.039216 | 111 | 0.641762 | false |
Softmotions/edx-platform | common/lib/xmodule/xmodule/tests/test_lti_unit.py | 98 | 22100 | # -*- coding: utf-8 -*-
"""Test for LTI Xmodule functional logic."""
import datetime
from django.utils.timezone import UTC
from mock import Mock, patch, PropertyMock
import textwrap
from lxml import etree
from webob.request import Request
from copy import copy
import urllib
from xmodule.fields import Timedelta
from xmodule.lti_module import LTIDescriptor
from xmodule.lti_2_util import LTIError
from . import LogicTest
class LTIModuleTest(LogicTest):
"""Logic tests for LTI module."""
descriptor_class = LTIDescriptor
def setUp(self):
super(LTIModuleTest, self).setUp()
self.environ = {'wsgi.url_scheme': 'http', 'REQUEST_METHOD': 'POST'}
self.request_body_xml_template = textwrap.dedent("""
<?xml version = "1.0" encoding = "UTF-8"?>
<imsx_POXEnvelopeRequest xmlns = "{namespace}">
<imsx_POXHeader>
<imsx_POXRequestHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>{messageIdentifier}</imsx_messageIdentifier>
</imsx_POXRequestHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>
<{action}>
<resultRecord>
<sourcedGUID>
<sourcedId>{sourcedId}</sourcedId>
</sourcedGUID>
<result>
<resultScore>
<language>en-us</language>
<textString>{grade}</textString>
</resultScore>
</result>
</resultRecord>
</{action}>
</imsx_POXBody>
</imsx_POXEnvelopeRequest>
""")
self.system.get_real_user = Mock()
self.system.publish = Mock()
self.system.rebind_noauth_module_to_user = Mock()
self.user_id = self.xmodule.runtime.anonymous_student_id
self.lti_id = self.xmodule.lti_id
self.unquoted_resource_link_id = u'{}-i4x-2-3-lti-31de800015cf4afb973356dbe81496df'.format(self.xmodule.runtime.hostname)
sourced_id = u':'.join(urllib.quote(i) for i in (self.lti_id, self.unquoted_resource_link_id, self.user_id))
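        # The sourcedId is the colon-joined, URL-quoted triple
        # <lti_id>:<resource_link_id>:<anonymous user id>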
self.defaults = {
'namespace': "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0",
'sourcedId': sourced_id,
'action': 'replaceResultRequest',
'grade': 0.5,
'messageIdentifier': '528243ba5241b',
}
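        # These defaults populate the POX request template above; individual
        # tests override single fields by passing params to get_request_body().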
self.xmodule.due = None
self.xmodule.graceperiod = None
def get_request_body(self, params=None):
"""Fetches the body of a request specified by params"""
if params is None:
params = {}
data = copy(self.defaults)
data.update(params)
return self.request_body_xml_template.format(**data)
def get_response_values(self, response):
"""Gets the values from the given response"""
parser = etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8')
root = etree.fromstring(response.body.strip(), parser=parser)
lti_spec_namespace = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0"
namespaces = {'def': lti_spec_namespace}
code_major = root.xpath("//def:imsx_codeMajor", namespaces=namespaces)[0].text
description = root.xpath("//def:imsx_description", namespaces=namespaces)[0].text
message_identifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text
imsx_pox_body = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0]
try:
action = imsx_pox_body.getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')
except Exception: # pylint: disable=broad-except
action = None
return {
'code_major': code_major,
'description': description,
'messageIdentifier': message_identifier,
'action': action
}
@patch('xmodule.lti_module.LTIModule.get_client_key_secret', return_value=('test_client_key', u'test_client_secret'))
def test_authorization_header_not_present(self, _get_key_secret):
"""
Request has no Authorization header.
This is an unknown service request, i.e., it is not a part of the original service specification.
"""
request = Request(self.environ)
request.body = self.get_request_body()
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'failure',
'description': 'OAuth verification error: Malformed authorization header',
'messageIdentifier': self.defaults['messageIdentifier'],
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
@patch('xmodule.lti_module.LTIModule.get_client_key_secret', return_value=('test_client_key', u'test_client_secret'))
def test_authorization_header_empty(self, _get_key_secret):
"""
        Request Authorization header is present but malformed.
This is an unknown service request, i.e., it is not a part of the original service specification.
"""
request = Request(self.environ)
request.authorization = "bad authorization header"
request.body = self.get_request_body()
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'failure',
'description': 'OAuth verification error: Malformed authorization header',
'messageIdentifier': self.defaults['messageIdentifier'],
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
def test_real_user_is_none(self):
"""
If we have no real user, we should send back failure response.
"""
self.xmodule.verify_oauth_body_sign = Mock()
self.xmodule.has_score = True
self.system.get_real_user = Mock(return_value=None)
request = Request(self.environ)
request.body = self.get_request_body()
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'failure',
'description': 'User not found.',
'messageIdentifier': self.defaults['messageIdentifier'],
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
def test_grade_past_due(self):
"""
Should fail if we do not accept past due grades, and it is past due.
"""
self.xmodule.accept_grades_past_due = False
self.xmodule.due = datetime.datetime.now(UTC())
self.xmodule.graceperiod = Timedelta().from_json("0 seconds")
request = Request(self.environ)
request.body = self.get_request_body()
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'failure',
'description': 'Grade is past due',
'messageIdentifier': 'unknown',
}
self.assertEqual(response.status_code, 200)
self.assertEqual(expected_response, real_response)
def test_grade_not_in_range(self):
"""
Grade returned from Tool Provider is outside the range 0.0-1.0.
"""
self.xmodule.verify_oauth_body_sign = Mock()
request = Request(self.environ)
request.body = self.get_request_body(params={'grade': '10'})
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'failure',
'description': 'Request body XML parsing error: score value outside the permitted range of 0-1.',
'messageIdentifier': 'unknown',
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
def test_bad_grade_decimal(self):
"""
Grade returned from Tool Provider doesn't use a period as the decimal point.
"""
self.xmodule.verify_oauth_body_sign = Mock()
request = Request(self.environ)
request.body = self.get_request_body(params={'grade': '0,5'})
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'failure',
'description': 'Request body XML parsing error: invalid literal for float(): 0,5',
'messageIdentifier': 'unknown',
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
def test_unsupported_action(self):
"""
Action returned from Tool Provider isn't supported.
        Only `replaceResultRequest` is supported.
"""
self.xmodule.verify_oauth_body_sign = Mock()
request = Request(self.environ)
request.body = self.get_request_body({'action': 'wrongAction'})
response = self.xmodule.grade_handler(request, '')
real_response = self.get_response_values(response)
expected_response = {
'action': None,
'code_major': 'unsupported',
'description': 'Target does not support the requested operation.',
'messageIdentifier': self.defaults['messageIdentifier'],
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
def test_good_request(self):
"""
Response from Tool Provider is correct.
"""
self.xmodule.verify_oauth_body_sign = Mock()
self.xmodule.has_score = True
request = Request(self.environ)
request.body = self.get_request_body()
response = self.xmodule.grade_handler(request, '')
description_expected = 'Score for {sourcedId} is now {score}'.format(
sourcedId=self.defaults['sourcedId'],
score=self.defaults['grade'],
)
real_response = self.get_response_values(response)
expected_response = {
'action': 'replaceResultResponse',
'code_major': 'success',
'description': description_expected,
'messageIdentifier': self.defaults['messageIdentifier'],
}
self.assertEqual(response.status_code, 200)
self.assertDictEqual(expected_response, real_response)
self.assertEqual(self.xmodule.module_score, float(self.defaults['grade']))
def test_user_id(self):
expected_user_id = unicode(urllib.quote(self.xmodule.runtime.anonymous_student_id))
real_user_id = self.xmodule.get_user_id()
self.assertEqual(real_user_id, expected_user_id)
def test_outcome_service_url(self):
mock_url_prefix = 'https://hostname/'
test_service_name = "test_service"
def mock_handler_url(block, handler_name, **kwargs): # pylint: disable=unused-argument
"""Mock function for returning fully-qualified handler urls"""
return mock_url_prefix + handler_name
self.xmodule.runtime.handler_url = Mock(side_effect=mock_handler_url)
real_outcome_service_url = self.xmodule.get_outcome_service_url(service_name=test_service_name)
self.assertEqual(real_outcome_service_url, mock_url_prefix + test_service_name)
def test_resource_link_id(self):
with patch('xmodule.lti_module.LTIModule.location', new_callable=PropertyMock):
self.xmodule.location.html_id = lambda: 'i4x-2-3-lti-31de800015cf4afb973356dbe81496df'
expected_resource_link_id = unicode(urllib.quote(self.unquoted_resource_link_id))
real_resource_link_id = self.xmodule.get_resource_link_id()
self.assertEqual(real_resource_link_id, expected_resource_link_id)
def test_lis_result_sourcedid(self):
expected_sourced_id = u':'.join(urllib.quote(i) for i in (
self.system.course_id.to_deprecated_string(),
self.xmodule.get_resource_link_id(),
self.user_id
))
real_lis_result_sourcedid = self.xmodule.get_lis_result_sourcedid()
self.assertEqual(real_lis_result_sourcedid, expected_sourced_id)
def test_client_key_secret(self):
"""
LTI module gets client key and secret provided.
"""
        # this adds lti passports to system
mocked_course = Mock(lti_passports=['lti_id:test_client:test_secret'])
modulestore = Mock()
modulestore.get_course.return_value = mocked_course
runtime = Mock(modulestore=modulestore)
self.xmodule.descriptor.runtime = runtime
self.xmodule.lti_id = "lti_id"
key, secret = self.xmodule.get_client_key_secret()
expected = ('test_client', 'test_secret')
self.assertEqual(expected, (key, secret))
def test_client_key_secret_not_provided(self):
"""
LTI module attempts to get client key and secret provided in cms.
There are key and secret but not for specific LTI.
"""
# this adds lti passports to system
mocked_course = Mock(lti_passports=['test_id:test_client:test_secret'])
modulestore = Mock()
modulestore.get_course.return_value = mocked_course
runtime = Mock(modulestore=modulestore)
self.xmodule.descriptor.runtime = runtime
# set another lti_id
self.xmodule.lti_id = "another_lti_id"
key_secret = self.xmodule.get_client_key_secret()
expected = ('', '')
self.assertEqual(expected, key_secret)
def test_bad_client_key_secret(self):
"""
LTI module attempts to get client key and secret provided in cms.
There are key and secret provided in wrong format.
"""
# this adds lti passports to system
mocked_course = Mock(lti_passports=['test_id_test_client_test_secret'])
modulestore = Mock()
modulestore.get_course.return_value = mocked_course
runtime = Mock(modulestore=modulestore)
self.xmodule.descriptor.runtime = runtime
self.xmodule.lti_id = 'lti_id'
with self.assertRaises(LTIError):
self.xmodule.get_client_key_secret()
@patch('xmodule.lti_module.signature.verify_hmac_sha1', Mock(return_value=True))
@patch('xmodule.lti_module.LTIModule.get_client_key_secret', Mock(return_value=('test_client_key', u'test_client_secret')))
def test_successful_verify_oauth_body_sign(self):
"""
Test if OAuth signing was successful.
"""
self.xmodule.verify_oauth_body_sign(self.get_signed_grade_mock_request())
@patch('xmodule.lti_module.LTIModule.get_outcome_service_url', Mock(return_value=u'https://testurl/'))
@patch('xmodule.lti_module.LTIModule.get_client_key_secret',
Mock(return_value=(u'__consumer_key__', u'__lti_secret__')))
def test_failed_verify_oauth_body_sign_proxy_mangle_url(self):
"""
Oauth signing verify fail.
"""
request = self.get_signed_grade_mock_request_with_correct_signature()
self.xmodule.verify_oauth_body_sign(request)
        # We should verify against get_outcome_service_url, not the request
        # URL: a proxy or load balancer along the way may change the URL
        # presented to this method.
request.url = 'http://testurl/'
self.xmodule.verify_oauth_body_sign(request)
def get_signed_grade_mock_request_with_correct_signature(self):
"""
Generate a proper LTI request object
"""
mock_request = Mock()
mock_request.headers = {
'X-Requested-With': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': (
u'OAuth realm="https://testurl/", oauth_body_hash="wwzA3s8gScKD1VpJ7jMt9b%2BMj9Q%3D",'
'oauth_nonce="18821463", oauth_timestamp="1409321145", '
'oauth_consumer_key="__consumer_key__", oauth_signature_method="HMAC-SHA1", '
'oauth_version="1.0", oauth_signature="fHsE1hhIz76/msUoMR3Lyb7Aou4%3D"'
)
}
mock_request.url = u'https://testurl'
mock_request.http_method = u'POST'
mock_request.method = mock_request.http_method
mock_request.body = (
'<?xml version=\'1.0\' encoding=\'utf-8\'?>\n'
'<imsx_POXEnvelopeRequest xmlns="http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0">'
'<imsx_POXHeader><imsx_POXRequestHeaderInfo><imsx_version>V1.0</imsx_version>'
'<imsx_messageIdentifier>edX_fix</imsx_messageIdentifier></imsx_POXRequestHeaderInfo>'
'</imsx_POXHeader><imsx_POXBody><replaceResultRequest><resultRecord><sourcedGUID>'
'<sourcedId>MITxLTI/MITxLTI/201x:localhost%3A8000-i4x-MITxLTI-MITxLTI-lti-3751833a214a4f66a0d18f63234207f2:363979ef768ca171b50f9d1bfb322131</sourcedId>'
'</sourcedGUID><result><resultScore><language>en</language><textString>0.32</textString></resultScore>'
'</result></resultRecord></replaceResultRequest></imsx_POXBody></imsx_POXEnvelopeRequest>'
)
return mock_request
def test_wrong_xml_namespace(self):
"""
Test wrong XML Namespace.
Tests that tool provider returned grade back with wrong XML Namespace.
"""
with self.assertRaises(IndexError):
mocked_request = self.get_signed_grade_mock_request(namespace_lti_v1p1=False)
self.xmodule.parse_grade_xml_body(mocked_request.body)
def test_parse_grade_xml_body(self):
"""
Test XML request body parsing.
Tests that xml body was parsed successfully.
"""
mocked_request = self.get_signed_grade_mock_request()
message_identifier, sourced_id, grade, action = self.xmodule.parse_grade_xml_body(mocked_request.body)
self.assertEqual(self.defaults['messageIdentifier'], message_identifier)
self.assertEqual(self.defaults['sourcedId'], sourced_id)
self.assertEqual(self.defaults['grade'], grade)
self.assertEqual(self.defaults['action'], action)
@patch('xmodule.lti_module.signature.verify_hmac_sha1', Mock(return_value=False))
@patch('xmodule.lti_module.LTIModule.get_client_key_secret', Mock(return_value=('test_client_key', u'test_client_secret')))
def test_failed_verify_oauth_body_sign(self):
"""
Oauth signing verify fail.
"""
with self.assertRaises(LTIError):
req = self.get_signed_grade_mock_request()
self.xmodule.verify_oauth_body_sign(req)
def get_signed_grade_mock_request(self, namespace_lti_v1p1=True):
"""
Example of signed request from LTI Provider.
        When `namespace_lti_v1p1` is set to True, the default namespace from
        LTI 1.1 will be used. Otherwise a fake namespace will be added to the XML.
"""
mock_request = Mock()
mock_request.headers = { # pylint: disable=no-space-before-operator
'X-Requested-With': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': u'OAuth oauth_nonce="135685044251684026041377608307", \
oauth_timestamp="1234567890", oauth_version="1.0", \
oauth_signature_method="HMAC-SHA1", \
oauth_consumer_key="test_client_key", \
oauth_signature="my_signature%3D", \
oauth_body_hash="JEpIArlNCeV4ceXxric8gJQCnBw="'
}
mock_request.url = u'http://testurl'
mock_request.http_method = u'POST'
params = {}
if not namespace_lti_v1p1:
params = {
'namespace': "http://www.fakenamespace.com/fake"
}
mock_request.body = self.get_request_body(params)
return mock_request
def test_good_custom_params(self):
"""
Custom parameters are presented in right format.
"""
self.xmodule.custom_parameters = ['test_custom_params=test_custom_param_value']
self.xmodule.get_client_key_secret = Mock(return_value=('test_client_key', 'test_client_secret'))
self.xmodule.oauth_params = Mock()
self.xmodule.get_input_fields()
self.xmodule.oauth_params.assert_called_with(
{u'custom_test_custom_params': u'test_custom_param_value'},
'test_client_key', 'test_client_secret'
)
def test_bad_custom_params(self):
"""
Custom parameters are presented in wrong format.
"""
bad_custom_params = ['test_custom_params: test_custom_param_value']
self.xmodule.custom_parameters = bad_custom_params
self.xmodule.get_client_key_secret = Mock(return_value=('test_client_key', 'test_client_secret'))
self.xmodule.oauth_params = Mock()
with self.assertRaises(LTIError):
self.xmodule.get_input_fields()
def test_max_score(self):
self.xmodule.weight = 100.0
self.assertFalse(self.xmodule.has_score)
self.assertEqual(self.xmodule.max_score(), None)
self.xmodule.has_score = True
self.assertEqual(self.xmodule.max_score(), 100.0)
def test_context_id(self):
"""
Tests that LTI parameter context_id is equal to course_id.
"""
self.assertEqual(self.system.course_id.to_deprecated_string(), self.xmodule.context_id)
| agpl-3.0 | -7,229,101,229,846,730,000 | 42.503937 | 164 | 0.621267 | false |
ar7z1/ansible | lib/ansible/modules/network/meraki/meraki_mr_l3_firewall.py | 35 | 9202 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Kevin Breit (@kbreit) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_mr_l3_firewall
short_description: Manage MR access point layer 3 firewalls in the Meraki cloud
version_added: "2.7"
description:
- Allows for creation, management, and visibility into layer 3 firewalls implemented on Meraki MR access points.
options:
state:
description:
        - Create, modify, or query firewall rules.
default: present
choices: [present, query]
org_name:
description:
- Name of organization.
org_id:
description:
- ID of organization.
net_name:
description:
- Name of network containing access points.
net_id:
description:
- ID of network containing access points.
number:
description:
- Number of SSID to apply firewall rule to.
aliases: [ssid_number]
ssid_name:
description:
- Name of SSID to apply firewall rule to.
aliases: [ssid]
allow_lan_access:
description:
- Sets whether devices can talk to other devices on the same LAN.
type: bool
default: yes
rules:
description:
- List of firewall rules.
suboptions:
policy:
description:
- Specifies the action that should be taken when rule is hit.
choices: [allow, deny]
protocol:
description:
- Specifies protocol to match against.
choices: [any, icmp, tcp, udp]
dest_port:
description:
- Comma separated list of destination ports to match.
dest_cidr:
description:
- Comma separated list of CIDR notation networks to match.
comment:
description:
- Optional comment describing the firewall rule.
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: meraki
'''
EXAMPLES = r'''
- name: Create single firewall rule
meraki_mr_l3_firewall:
auth_key: abc123
state: present
org_name: YourOrg
net_id: 12345
number: 1
rules:
- comment: Integration test rule
policy: allow
protocol: tcp
dest_port: 80
dest_cidr: 192.0.2.0/24
allow_lan_access: no
delegate_to: localhost
- name: Enable local LAN access
meraki_mr_l3_firewall:
auth_key: abc123
state: present
org_name: YourOrg
net_id: 123
number: 1
rules:
allow_lan_access: yes
delegate_to: localhost
- name: Query firewall rules
meraki_mr_l3_firewall:
auth_key: abc123
state: query
org_name: YourOrg
net_name: YourNet
number: 1
delegate_to: localhost
'''
RETURN = r'''
'''
import os
from ansible.module_utils.basic import AnsibleModule, json, env_fallback
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_native
from ansible.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
def assemble_payload(meraki):
params_map = {'policy': 'policy',
'protocol': 'protocol',
'dest_port': 'destPort',
'dest_cidr': 'destCidr',
'comment': 'comment',
}
rules = []
for rule in meraki.params['rules']:
proposed_rule = dict()
for k, v in rule.items():
proposed_rule[params_map[k]] = v
rules.append(proposed_rule)
payload = {'rules': rules}
return payload
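# For example, a hypothetical task rule such as
#     {'policy': 'allow', 'protocol': 'tcp', 'dest_port': '80',
#      'dest_cidr': '192.0.2.0/24', 'comment': 'web'}
# is mapped by assemble_payload() to the camelCase keys the Meraki API expects:
#     {'rules': [{'policy': 'allow', 'protocol': 'tcp', 'destPort': '80',
#                 'destCidr': '192.0.2.0/24', 'comment': 'web'}]}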
def get_rules(meraki, net_id, number):
path = meraki.construct_path('get_all', net_id=net_id)
path = path + number + '/l3FirewallRules'
response = meraki.request(path, method='GET')
if meraki.status == 200:
return response
def get_ssid_number(name, data):
for ssid in data:
if name == ssid['name']:
return ssid['number']
return False
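# NOTE: get_ssid_number() returns False (not None) when no SSID matches the
# given name; callers should check for this before using the result.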
def get_ssids(meraki, net_id):
path = meraki.construct_path('get_all', net_id=net_id)
return meraki.request(path, method='GET')
def main():
# define the available arguments/parameters that a user can pass to
# the module
fw_rules = dict(policy=dict(type='str', choices=['allow', 'deny']),
protocol=dict(type='str', choices=['tcp', 'udp', 'icmp', 'any']),
dest_port=dict(type='str'),
dest_cidr=dict(type='str'),
comment=dict(type='str'),
)
argument_spec = meraki_argument_spec()
argument_spec.update(state=dict(type='str', choices=['present', 'query'], default='present'),
net_name=dict(type='str'),
net_id=dict(type='str'),
number=dict(type='str', aliases=['ssid_number']),
ssid_name=dict(type='str', aliases=['ssid']),
rules=dict(type='list', default=None, elements='dict', options=fw_rules),
allow_lan_access=dict(type='bool', default=True),
)
# seed the result dict in the object
# we primarily care about changed and state
# change is if this module effectively modified the target
# state will include any data that you want your module to pass back
# for consumption, for example, in a subsequent task
result = dict(
changed=False,
)
# the AnsibleModule object will be our abstraction working with Ansible
# this includes instantiation, a couple of common attr would be the
# args/params passed to the execution, as well as if the module
# supports check mode
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
)
meraki = MerakiModule(module, function='mr_l3_firewall')
meraki.params['follow_redirects'] = 'all'
query_urls = {'mr_l3_firewall': '/networks/{net_id}/ssids/'}
update_urls = {'mr_l3_firewall': '/networks/{net_id}/ssids/'}
meraki.url_catalog['get_all'].update(query_urls)
meraki.url_catalog['update'] = update_urls
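    # The catalog entries above are format strings: construct_path() fills in
    # {net_id}, and the handlers append the SSID number plus
    # '/l3FirewallRules' to build the final endpoint.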
payload = None
# if the user is working with this module in only check mode we do not
# want to make any changes to the environment, just return the current
# state with no modifications
# FIXME: Work with Meraki so they can implement a check mode
if module.check_mode:
meraki.exit_json(**meraki.result)
# execute checks for argument completeness
# manipulate or modify the state as needed (this is going to be the
# part where your module will do what it needs to do)
org_id = meraki.params['org_id']
orgs = None
if org_id is None:
orgs = meraki.get_orgs()
for org in orgs:
if org['name'] == meraki.params['org_name']:
org_id = org['id']
net_id = meraki.params['net_id']
if net_id is None:
if orgs is None:
orgs = meraki.get_orgs()
net_id = meraki.get_net_id(net_name=meraki.params['net_name'],
data=meraki.get_nets(org_id=org_id))
number = meraki.params['number']
if meraki.params['ssid_name']:
number = get_ssid_number(meraki.params['ssid_name'], get_ssids(meraki, net_id))
if meraki.params['state'] == 'query':
meraki.result['data'] = get_rules(meraki, net_id, number)
elif meraki.params['state'] == 'present':
rules = get_rules(meraki, net_id, number)
path = meraki.construct_path('get_all', net_id=net_id)
path = path + number + '/l3FirewallRules'
if meraki.params['rules']:
payload = assemble_payload(meraki)
else:
payload = dict()
update = False
try:
if len(rules) != len(payload['rules']): # Quick and simple check to avoid more processing
update = True
if update is False:
                # the last two rules returned by the API are Meraki's built-in
                # default rules; compare only the user-defined rules
                for r in range(len(rules) - 2):
                    if meraki.is_update_required(rules[r], payload['rules'][r]) is True:
update = True
except KeyError:
pass
        # the second to last rule returned by the API is the built-in local LAN
        # access rule; compare its policy against the requested setting
        if (rules[len(rules) - 2]['policy'] == 'allow') != meraki.params['allow_lan_access']:
update = True
if update is True:
payload['allowLanAccess'] = meraki.params['allow_lan_access']
response = meraki.request(path, method='PUT', payload=json.dumps(payload))
if meraki.status == 200:
meraki.result['data'] = response
meraki.result['changed'] = True
# in the event of a successful module execution, you will want to
# simple AnsibleModule.exit_json(), passing the key/value results
meraki.exit_json(**meraki.result)
if __name__ == '__main__':
main()
| gpl-3.0 | 1,690,669,689,782,033,200 | 32.220217 | 112 | 0.589654 | false |
mcltn/ansible | lib/ansible/plugins/action/copy.py | 48 | 12949 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import pipes
import tempfile
from ansible import constants as C
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
from ansible.utils.hashing import checksum
from ansible.utils.unicode import to_bytes
class ActionModule(ActionBase):
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()
source = self._task.args.get('src', None)
content = self._task.args.get('content', None)
dest = self._task.args.get('dest', None)
raw = boolean(self._task.args.get('raw', 'no'))
force = boolean(self._task.args.get('force', 'yes'))
faf = self._task.first_available_file
remote_src = boolean(self._task.args.get('remote_src', False))
if (source is None and content is None and faf is None) or dest is None:
return dict(failed=True, msg="src (or content) and dest are required")
elif (source is not None or faf is not None) and content is not None:
return dict(failed=True, msg="src and content are mutually exclusive")
elif content is not None and dest is not None and dest.endswith("/"):
return dict(failed=True, msg="dest must be a file if content is defined")
# Check if the source ends with a "/"
source_trailing_slash = False
if source:
source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)
# Define content_tempfile in case we set it after finding content populated.
content_tempfile = None
# If content is defined make a temp file and write the content into it.
if content is not None:
try:
# If content comes to us as a dict it should be decoded json.
# We need to encode it back into a string to write it out.
if isinstance(content, dict) or isinstance(content, list):
content_tempfile = self._create_content_tempfile(json.dumps(content))
else:
content_tempfile = self._create_content_tempfile(content)
source = content_tempfile
except Exception as err:
return dict(failed=True, msg="could not write content temp file: %s" % err)
# if we have first_available_file in our vars
# look up the files and use the first one we find as src
elif faf:
source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
if source is None:
return dict(failed=True, msg="could not find src in first_available_file list")
elif remote_src:
new_module_args = self._task.args.copy()
del new_module_args['remote_src']
return self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=False)
else:
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
# A list of source file tuples (full_path, relative_path) which will try to copy to the destination
source_files = []
# If source is a directory populate our list else source is a file and translate it to a tuple.
if os.path.isdir(source):
# Get the amount of spaces to remove to get the relative path.
if source_trailing_slash:
sz = len(source)
else:
sz = len(source.rsplit('/', 1)[0]) + 1
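                # e.g. source='/etc/app' gives sz == len('/etc/'), so a walked
                # file '/etc/app/conf/x.cfg' yields rel_path 'app/conf/x.cfg'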
# Walk the directory and append the file tuples to source_files.
for base_path, sub_folders, files in os.walk(source):
for file in files:
full_path = os.path.join(base_path, file)
rel_path = full_path[sz:]
source_files.append((full_path, rel_path))
# If it's recursive copy, destination is always a dir,
# explicitly mark it so (note - copy module relies on this).
if not self._connection._shell.path_has_trailing_slash(dest):
dest = self._connection._shell.join_path(dest, '')
else:
source_files.append((source, os.path.basename(source)))
changed = False
module_result = {"changed": False}
# A register for if we executed a module.
# Used to cut down on command calls when not recursive.
module_executed = False
# Tell _execute_module to delete the file if there is one file.
delete_remote_tmp = (len(source_files) == 1)
# If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
if not delete_remote_tmp:
if tmp is None or "-tmp-" not in tmp:
tmp = self._make_tmp_path()
# expand any user home dir specifier
dest = self._remote_expand_user(dest, tmp)
diffs = []
for source_full, source_rel in source_files:
# Generate a hash of the local file.
local_checksum = checksum(source_full)
# If local_checksum is not defined we can't find the file so we should fail out.
if local_checksum is None:
return dict(failed=True, msg="could not find src=%s" % source_full)
# This is kind of optimization - if user told us destination is
# dir, do path manipulation right away, otherwise we still check
# for dest being a dir via remote call below.
if self._connection._shell.path_has_trailing_slash(dest):
dest_file = self._connection._shell.join_path(dest, source_rel)
else:
dest_file = self._connection._shell.join_path(dest)
# Attempt to get the remote checksum
remote_checksum = self._remote_checksum(tmp, dest_file, all_vars=task_vars)
if remote_checksum == '3':
# The remote_checksum was executed on a directory.
if content is not None:
# If source was defined as content remove the temporary file and fail out.
self._remove_tempfile_if_content_defined(content, content_tempfile)
return dict(failed=True, msg="can not use content with a dir as dest")
else:
# Append the relative source location to the destination and retry remote_checksum
dest_file = self._connection._shell.join_path(dest, source_rel)
remote_checksum = self._remote_checksum(tmp, dest_file, all_vars=task_vars)
if remote_checksum != '1' and not force:
# remote_file does not exist so continue to next iteration.
continue
if local_checksum != remote_checksum:
# The checksums don't match and we will change or error out.
changed = True
# Create a tmp path if missing only if this is not recursive.
# If this is recursive we already have a tmp path.
if delete_remote_tmp:
if tmp is None or "-tmp-" not in tmp:
tmp = self._make_tmp_path()
if self._play_context.diff and not raw:
diffs.append(self._get_diff_data(tmp, dest_file, source_full, task_vars))
if self._play_context.check_mode:
self._remove_tempfile_if_content_defined(content, content_tempfile)
changed = True
module_return = dict(changed=True)
continue
# Define a remote directory that we will copy the file to.
tmp_src = self._connection._shell.join_path(tmp, 'source')
if not raw:
self._connection.put_file(source_full, tmp_src)
else:
self._connection.put_file(source_full, dest_file)
# We have copied the file remotely and no longer require our content_tempfile
self._remove_tempfile_if_content_defined(content, content_tempfile)
# fix file permissions when the copy is done as a different user
if self._play_context.become and self._play_context.become_user != 'root':
self._remote_chmod('a+r', tmp_src, tmp)
if raw:
# Continue to next iteration if raw is defined.
continue
# Run the copy module
# src and dest here come after original and override them
# we pass dest only to make sure it includes trailing slash in case of recursive copy
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=tmp_src,
dest=dest,
original_basename=source_rel,
)
)
module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
module_executed = True
else:
# no need to transfer the file, already correct hash, but still need to call
# the file module in case we want to change attributes
self._remove_tempfile_if_content_defined(content, content_tempfile)
if raw:
# Continue to next iteration if raw is defined.
self._remove_tmp_path(tmp)
continue
# Build temporary module_args.
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=source_rel,
dest=dest,
original_basename=source_rel
)
)
# Execute the file module.
module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
module_executed = True
if not module_return.get('checksum'):
module_return['checksum'] = local_checksum
if module_return.get('failed') == True:
return module_return
if module_return.get('changed') == True:
changed = True
# the file module returns the file path as 'path', but
# the copy module uses 'dest', so add it if it's not there
if 'path' in module_return and 'dest' not in module_return:
module_return['dest'] = module_return['path']
# Delete tmp path if we were recursive or if we did not execute a module.
if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
self._remove_tmp_path(tmp)
if module_executed and len(source_files) == 1:
result = module_return
else:
result = dict(dest=dest, src=source, changed=changed)
if diffs:
result['diff'] = diffs
return result
def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp()
f = os.fdopen(fd, 'wb')
content = to_bytes(content)
try:
f.write(content)
except Exception as err:
os.remove(content_tempfile)
raise Exception(err)
finally:
f.close()
return content_tempfile
def _remove_tempfile_if_content_defined(self, content, content_tempfile):
if content is not None:
os.remove(content_tempfile)
| gpl-3.0 | 8,255,811,017,874,124,000 | 43.194539 | 159 | 0.585296 | false |
schlos/eden | modules/tests/org/change_user_roles.py | 27 | 4661 | """ Sahana Eden Automated Test - ORG010 Change User Roles
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from selenium.webdriver.support.ui import Select
from gluon import current
from tests.web2unittest import SeleniumUnitTest
class ChangeUserRole(SeleniumUnitTest):
"""
Desc: Change User Roles
Case: ORG010
TO DO: Check if works and upgrade to new test system framework.
"""
    def org010(self):
"""
1. Log in as admin
2. Give test user org admin rights over Timor-Leste
3. Give user [email protected] some access on Timor-Leste
4. Log in as test user
5. Revoke all access for [email protected] on Timor-Leste
"""
        self.as_admin()
        self.logout()
        self.as_orgadmin()
        self.logout()
    def as_admin(self):
"""
Run the tests as an administrator
"""
config = current.test_config
browser = config.browser
driver = browser
        self.login(account='admin')
        self.make_user_orgadmin()
        self.open_organisation_roles()
        self.select_user()
# Set some new access levels
driver.find_element_by_id('role_volvol_reader').click()
driver.find_element_by_id('role_projectproject_data_entry').click()
driver.find_element_by_id('role_projectproject_data_entry').submit()
# @todo: check the values of the matrix
    def as_orgadmin(self):
"""
Run the tests as an org admin
"""
config = current.test_config
browser = config.browser
driver = browser
        self.login()
        self.open_organisation_roles(action="Details")
        self.select_user()
# Reset those access levels back to None
driver.find_element_by_id('role_volNone').click()
driver.find_element_by_id('role_projectNone').click()
driver.find_element_by_id('role_projectNone').submit()
# @todo: check the values of the matrix
    def make_user_orgadmin(self):
config = current.test_config
browser = config.browser
driver = browser
browser.get("%s/admin/user" % config.url)
# Open the roles page for [email protected] user account
        self.dt_filter("[email protected]")
        self.dt_action(action="Roles")
# Give org admin rights to Test User on Timor-Leste Red Cross Society
Select(driver.find_element_by_name("group_id")).select_by_visible_text("Organisation Admin")
Select(driver.find_element_by_name("pe_id")).select_by_visible_text("Timor-Leste Red Cross Society (Organization)")
driver.find_element_by_id("submit_add_button").click()
    def open_organisation_roles(self, action="Open"):
config = current.test_config
browser = config.browser
driver = browser
# Go to the organisation list
browser.get("%s/org/organisation" % config.url)
# Open the Timor-Leste organisation
        self.dt_filter("Timor-Leste")
        self.dt_action(action=action)
# Go to the organisations' User Roles tab
driver.find_element_by_link_text("User Roles").click()
    def select_user(self):
config = current.test_config
browser = config.browser
driver = browser
# Select a user from the drop-down list
Select(driver.find_element_by_name("user")).select_by_visible_text("[email protected]")
driver.find_element_by_xpath("//input[@type='submit']").click()
| mit | 8,307,002,802,715,154,000 | 33.783582 | 123 | 0.641064 | false |
codetorex/spritex | setup.py | 1 | 1345 | #!/usr/bin/env python
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = ""
packages = [
'editor'
]
requires = [
    'numpy>=1.12.1',
    'Kivy>=1.10.0',
    'pillow>=2.1.0',
    'cython',
]
setup(
name='spritex',
version="0.1.3",
description='A simple tool for extracting sprites from full frames. Useful for AI projects. ',
long_description=long_description,
author="codetorex",
author_email='[email protected]',
packages=packages,
include_package_data=True,
install_requires=requires,
entry_points={
'console_scripts': ['spritex = editor:execute']
},
license='MIT License',
url='https://github.com/codetorex/spritex',
zip_safe=False,
keywords=['spritex', 'sprite', 'extractor', 'unique color'],
classifiers=[
'Development Status :: 4 - Beta',
'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
]
)
| mit | -5,924,839,716,619,719,000 | 25.9 | 98 | 0.618587 | false |
astronaut1712/taiga-back | tests/integration/test_hooks_github.py | 13 | 17297 | import pytest
from unittest import mock
from django.core.urlresolvers import reverse
from django.core import mail
from taiga.base.utils import json
from taiga.hooks.github import event_hooks
from taiga.hooks.github.api import GitHubViewSet
from taiga.hooks.exceptions import ActionSyntaxException
from taiga.projects.issues.models import Issue
from taiga.projects.tasks.models import Task
from taiga.projects.userstories.models import UserStory
from taiga.projects.models import Membership
from taiga.projects.history.services import get_history_queryset_by_model_instance, take_snapshot
from taiga.projects.notifications.choices import NotifyLevel
from taiga.projects.notifications.models import NotifyPolicy
from taiga.projects import services
from .. import factories as f
pytestmark = pytest.mark.django_db
def test_bad_signature(client):
project = f.ProjectFactory()
url = reverse("github-hook-list")
url = "%s?project=%s" % (url, project.id)
data = {}
response = client.post(url, json.dumps(data),
HTTP_X_HUB_SIGNATURE="sha1=badbadbad",
content_type="application/json")
response_content = response.data
assert response.status_code == 400
assert "Bad signature" in response_content["_error_message"]
def test_ok_signature(client):
project = f.ProjectFactory()
f.ProjectModulesConfigFactory(project=project, config={
"github": {
"secret": "tpnIwJDz4e"
}
})
url = reverse("github-hook-list")
url = "%s?project=%s" % (url, project.id)
data = {"test:": "data"}
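    # The X-Hub-Signature below is the hex HMAC-SHA1 digest of the JSON body,
    # keyed with the "tpnIwJDz4e" secret configured above.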
response = client.post(url, json.dumps(data),
HTTP_X_HUB_SIGNATURE="sha1=3c8e83fdaa266f81c036ea0b71e98eb5e054581a",
content_type="application/json")
assert response.status_code == 204
def test_push_event_detected(client):
project = f.ProjectFactory()
url = reverse("github-hook-list")
url = "%s?project=%s" % (url, project.id)
data = {"commits": [
{"message": "test message"},
]}
GitHubViewSet._validate_signature = mock.Mock(return_value=True)
with mock.patch.object(event_hooks.PushEventHook, "process_event") as process_event_mock:
response = client.post(url, json.dumps(data),
HTTP_X_GITHUB_EVENT="push",
content_type="application/json")
assert process_event_mock.call_count == 1
assert response.status_code == 204
def test_push_event_issue_processing(client):
creation_status = f.IssueStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_issues"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.IssueStatusFactory(project=creation_status.project)
issue = f.IssueFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
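    # Commit messages are scanned for "TG-<ref> #<status-slug>" tokens, which
    # move the referenced element to the named status.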
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
bye!
""" % (issue.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(issue.project, payload)
ev_hook.process_event()
issue = Issue.objects.get(id=issue.id)
assert issue.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_task_processing(client):
creation_status = f.TaskStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_tasks"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.TaskStatusFactory(project=creation_status.project)
task = f.TaskFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
bye!
""" % (task.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(task.project, payload)
ev_hook.process_event()
task = Task.objects.get(id=task.id)
assert task.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_user_story_processing(client):
creation_status = f.UserStoryStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_us"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.UserStoryStatusFactory(project=creation_status.project)
user_story = f.UserStoryFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
bye!
""" % (user_story.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(user_story.project, payload)
ev_hook.process_event()
user_story = UserStory.objects.get(id=user_story.id)
assert user_story.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_multiple_actions(client):
creation_status = f.IssueStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_issues"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.IssueStatusFactory(project=creation_status.project)
issue1 = f.IssueFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
issue2 = f.IssueFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
test TG-%s #%s ok
bye!
""" % (issue1.ref, new_status.slug, issue2.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook1 = event_hooks.PushEventHook(issue1.project, payload)
ev_hook1.process_event()
issue1 = Issue.objects.get(id=issue1.id)
issue2 = Issue.objects.get(id=issue2.id)
assert issue1.status.id == new_status.id
assert issue2.status.id == new_status.id
assert len(mail.outbox) == 2
def test_push_event_processing_case_insensitive(client):
creation_status = f.TaskStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_tasks"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.TaskStatusFactory(project=creation_status.project)
task = f.TaskFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test tg-%s #%s ok
bye!
""" % (task.ref, new_status.slug.upper())},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(task.project, payload)
ev_hook.process_event()
task = Task.objects.get(id=task.id)
assert task.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_task_bad_processing_non_existing_ref(client):
issue_status = f.IssueStatusFactory()
payload = {"commits": [
{"message": """test message
test TG-6666666 #%s ok
bye!
""" % (issue_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(issue_status.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "The referenced element doesn't exist"
assert len(mail.outbox) == 0
def test_push_event_us_bad_processing_non_existing_status(client):
user_story = f.UserStoryFactory.create()
payload = {"commits": [
{"message": """test message
test TG-%s #non-existing-slug ok
bye!
""" % (user_story.ref)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(user_story.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "The status doesn't exist"
assert len(mail.outbox) == 0
def test_push_event_bad_processing_non_existing_status(client):
issue = f.IssueFactory.create()
payload = {"commits": [
{"message": """test message
test TG-%s #non-existing-slug ok
bye!
""" % (issue.ref)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(issue.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "The status doesn't exist"
assert len(mail.outbox) == 0
def test_issues_event_opened_issue(client):
issue = f.IssueFactory.create()
issue.project.default_issue_status = issue.status
issue.project.default_issue_type = issue.type
issue.project.default_severity = issue.severity
issue.project.default_priority = issue.priority
issue.project.save()
Membership.objects.create(user=issue.owner, project=issue.project, role=f.RoleFactory.create(project=issue.project), is_owner=True)
notify_policy = NotifyPolicy.objects.get(user=issue.owner, project=issue.project)
notify_policy.notify_level = NotifyLevel.watch
notify_policy.save()
payload = {
"action": "opened",
"issue": {
"title": "test-title",
"body": "test-body",
"html_url": "http://github.com/test/project/issues/11",
},
"assignee": {},
"label": {},
"repository": {
"html_url": "test",
},
}
mail.outbox = []
ev_hook = event_hooks.IssuesEventHook(issue.project, payload)
ev_hook.process_event()
assert Issue.objects.count() == 2
assert len(mail.outbox) == 1
def test_issues_event_other_than_opened_issue(client):
issue = f.IssueFactory.create()
issue.project.default_issue_status = issue.status
issue.project.default_issue_type = issue.type
issue.project.default_severity = issue.severity
issue.project.default_priority = issue.priority
issue.project.save()
payload = {
"action": "closed",
"issue": {
"title": "test-title",
"body": "test-body",
"html_url": "http://github.com/test/project/issues/11",
},
"assignee": {},
"label": {},
}
mail.outbox = []
ev_hook = event_hooks.IssuesEventHook(issue.project, payload)
ev_hook.process_event()
assert Issue.objects.count() == 1
assert len(mail.outbox) == 0
def test_issues_event_bad_issue(client):
issue = f.IssueFactory.create()
issue.project.default_issue_status = issue.status
issue.project.default_issue_type = issue.type
issue.project.default_severity = issue.severity
issue.project.default_priority = issue.priority
issue.project.save()
payload = {
"action": "opened",
"issue": {},
"assignee": {},
"label": {},
}
mail.outbox = []
ev_hook = event_hooks.IssuesEventHook(issue.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "Invalid issue information"
assert Issue.objects.count() == 1
assert len(mail.outbox) == 0
def test_issue_comment_event_on_existing_issue_task_and_us(client):
project = f.ProjectFactory()
role = f.RoleFactory(project=project, permissions=["view_tasks", "view_issues", "view_us"])
f.MembershipFactory(project=project, role=role, user=project.owner)
user = f.UserFactory()
issue = f.IssueFactory.create(external_reference=["github", "http://github.com/test/project/issues/11"], owner=project.owner, project=project)
take_snapshot(issue, user=user)
task = f.TaskFactory.create(external_reference=["github", "http://github.com/test/project/issues/11"], owner=project.owner, project=project)
take_snapshot(task, user=user)
us = f.UserStoryFactory.create(external_reference=["github", "http://github.com/test/project/issues/11"], owner=project.owner, project=project)
take_snapshot(us, user=user)
payload = {
"action": "created",
"issue": {
"html_url": "http://github.com/test/project/issues/11",
},
"comment": {
"body": "Test body",
},
"repository": {
"html_url": "test",
},
}
mail.outbox = []
assert get_history_queryset_by_model_instance(issue).count() == 0
assert get_history_queryset_by_model_instance(task).count() == 0
assert get_history_queryset_by_model_instance(us).count() == 0
ev_hook = event_hooks.IssueCommentEventHook(issue.project, payload)
ev_hook.process_event()
issue_history = get_history_queryset_by_model_instance(issue)
assert issue_history.count() == 1
assert "Test body" in issue_history[0].comment
task_history = get_history_queryset_by_model_instance(task)
assert task_history.count() == 1
assert "Test body" in issue_history[0].comment
us_history = get_history_queryset_by_model_instance(us)
assert us_history.count() == 1
assert "Test body" in issue_history[0].comment
assert len(mail.outbox) == 3
def test_issue_comment_event_on_not_existing_issue_task_and_us(client):
issue = f.IssueFactory.create(external_reference=["github", "10"])
take_snapshot(issue, user=issue.owner)
task = f.TaskFactory.create(project=issue.project, external_reference=["github", "10"])
take_snapshot(task, user=task.owner)
us = f.UserStoryFactory.create(project=issue.project, external_reference=["github", "10"])
take_snapshot(us, user=us.owner)
payload = {
"action": "created",
"issue": {
"html_url": "http://github.com/test/project/issues/11",
},
"comment": {
"body": "Test body",
},
"repository": {
"html_url": "test",
},
}
mail.outbox = []
assert get_history_queryset_by_model_instance(issue).count() == 0
assert get_history_queryset_by_model_instance(task).count() == 0
assert get_history_queryset_by_model_instance(us).count() == 0
ev_hook = event_hooks.IssueCommentEventHook(issue.project, payload)
ev_hook.process_event()
assert get_history_queryset_by_model_instance(issue).count() == 0
assert get_history_queryset_by_model_instance(task).count() == 0
assert get_history_queryset_by_model_instance(us).count() == 0
assert len(mail.outbox) == 0
def test_issues_event_bad_comment(client):
issue = f.IssueFactory.create(external_reference=["github", "10"])
take_snapshot(issue, user=issue.owner)
payload = {
"action": "other",
"issue": {},
"comment": {},
"repository": {
"html_url": "test",
},
}
ev_hook = event_hooks.IssueCommentEventHook(issue.project, payload)
mail.outbox = []
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "Invalid issue comment information"
assert Issue.objects.count() == 1
assert len(mail.outbox) == 0
def test_api_get_project_modules(client):
project = f.create_project()
f.MembershipFactory(project=project, user=project.owner, is_owner=True)
url = reverse("projects-modules", args=(project.id,))
client.login(project.owner)
response = client.get(url)
assert response.status_code == 200
content = response.data
assert "github" in content
assert content["github"]["secret"] != ""
assert content["github"]["webhooks_url"] != ""
def test_api_patch_project_modules(client):
project = f.create_project()
f.MembershipFactory(project=project, user=project.owner, is_owner=True)
url = reverse("projects-modules", args=(project.id,))
client.login(project.owner)
data = {
"github": {
"secret": "test_secret",
"url": "test_url",
}
}
response = client.patch(url, json.dumps(data), content_type="application/json")
assert response.status_code == 204
config = services.get_modules_config(project).config
assert "github" in config
assert config["github"]["secret"] == "test_secret"
assert config["github"]["webhooks_url"] != "test_url"
def test_replace_github_references():
assert event_hooks.replace_github_references("project-url", "#2") == "[GitHub#2](project-url/issues/2)"
assert event_hooks.replace_github_references("project-url", "#2 ") == "[GitHub#2](project-url/issues/2) "
assert event_hooks.replace_github_references("project-url", " #2 ") == " [GitHub#2](project-url/issues/2) "
assert event_hooks.replace_github_references("project-url", " #2") == " [GitHub#2](project-url/issues/2)"
assert event_hooks.replace_github_references("project-url", "#test") == "#test"
assert event_hooks.replace_github_references("project-url", None) == ""
| agpl-3.0 | 3,970,424,818,434,624,500 | 34.885892 | 147 | 0.652772 | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.