repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses, 19 values) | size (stringlengths 4-7) | content (stringlengths 721-1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64 6.51-99.9) | line_max (int64 15-997) | alpha_frac (float64 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
openstack/glance | glance/api/v2/tasks.py | 1 | 16834 |
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import debtcollector
import glance_store
from oslo_config import cfg
from oslo_log import log as logging
import oslo_serialization.jsonutils as json
from oslo_utils import encodeutils
from oslo_utils import uuidutils
import six
from six.moves import http_client as http
import six.moves.urllib.parse as urlparse
import webob.exc
from glance.api import common
from glance.api import policy
from glance.common import exception
from glance.common import timeutils
from glance.common import wsgi
import glance.db
import glance.gateway
from glance.i18n import _, _LW
import glance.notifier
import glance.schema
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.import_opt('task_time_to_live', 'glance.common.config', group='task')
_DEPRECATION_MESSAGE = ("The task API is being deprecated and "
"it will be superseded by the new image import "
"API. Please refer to this link for more "
"information about the aforementioned process: "
"https://specs.openstack.org/openstack/glance-specs/"
"specs/mitaka/approved/image-import/"
"image-import-refactor.html")
class TasksController(object):
"""Manages operations on tasks."""
def __init__(self, db_api=None, policy_enforcer=None, notifier=None,
store_api=None):
self.db_api = db_api or glance.db.get_api()
self.policy = policy_enforcer or policy.Enforcer()
self.notifier = notifier or glance.notifier.Notifier()
self.store_api = store_api or glance_store
self.gateway = glance.gateway.Gateway(self.db_api, self.store_api,
self.notifier, self.policy)
@debtcollector.removals.remove(message=_DEPRECATION_MESSAGE)
def create(self, req, task):
# NOTE(rosmaita): access to this call is enforced in the deserializer
ctxt = req.context
task_factory = self.gateway.get_task_factory(ctxt)
executor_factory = self.gateway.get_task_executor_factory(ctxt)
task_repo = self.gateway.get_task_repo(ctxt)
try:
new_task = task_factory.new_task(
task_type=task['type'],
owner=ctxt.owner,
task_input=task['input'],
image_id=task['input'].get('image_id'),
user_id=ctxt.user_id,
request_id=ctxt.request_id)
task_repo.add(new_task)
task_executor = executor_factory.new_task_executor(ctxt)
pool = common.get_thread_pool("tasks_pool")
pool.spawn(new_task.run, task_executor)
except exception.Forbidden as e:
msg = (_LW("Forbidden to create task. Reason: %(reason)s")
% {'reason': encodeutils.exception_to_unicode(e)})
LOG.warn(msg)
raise webob.exc.HTTPForbidden(explanation=e.msg)
return new_task
@debtcollector.removals.remove(message=_DEPRECATION_MESSAGE)
def index(self, req, marker=None, limit=None, sort_key='created_at',
sort_dir='desc', filters=None):
# NOTE(rosmaita): access to this call is enforced in the deserializer
result = {}
if filters is None:
filters = {}
filters['deleted'] = False
if limit is None:
limit = CONF.limit_param_default
limit = min(CONF.api_limit_max, limit)
task_repo = self.gateway.get_task_stub_repo(req.context)
try:
tasks = task_repo.list(marker, limit, sort_key,
sort_dir, filters)
if len(tasks) != 0 and len(tasks) == limit:
result['next_marker'] = tasks[-1].task_id
except (exception.NotFound, exception.InvalidSortKey,
exception.InvalidFilterRangeValue) as e:
LOG.warn(encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPBadRequest(explanation=e.msg)
except exception.Forbidden as e:
LOG.warn(encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPForbidden(explanation=e.msg)
result['tasks'] = tasks
return result
@debtcollector.removals.remove(message=_DEPRECATION_MESSAGE)
def get(self, req, task_id):
_enforce_access_policy(self.policy, req)
try:
task_repo = self.gateway.get_task_repo(req.context)
task = task_repo.get(task_id)
except exception.NotFound as e:
msg = (_LW("Failed to find task %(task_id)s. Reason: %(reason)s")
% {'task_id': task_id,
'reason': encodeutils.exception_to_unicode(e)})
LOG.warn(msg)
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Forbidden as e:
msg = (_LW("Forbidden to get task %(task_id)s. Reason:"
" %(reason)s")
% {'task_id': task_id,
'reason': encodeutils.exception_to_unicode(e)})
LOG.warn(msg)
raise webob.exc.HTTPForbidden(explanation=e.msg)
return task
@debtcollector.removals.remove(message=_DEPRECATION_MESSAGE)
def delete(self, req, task_id):
_enforce_access_policy(self.policy, req)
msg = (_("This operation is currently not permitted on Glance Tasks. "
"They are auto deleted after reaching the time based on "
"their expires_at property."))
raise webob.exc.HTTPMethodNotAllowed(explanation=msg,
headers={'Allow': 'GET'},
body_template='${explanation}')
class RequestDeserializer(wsgi.JSONRequestDeserializer):
_required_properties = ['type', 'input']
def _get_request_body(self, request):
output = super(RequestDeserializer, self).default(request)
if 'body' not in output:
msg = _('Body expected in request.')
raise webob.exc.HTTPBadRequest(explanation=msg)
return output['body']
def _validate_sort_dir(self, sort_dir):
if sort_dir not in ['asc', 'desc']:
msg = _('Invalid sort direction: %s') % sort_dir
raise webob.exc.HTTPBadRequest(explanation=msg)
return sort_dir
def _get_filters(self, filters):
status = filters.get('status')
if status:
if status not in ['pending', 'processing', 'success', 'failure']:
msg = _('Invalid status value: %s') % status
raise webob.exc.HTTPBadRequest(explanation=msg)
type = filters.get('type')
if type:
if type not in ['import']:
msg = _('Invalid type value: %s') % type
raise webob.exc.HTTPBadRequest(explanation=msg)
return filters
def _validate_marker(self, marker):
if marker and not uuidutils.is_uuid_like(marker):
msg = _('Invalid marker format')
raise webob.exc.HTTPBadRequest(explanation=msg)
return marker
def _validate_limit(self, limit):
try:
limit = int(limit)
except ValueError:
msg = _("limit param must be an integer")
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 0:
msg = _("limit param must be positive")
raise webob.exc.HTTPBadRequest(explanation=msg)
return limit
def _validate_create_body(self, body):
"""Validate the body of task creating request"""
for param in self._required_properties:
if param not in body:
msg = _("Task '%s' is required") % param
raise webob.exc.HTTPBadRequest(explanation=msg)
def __init__(self, schema=None, policy_engine=None):
super(RequestDeserializer, self).__init__()
self.schema = schema or get_task_schema()
# want to enforce the access policy as early as possible
self.policy_engine = policy_engine or policy.Enforcer()
def create(self, request):
_enforce_access_policy(self.policy_engine, request)
body = self._get_request_body(request)
self._validate_create_body(body)
try:
self.schema.validate(body)
except exception.InvalidObject as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
task = {}
properties = body
for key in self._required_properties:
try:
task[key] = properties.pop(key)
except KeyError:
pass
return dict(task=task)
def index(self, request):
_enforce_access_policy(self.policy_engine, request)
params = request.params.copy()
limit = params.pop('limit', None)
marker = params.pop('marker', None)
sort_dir = params.pop('sort_dir', 'desc')
query_params = {
'sort_key': params.pop('sort_key', 'created_at'),
'sort_dir': self._validate_sort_dir(sort_dir),
'filters': self._get_filters(params)
}
if marker is not None:
query_params['marker'] = self._validate_marker(marker)
if limit is not None:
query_params['limit'] = self._validate_limit(limit)
return query_params
class ResponseSerializer(wsgi.JSONResponseSerializer):
def __init__(self, task_schema=None, partial_task_schema=None):
super(ResponseSerializer, self).__init__()
self.task_schema = task_schema or get_task_schema()
self.partial_task_schema = (partial_task_schema
or _get_partial_task_schema())
def _inject_location_header(self, response, task):
location = self._get_task_location(task)
if six.PY2:
location = location.encode('utf-8')
response.headers['Location'] = location
def _get_task_location(self, task):
return '/v2/tasks/%s' % task.task_id
def _format_task(self, schema, task):
task_view = {
'id': task.task_id,
'input': task.task_input,
'type': task.type,
'status': task.status,
'owner': task.owner,
'message': task.message,
'result': task.result,
'created_at': timeutils.isotime(task.created_at),
'updated_at': timeutils.isotime(task.updated_at),
'self': self._get_task_location(task),
'schema': '/v2/schemas/task'
}
if task.image_id:
task_view['image_id'] = task.image_id
if task.request_id:
task_view['request_id'] = task.request_id
if task.user_id:
task_view['user_id'] = task.user_id
if task.expires_at:
task_view['expires_at'] = timeutils.isotime(task.expires_at)
task_view = schema.filter(task_view) # domain
return task_view
def _format_task_stub(self, schema, task):
task_view = {
'id': task.task_id,
'type': task.type,
'status': task.status,
'owner': task.owner,
'created_at': timeutils.isotime(task.created_at),
'updated_at': timeutils.isotime(task.updated_at),
'self': self._get_task_location(task),
'schema': '/v2/schemas/task'
}
if task.expires_at:
task_view['expires_at'] = timeutils.isotime(task.expires_at)
task_view = schema.filter(task_view) # domain
return task_view
def create(self, response, task):
response.status_int = http.CREATED
self._inject_location_header(response, task)
self.get(response, task)
def get(self, response, task):
task_view = self._format_task(self.task_schema, task)
body = json.dumps(task_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def index(self, response, result):
params = dict(response.request.params)
params.pop('marker', None)
query = urlparse.urlencode(params)
body = {
'tasks': [self._format_task_stub(self.partial_task_schema, task)
for task in result['tasks']],
'first': '/v2/tasks',
'schema': '/v2/schemas/tasks',
}
if query:
body['first'] = '%s?%s' % (body['first'], query)
if 'next_marker' in result:
params['marker'] = result['next_marker']
next_query = urlparse.urlencode(params)
body['next'] = '/v2/tasks?%s' % next_query
response.unicode_body = six.text_type(json.dumps(body,
ensure_ascii=False))
response.content_type = 'application/json'
_TASK_SCHEMA = {
"id": {
"description": _("An identifier for the task"),
"pattern": _('^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}'
'-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$'),
"type": "string"
},
"type": {
"description": _("The type of task represented by this content"),
"enum": [
"import",
"api_image_import"
],
"type": "string"
},
"status": {
"description": _("The current status of this task"),
"enum": [
"pending",
"processing",
"success",
"failure"
],
"type": "string"
},
"input": {
"description": _("The parameters required by task, JSON blob"),
"type": ["null", "object"],
},
"result": {
"description": _("The result of current task, JSON blob"),
"type": ["null", "object"],
},
"owner": {
"description": _("An identifier for the owner of this task"),
"type": "string"
},
"message": {
"description": _("Human-readable informative message only included"
" when appropriate (usually on failure)"),
"type": "string",
},
"image_id": {
"description": _("Image associated with the task"),
"type": "string",
},
"request_id": {
"description": _("Human-readable informative request-id"),
"type": "string",
},
"user_id": {
"description": _("User associated with the task"),
"type": "string",
},
"expires_at": {
"description": _("Datetime when this resource would be"
" subject to removal"),
"type": ["null", "string"]
},
"created_at": {
"description": _("Datetime when this resource was created"),
"type": "string"
},
"updated_at": {
"description": _("Datetime when this resource was updated"),
"type": "string"
},
'self': {
'readOnly': True,
'type': 'string'
},
'schema': {
'readOnly': True,
'type': 'string'
}
}
def _enforce_access_policy(policy_engine, request):
try:
policy_engine.enforce(request.context, 'tasks_api_access', {})
except exception.Forbidden:
LOG.debug("User does not have permission to access the Tasks API")
raise webob.exc.HTTPForbidden()
def get_task_schema():
properties = copy.deepcopy(_TASK_SCHEMA)
schema = glance.schema.Schema('task', properties)
return schema
def _get_partial_task_schema():
properties = copy.deepcopy(_TASK_SCHEMA)
hide_properties = ['input', 'result', 'message']
for key in hide_properties:
del properties[key]
schema = glance.schema.Schema('task', properties)
return schema
def get_collection_schema():
task_schema = _get_partial_task_schema()
return glance.schema.CollectionSchema('tasks', task_schema)
def create_resource():
"""Task resource factory method"""
task_schema = get_task_schema()
partial_task_schema = _get_partial_task_schema()
deserializer = RequestDeserializer(task_schema)
serializer = ResponseSerializer(task_schema, partial_task_schema)
controller = TasksController()
return wsgi.Resource(controller, deserializer, serializer)
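# A note on the wiring (assumed from the classes above): the wsgi.Resource
# returned here is what gets mounted on the /v2/tasks routes, so
# RequestDeserializer validates and authorizes each request, TasksController
# acts on it, and ResponseSerializer renders the JSON body and headers.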
| apache-2.0 | 6,334,792,962,047,081,000 | 35.595652 | 78 | 0.580908 | false |
rwl/PyDyn | pydyn/Pdoption.py | 1 | 1057 |
# Copyright (C) 2009 Stijn Cole
# Copyright (C) 2010-2011 Richard Lincoln
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from numpy import array
def Pdoption():
""" Returns default option vector.
@see: U{http://www.esat.kuleuven.be/electa/teaching/matdyn/}
"""
## Options vector
options = array([
1, # method
1e-4, # tolerance
1e-3, # minstepsize
1e2, # maxstepsize
1, # output
1 # plots
])
return options
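# A hypothetical usage sketch (field meanings follow the inline comments above):
#     opts = Pdoption()
#     opts[1] = 1e-5  # tighten the solver tolerance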
| apache-2.0 | 4,938,374,857,737,423,000 | 30.088235 | 74 | 0.634816 | false |
chfoo/wpull | wpull/application/tasks/conversion.py | 1 | 2011 |
import asyncio
from typing import Optional
from wpull.database.base import NotFound
from wpull.pipeline.item import URLRecord
from wpull.pipeline.pipeline import ItemTask, ItemSource
from wpull.pipeline.app import AppSession
class LinkConversionSetupTask(ItemTask[AppSession]):
@asyncio.coroutine
def process(self, session: AppSession):
self._build_document_converter(session)
@classmethod
def _build_document_converter(cls, session: AppSession):
'''Build the Document Converter.'''
if not session.args.convert_links:
return
converter = session.factory.new(
'BatchDocumentConverter',
session.factory['HTMLParser'],
session.factory['ElementWalker'],
session.factory['URLTable'],
backup=session.args.backup_converted
)
return converter
class QueuedFileSession(object):
def __init__(self, app_session: AppSession, file_id: int,
url_record: URLRecord):
self.app_session = app_session
self.file_id = file_id
self.url_record = url_record
class QueuedFileSource(ItemSource[QueuedFileSession]):
def __init__(self, app_session: AppSession):
self._app_session = app_session
@asyncio.coroutine
def get_item(self) -> Optional[QueuedFileSession]:
if not self._app_session.args.convert_links:
return
try:
db_item = self._app_session.factory['URLTable'].convert_check_out()
except NotFound:
return
session = QueuedFileSession(
self._app_session, db_item[0], db_item[1])
return session
class LinkConversionTask(ItemTask[QueuedFileSession]):
@asyncio.coroutine
def process(self, session: QueuedFileSession):
converter = session.app_session.factory.instance_map.get(
'BatchDocumentConverter')
if not converter:
return
converter.convert_by_record(session.url_record)
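# How the pieces fit together (as the class names and calls above suggest):
# QueuedFileSource checks convertible files out of the URLTable, wraps each
# one in a QueuedFileSession, and LinkConversionTask hands that session to
# the BatchDocumentConverter built by LinkConversionSetupTask.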
| gpl-3.0 | -686,150,176,005,659,400 | 28.144928 | 79 | 0.655395 | false |
lcoandrade/DsgTools | core/Factories/DbCustomizationFactory/defaultCustomization.py | 1 | 2468 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2016-07-31
git sha : $Format:%H$
copyright : (C) 2016 by Philipe Borba - Cartographic Engineer @ Brazilian Army
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
#DsgTools Imports
from DsgTools.core.Factories.DbCustomizationFactory.dbCustomization import DbCustomization
class DefaultCustomization(DbCustomization):
def __init__(self, customJson):
super(DefaultCustomization, self).__init__(customJson)
def buildSql(self, abstractDb):
"""
{'schema': schema, 'table': table, 'attrName':attrName, 'oldValue':oldValue, 'newValue':newValue}
"""
#Abstract method. Must be reimplemented in each child.
sql = """"""
for modItem in self.customJson['ChangeDefault']:
sql += """ALTER TABLE ONLY "{0}"."{1}" ALTER COLUMN "{2}" SET DEFAULT {3};\n""".format(modItem['schema'], modItem['table'], modItem['attrName'], modItem['newValue'])
return sql
def buildUndoSql(self):
"""
{'schema': schema, 'table': table, 'attrName':attrName, 'oldValue':oldValue, 'newValue':newValue}
"""
#Abstract method. Must be reimplemented in each child.
sql = """"""
for modItem in self.customJson['ChangeDefault']:
sql += """ALTER TABLE ONLY "{0}"."{1}" ALTER COLUMN "{2}" SET DEFAULT {3};\n""".format(modItem['schema'], modItem['table'], modItem['attrName'], modItem['oldValue'])
return sql
| gpl-2.0 | -3,361,011,689,052,593,000 | 49.387755 | 177 | 0.468395 | false |
mono9lith/rss-email | src/rss.py | 1 | 33035 |
#! /usr/bin/env python
# -*- coding: UTF-8 -*-
#
"""Отправка RSS на E-mail
Требования:
- Python версии 3.0 и выше.
- config.py
- typo.py
Рекомендации:
- наличие в системе локали ru_RU.UTF-8
Использование:
- создать config.json
- (опционально) mkdir archive_<config name>; ARCHIVE -> True
- $ nohup nice -19 python3 rss.py &
Сайт проекта:
- <https://code.google.com/p/rss-email/>
- <http://rss-mail.blogspot.ru/>"""
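# A minimal config.json sketch, inferred from the keys read in main() and
# formContent() below; the concrete values are illustrative assumptions only:
# {
#     "home": {
#         "HOUR": 8, "RECORDS_MAX": 500,
#         "TITLE_LENGTH_MAX": 120, "DESC_LENGTH_MAX": 300,
#         "FROM": "[email protected]", "TO": "[email protected]",
#         "SMTP": "smtp.example.com", "SMTP_PORT": 587,
#         "LOGIN": "bot", "PASSWORD": "secret", "TLS": true,
#         "FEEDS": {"News": {"Example": "https://example.com/rss.xml"}}
#     }
# }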
__version__ = 1, 5, 0
__author__ = ["Александр <[email protected]>",]
__license__ = """\
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License (GNU GPL) version 3 or,
at your option, any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY. For more details see the GNU General Public
License: <https://gnu.org/licenses/gpl.html>."""
#
import html.entities
import html.parser
import datetime
import urllib.request
import re
import gzip
import typo
import json
import locale
import time
import urllib.parse
import smtplib
from email.mime.text import MIMEText
from email.header import Header as MIMEHeader
from email.mime.multipart import MIMEMultipart
__all__ = ["getGen", "getRss", "getWebPage", "parseHtml"]
DEBUG = False
ARCHIVE = True
IO_CODING = "utf_8" # the default encoding depends on the system one!
UTF8 = "UTF-8"
JOIN = "".join
JOIN_N = "\n".join
RANGE = range
LEN = len
INT = int
CHR = chr
LIST = list
ENUMERATE = enumerate
RE_C = re.compile
DATE_TIME_FORMAT = "%d %b, %H:%M"
DATE_TIME_NOW = datetime.datetime.now
NAME_TO_CHR = html.entities.html5
ISINSTANCE = isinstance
HEADERS = {
    # Connection: close is sent by urllib.request itself
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0",
"Accept-Language": "ru,ru-ru;q=0.8,en-us;q=0.5,en;q=0.3",
"Accept-Encoding": "gzip, deflate",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
}
def subEnt(match):
"""замена именованных HTML символов на Unicode"""
# для повторного прогона извлеченного текста
try:
return NAME_TO_CHR[match.group(1)]
except KeyError:
return ""
def subNum(match):
"""замена числовых HTML символов на Unicode"""
# для повторного прогона извлеченного текста
text = match.group(1)
start = text[0]
xStart = start == "x" or start == "X"
try:
return CHR(INT(text[1:] if xStart else text))
except:
return ""
HELLIP = "\u2026" # horizontal ellipsis (...)
HYP = "\u2010" # hyphen
NBHYP = "\u2011" # non-breaking hyphen (ab-cd)
NDASH = "\u2013" # en dash (ab - cd)
MDASH = "\u2014" # em dash (ab - cd)
SYM = HELLIP + HYP + NBHYP + NDASH + MDASH
# strips trailing punctuation when truncating text
EXP_CLEAN = (
(RE_C(r"(?:^[^<]*>|<[^>]*>|<[^>]*$)").sub, r" "), # удаляет HTML теги
(RE_C(r"&(\w{1,8};)").sub, subEnt), # заменяет HTML последовательности
(RE_C(r"&#(\d{1,5});").sub, subNum), # заменяет HTML последовательности
# удаляет лишние пробелы и табы в начале и конце строки, переносы строки
(RE_C(r"(?:(?<=\n)|^)[ \t]+|[ \t]+(?:(?=\n)|$)|(?<=\n\n)\n+").sub, r""),
(RE_C(r"[ \t]{2,}").sub, r" "), # удаляет лишние пробелы
(RE_C(r" *[\n\r]+ *").sub, r" "), # удаляет переносы строк
)
RULE_CLEAN = (
("\n", " "),
("\r", " "),
)
class MyHTMLParser(html.parser.HTMLParser):
def __init__(self):
        # convert_charrefs=False keeps handle_entityref/handle_charref firing
        # on Python 3.5+, where convert_charrefs started defaulting to True
        # (note: the keyword itself requires Python 3.4 or newer)
        html.parser.HTMLParser.__init__(self, convert_charrefs=False)
self.items = []
self.ITEMS_APPEND = self.items.append
self.NAME_TO_CHR = html.entities.html5
self.CHR = chr
self.INT = int
def handle_starttag(self, tag, attrs):
self.ITEMS_APPEND((0, tag))
self.items += [(1, attr) for attr in attrs]
def handle_endtag(self, tag):
self.ITEMS_APPEND((2, tag))
def handle_data(self, data):
self.ITEMS_APPEND((3, data))
def unknown_decl(self, data):
self.ITEMS_APPEND((4, data))
def handle_entityref(self, name):
"""конвертирует именованные HTML символы в Unicode 'amp;' -> '&'"""
try:
c = self.NAME_TO_CHR[name]
except KeyError:
return
self.ITEMS_APPEND((3, c))
def handle_charref(self, name):
"""конвертирует числовые HTML символы в Unicode"""
start = name[0]
xStart = start == "x" or start == "X"
try:
c = self.CHR(self.INT(name[1:] if xStart else name))
except:
return
self.ITEMS_APPEND((3, c))
def getRss(items, url, title_len, desc_len):
"""находит запись ленты и вынимает элементы"""
NOW = DATE_TIME_NOW()
NOW_DATE = INT(NOW.strftime("%Y%m%d"))
NOW_SHORT = NOW.strftime(DATE_TIME_FORMAT)
if not items:
LOG_APPEND(NOW_SHORT + " getRss: no items: " + url)
return {}
    # determine the feed type
isRss = None
if LEN(items) > 5:
for i in RANGE(0, 5):
if items[i][0] == 0:
for txt in ("rss", "RSS"):
if items[i][1] == txt:
isRss = True
break
if isRss is None:
for txt in ("feed", "FEED"):
if items[i][1] == txt:
isRss = False
break
if isRss is None:
LOG_APPEND(NOW_SHORT + " getRss: can't determ feed type: " + url)
return {}
ITEM = "item" if isRss else "entry"
REPLACE = manyReplace
LINK = "link"
TITLE = "title"
DESC = "description" if isRss else "summary"
def getItemContent(items, name, index, atomLink=False):
"""вынимает содержимое элемента"""
result = ""
for i, j in findElement(items, name, index[0], index[1]):
for k in RANGE(i, j): # range(0, 9) -> 0,1,2,3,4,5,6,7,8
if atomLink:
if (items[k][0] == 1 and items[k][1][0] == "rel" and
items[k][1][1] == "alternate"):
result = items[k + 2][1][1]
break
else:
if items[k][0] == 3 and items[k][1]:
result = items[k][1]
break
elif items[k][0] == 4:
result = items[k][1].replace("CDATA[", "")
break
break
return result
    # collect the new entries
result = {}
for i, j in findElement(items, ITEM):
title = getItemContent(items, TITLE, (i + 1, j - 1))
desc = getItemContent(items, DESC, (i + 1, j - 1))
link = getItemContent(items, LINK, (i + 1, j - 1), not isRss)
if not link.strip():
continue
        title = REPLACE(title, RULE_CLEAN) # removes stray characters
        title = title[:title_len + 500] # truncates (allowing for the HTML tags removed later)
        desc = REPLACE(desc, RULE_CLEAN) # removes stray characters
        desc = desc[:desc_len + 500] # truncates (allowing for the HTML tags removed later)
result[link] = {TITLE: title, "date": NOW_DATE, "desc": desc}
if not result:
LOG_APPEND(NOW_SHORT + " getRss: nothing extracted: " + url)
return result
def findElement(items, name, start=0, end=0):
"""ищет элементы в диапозоне"""
#TODO возвращает без конечного </элемента>
if not end:
end = LEN(items)
else:
end += 1 # компенсация range()
    # look for the opening <item>
index = start
for i in RANGE(start, end): # range(0, 9) -> 0,1,2,3,4,5,6,7,8
        if i < index: # skip the items already consumed
continue
if items[i][0] == 0 and items[i][1] == name:
            # look for the closing </item>
for j in RANGE(i + 1, end):
if items[j][0] == 2 and items[j][1] == name:
yield i, j
index = j + 1
                    break # move on to the next opening <item>
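# For example, for the parsed items of "<item>a</item><item>b</item>" this
# yields (i, j) pairs where i indexes the opening <item> and j the closing
# </item>; callers iterate range(i, j), so the closing tag itself is excluded
# (hence the TODO above).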
def getGen(items, root, url, width):
"""находит root элемент и вынимает ссылки"""
#TODO
#TODO обрезание и чистка
NOW = DATE_TIME_NOW()
NOW_DATE = INT(NOW.strftime("%Y%m%d"))
NOW_SHORT = NOW.strftime(DATE_TIME_FORMAT)
result = {}
    for i, j in findElement(items, root[0]): # finds all <div></div> pairs
        # start the extraction
        # if the root element carries the required attribute
for k in RANGE(i + 1, j - 1):
if (
items[k][0] == 1 and
items[k][1][0] == root[1] and
(
items[k][1][1] == root[2] or
" " + root[2] + " " in items[k][1][1] or
items[k][1][1].startswith(root[2]) or
items[k][1][1].endswith(root[2])
)
):
                # extract the links from the elements inside root
for id, data in linkExtract(items, k, j - 1, url, width, NOW_DATE):
result[id] = data
                break # stop scanning the attributes
if not result:
LOG_APPEND(NOW_SHORT + " getGen: nothing extracted: " + url)
return result
def linkExtract(items, i, j, url, width, date):
"""вынимает ссылки из HTML"""
RE_SUB = reSub
CUT_TEXT = cutText
    #TODO handle the case where the root element is an <a> itself
for k, m in findElement(items, "a", i, j):
title = ""
addr = ""
for n in RANGE(k, m):
if items[n][0] == 1 and items[n][1][0] == "href" and not addr:
addr = items[n][1][1]
if items[n][0] == 3 and not title:
title = items[n][1]
if addr:
            title = RE_SUB(title, EXP_CLEAN) # removes stray characters
            title = CUT_TEXT(title, width, EXP_DEL) # truncates the title
yield (
addr if addr.startswith("http") else URL_JOIN(url, addr),
{"title": title, "date": date, "desc": ""},
)
def getWebPage(url, xml=True):
    # urllib is used instead of socket to avoid handling
    # "Transfer-Encoding: chunked" in the raw response
    # and to avoid handling HTTPS by hand
NOW_SHORT = DATE_TIME_NOW().strftime(DATE_TIME_FORMAT)
    MAX_SIZE = 1048576 # 1 MiB
data = None
contentType = b""
req = urllib.request.Request(url, headers=HEADERS)
    # fetch the data from the internet
try:
with urllib.request.urlopen(req, timeout=7) as inFile:
data = inFile.read()
contentType = inFile.getheader("Content-Type", "").encode("ascii")
contentEncoding = inFile.getheader("Content-Encoding", "")
if DEBUG:
LOG_APPEND(NOW_SHORT + " getWebPage: downloaded: " + url)
except Exception as ex:
LOG_APPEND(NOW_SHORT + " getWebPage: " + str(ex) + ": " + url)
return None
if LEN(data) > MAX_SIZE:
LOG_APPEND(NOW_SHORT + " getWebPage: too big: " + url)
return None
if contentEncoding == "gzip":
data = gzip.decompress(data)
    elif contentEncoding != "": # some other compression
return None
data = contentType + data
    # determine the character encoding
coding = None
tmp = data[:500 if xml else 1200]
for enc in (b"utf-8", b"UTF-8", b"utf8", b"UTF8"):
if enc in tmp:
coding = "utf_8"
break
if not coding:
for enc in (b"windows-1251", b"WINDOWS-1251", b"cp1251", b"CP1251"):
if enc in tmp:
coding = "cp1251"
break
if not coding:
for enc in (b"koi8-r", b"KOI8-R"):
if enc in tmp:
coding = "koi8_r"
break
if not coding:
for enc in (b"windows-1252", b"WINDOWS-1252", b"cp1252", b"CP1252",
b"iso-8859-1", b"ISO-8859-1", b"iso8859-1", b"ISO8859-1",
b"cp819", b"CP819", b"latin1", b"LATIN1",):
if enc in tmp:
coding = "cp1252"
break
if coding:
return data.decode(coding, errors="replace")
else:
LOG_APPEND(NOW_SHORT + " getWebPage: can't determine enc: " + url)
return None
def reSub(text, reList):
"""замена текста для re"""
for sub, repl in reList:
text = sub(repl, text)
return text
def manyReplace(text, rules):
"""заменяет части в тексте"""
for sub, repl in rules:
text = text.replace(sub, repl)
return text
def cutText(text, width, expCut):
"""Обрезает текст до нужной длины"""
if LEN(text) < width:
return text
    HELLIP = "\u2026" # horizontal ellipsis (...)
WORD_WIDTH = 20
text = text[:width]
index = text.rfind(" ", -WORD_WIDTH, -1)
if index != -1:
text = text[:index]
return reSub(text, expCut) + HELLIP
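# For example (using the module's EXP_DEL rules): cutText("one two three four",
# 10, EXP_DEL) keeps at most 10 characters, backs up to the nearest space
# within the last 20, strips trailing punctuation, and yields "one two" plus
# the ellipsis character.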
def formContent(new_file, header, title_len, desc_len, source):
"""собирает HTML"""
#TODO markdown, json
#TODO костыльно
RECORD_FORMAT = "<br>\n<br>\n<a target=\"_blank\" href=\"{1}\"><b>{0}</b></a><br>\n{2}".format
RECORD_FORMAT_NODESC = "<br>\n<br>\n<a target=\"_blank\" href=\"{1}\"><b>{0}</b></a>{2}".format
RECORD_FORMAT_FIRST = "\n<a target=\"_blank\" href=\"{1}\"><b>{0}</b></a><br>\n{2}".format
RECORD_FORMAT_NODESC_FIRST = "\n<a target=\"_blank\" href=\"{1}\"><b>{0}</b></a>{2}".format
STYLE = (
"\n<style>\n"
"html {\n"
" background-color: #f6f5f3;\n"
" font-family: sans-serif;\n"
" font-size: .8em;\n"
" line-height: 1.4;\n"
" color: #222;\n"
" margin: 0;\n"
" padding: 1em;\n"
"}\n"
"body {\n"
" background-color: #fff;\n"
" max-width: 600px;\n"
" margin: 2em auto;\n"
" padding: 2em 4em;\n"
" border: 1px solid #e6e6e6;\n"
" border-radius: 2px;\n"
"}\n"
"a {\n"
" text-decoration: none;\n"
"}\n"
"a:link {\n"
" color: #22c;\n"
"}\n"
"a:hover {\n"
" text-decoration: underline;\n"
"}\n"
"</style>"
)
HTML = "<!DOCTYPE html>\n<meta charset={0}>\n<title>~t~</title>".format(UTF8) + STYLE + "\n\n<h1>~t~</h1>\n~c~"
CAPTION_FORMAT = "\n<h2 class=\"first\">{0}</h2>\n<hr>\n".format
CAPTION_FORMAT2 = ("\n<h2><a href=\"{1}\">{0}</a> ({2})</h2>\n" if DEBUG
else "\n\n<h2>{0}</h2>\n").format
CUT_TEXT = cutText
REPLACE = manyReplace
RE_SUB = reSub
EXP_ESC = (
("&", "&"),
("<", "<"),
(">", ">"),
)
result = []
RESULT_APPEND = result.append
groups = LIST(new_file)
groups.sort()
for group in groups:
escaped_group = REPLACE(group, EXP_ESC)
        RESULT_APPEND(CAPTION_FORMAT(escaped_group)) # appends the section
firstFeed = True #TODO
for feed in new_file[group]:
leng = LEN(new_file[group][feed])
escaped_feed = REPLACE(feed, EXP_ESC)
source_feed = source[group][feed]
if not ISINSTANCE(source_feed, str):
source_url = source_feed["url"]
else:
source_url = source_feed
escaped_url = REPLACE(source_url, EXP_ESC)
            RESULT_APPEND(CAPTION_FORMAT2(escaped_feed, escaped_url, leng)) # appends the feed
firstFeed = False
firstRecord = True
for record in new_file[group][feed]:
                title = RE_SUB(new_file[group][feed][record]["title"], EXP_CLEAN) # removes stray characters
                title = TYPO(title) # applies typographic fixes
                title = CUT_TEXT(title, title_len, EXP_DEL) # truncates
                desc = RE_SUB(new_file[group][feed][record]["desc"], EXP_CLEAN) # removes stray characters
                desc = TYPO(desc) # applies typographic fixes
                desc = CUT_TEXT(desc, desc_len, EXP_DEL) # truncates
escaped_title = REPLACE(title, EXP_ESC)
escaped_link = REPLACE(record, EXP_ESC)
escaped_desc = REPLACE(desc, EXP_ESC)
if not escaped_title:
escaped_title = "(нет заголовка)"
RE_FO = RECORD_FORMAT
if not escaped_desc and not firstRecord:
RE_FO = RECORD_FORMAT_NODESC
elif escaped_desc and not firstRecord:
RE_FO = RECORD_FORMAT
elif escaped_desc and firstRecord:
RE_FO = RECORD_FORMAT_FIRST
elif not escaped_desc and firstRecord:
RE_FO = RECORD_FORMAT_NODESC_FIRST
if DEBUG:
escaped_desc = str(len(escaped_desc)) + ", " + escaped_desc
                RESULT_APPEND(RE_FO(escaped_title, escaped_link, escaped_desc)) # appends the link
firstRecord = False
return HTML.replace("~t~", header).replace("~c~", JOIN(result))
def parseHtml(text, url):
"""возвращает <list>(<tuple>(<int>, <str>)) список найденных элементов
тип, значение
"""
NOW_SHORT = DATE_TIME_NOW().strftime(DATE_TIME_FORMAT)
#TODO? from xml.etree.ElementTree import parse
    #TODO removal of (3, '\n\n') items
if not text:
return []
parser = MyHTMLParser()
itemsP = parser.items
try:
parser.feed(text)
parser.close()
    except Exception as ex: # html.parser.HTMLParseError was removed in Python 3.5
LOG_APPEND(NOW_SHORT + " parseHtml: " + str(ex) + ": " + url)
return []
    #TODO optimize
    # find consecutive runs of data elements,
    # to stitch text and elements like & and > into one piece
    # text, text, ... -> text
leng = LEN(itemsP)
result = []
RESULT_APPEND = result.append
for i in RANGE(0, leng):
if (
(
                itemsP[i][0] == 3 and # the item is text
                i < leng - 1 and # the item is not the last or next-to-last one
                itemsP[i + 1][0] == 3 and # the next item is text
                i != 0 and # the item is not the first element
                itemsP[i - 1][0] != 3 # the previous item is not text
            ) or
            (
                itemsP[i][0] == 3 and # the item is text
                i < leng - 1 and # the item is not the last or next-to-last one
                itemsP[i + 1][0] == 3 and # the next item is text
                i == 0 # the item is the first element
)
):
indexEnd = i
for j in RANGE(i, leng):
if itemsP[j][0] == 3:
                indexEnd += 1 # find the end of the run of consecutive text items
else:
RESULT_APPEND((i, indexEnd - 1))
break
    # strip the leftovers (elements like & and >)
    # stitch the runs into one piece
for i, j in result[::-1]:
ll = (itemsP[k][1] for k in RANGE(i, j + 1))
itemsP[i] = (3, JOIN(ll))
for k in RANGE(i + 1, j + 1)[::-1]:
del itemsP[k]
return itemsP
def delEnt(current, dumped, changed):
"""удаляет записи, отсутствующие в текущем конфиге"""
for group in LIST(dumped):
if group not in current:
del dumped[group]
changed = True
LOG_APPEND("delEnt: del group: " + group)
continue
for feed in LIST(dumped[group]):
if feed not in current[group]:
del dumped[group][feed]
changed = True
LOG_APPEND("delEnt: del feed: " + feed)
return changed
def sendThrough(mailfrom, mailto, mailsubj, mailtext, server, port, login,
password, tls=True, filename="news.html"):
"""Собственно делает отправку"""
# может пригодиться
#TEXT_CODING = "utf_8"
#SUBJ_CODING = "utf_8"
#try:
# mailtext.encode("cp1251")
# TEXT_CODING = "cp1251"
#except:
# pass
#try:
# mailsubj.encode("cp1251")
# SUBJ_CODING = "cp1251"
#except:
# pass
    # assemble the message
msg = MIMEMultipart()
msg["Subject"] = MIMEHeader(mailsubj, UTF8, 76)
msg["From"] = "\"{0}\" <{1}>".format(mailfrom.split("@")[0], mailfrom)
msg["To"] = "\"{0}\" <{1}>".format(mailto.split("@")[0], mailto)
msg.attach(MIMEText("новости:", "plain", UTF8))
a = MIMEText(mailtext, "html", UTF8)
a.add_header("Content-Disposition", "attachment", filename=filename)
msg.attach(a)
if DEBUG:
print(msg)
return
    # send
smtpserver = smtplib.SMTP(server, port, timeout=7)
smtpserver.ehlo("localhost.localdomain")
if tls:
smtpserver.starttls()
smtpserver.ehlo("localhost.localdomain")
smtpserver.login(login, password)
smtpserver.sendmail(mailfrom, mailto, msg.as_string())
smtpserver.quit()
def main(config_name, config):
NOW = DATE_TIME_NOW()
NOW_HOUR = NOW.hour
# loc = locale.getlocale()
try:
        # the locale must be available on the system
locale.setlocale(locale.LC_ALL, ("ru_RU","UTF8"))
HEADER = "Новости {0}".format(NOW.strftime("%-d %b %Y"))
# locale.setlocale(locale.LC_ALL, loc)
except:
HEADER = "Новости {0}".format(NOW.strftime("%d.%m.%Y"))
DUMP_F = config_name + "_dump.json"
NEW_F = config_name + "_new.json"
NOW_SHORT = NOW.strftime(DATE_TIME_FORMAT)
LOG_APPEND(NOW_SHORT + " ==================== " + config_name + ": Starting RSS to E-mail!")
    # load the files
try:
dump_file = loadJson(DUMP_F)
except:
dump_file = {}
LOG_APPEND(NOW_SHORT + " main: can't load DUMP_F")
try:
new_file = loadJson(NEW_F)
except:
new_file = {}
LOG_APPEND(NOW_SHORT + " main: can't load NEW_F")
    MAX_ITEMS = config["RECORDS_MAX"] # longer than the longest feed
    # build the current dump from the RSS feeds
    dump = {} # new records, not yet sent
for group in config["FEEDS"]:
if group not in dump:
dump[group] = {}
for feed in config["FEEDS"][group]:
url = config["FEEDS"][group][feed]
if ISINSTANCE(url, str):
dump[group][feed] = getRss(
parseHtml(getWebPage(url), url),
url,
config["TITLE_LENGTH_MAX"],
config["DESC_LENGTH_MAX"]
)
else:
dump[group][feed] = getGen(
parseHtml(getWebPage(url["url"], False), url["url"]),
url["root"],
url["url"],
config["TITLE_LENGTH_MAX"]
)
    # build a dump of new records absent from the old-records file,
    # to be written to the new-records file or sent
    # ! fills in (builds) the new_file and dump_file structures
    # new_file = (dump - dump_file) + new_file
new_file_changed = False
for group in dump:
        # a new group in the config
if group not in new_file:
new_file[group] = {}
if group not in dump_file:
dump_file[group] = {}
for feed in dump[group]:
            # a new feed in the config
if feed not in new_file[group]:
new_file[group][feed] = {}
if feed not in dump_file[group]:
dump_file[group][feed] = {}
for record in dump[group][feed]:
if record not in dump_file[group][feed]:
new_file[group][feed][record] =\
dump[group][feed][record]
new_file_changed = True
    # remove groups and feeds absent from config-rss from new_file
new_file_changed = delEnt(dump, new_file, new_file_changed)
    # save the dump of new records before sending
if new_file_changed:
try:
dumpJson(new_file, NEW_F)
except:
LOG_APPEND(NOW_SHORT + " main: can't dump new file")
return 1
    # perform the sending
    # first run
if not config_name in sendState:
sendState[config_name] = False
    # we are past the hour for sending
if config["HOUR"] != NOW_HOUR:
sendState[config_name] = False
if (config["HOUR"] == NOW_HOUR and not sendState[config_name]) or DEBUG:
# if True:
        # assemble the HTML and send it
result = formContent(new_file, HEADER, config["TITLE_LENGTH_MAX"],
config["DESC_LENGTH_MAX"], config["FEEDS"])
try:
sendThrough(
config["FROM"], config["TO"], HEADER, result, config["SMTP"],
config["SMTP_PORT"], config["LOGIN"], config["PASSWORD"],
tls=config["TLS"], filename="news-" + NOW.strftime("%Y%m%d") + ".html"
)
LOG_APPEND(NOW_SHORT + " *** E-mail sended!")
            sendState[config_name] = True # sent
except Exception as ex:
LOG_APPEND(NOW_SHORT + " main: can't send e-mail! Error: " + str(ex))
if DEBUG:
raise
return 1
    # write the result to a file
if ARCHIVE:
arcName = "archive_" + config_name + "/news-" + NOW.strftime("%Y%m%d")
try:
with open(arcName + ".html", "w", encoding=IO_CODING) as outFile:
outFile.write(result)
dumpJson(new_file, arcName + ".json", True)
except Exception as ex:
LOG_APPEND(NOW_SHORT + " main: can't write archive html: " + str(ex))
    # build the new dump of old records to write to disk after sending
    # dump_file = dump_file + new_file (now sent)
for group in new_file:
for item in new_file[group]:
for record in new_file[group][item]:
dump_file[group][item][record] =\
new_file[group][item][record]
    # write the dump of old records
try:
dumpJson(dump_file, DUMP_F)
except:
LOG_APPEND(NOW_SHORT + " main: can't dump old file")
return 1
    # wipe the dump of new records
try:
with open(NEW_F, "w") as outFile:
outFile.write("{}")
except:
LOG_APPEND(NOW_SHORT + " main: can't write clean new")
    # perform the cleanup
    # first run
if not config_name in cleanState:
cleanState[config_name] = False
    # we are past the hour for sending
if config["HOUR"] + 1 != NOW_HOUR:
cleanState[config_name] = False
if config["HOUR"] + 1 == NOW_HOUR and not cleanState[config_name]:
dump_file_changed = False
        # remove groups and feeds absent from config-rss from dump_file
dump_file_changed = delEnt(dump, dump_file, dump_file_changed)
        # drop old records from dump_file (truncate it)
for group in dump_file:
for feed in dump_file[group]:
records = dump_file[group][feed]
i = LEN(records)
delta = i - MAX_ITEMS
if delta > 0:
# from operator import itemgetter
# rows = list(records)
# rows_by_date = sorted(rows, key=itemgetter('date'))
lst = [(rc, records[rc]["date"]) for rc in records]
lst.sort(key=lambda i:i[1])
lst = lst[:delta]
for uid, date in lst:
del records[uid]
LOG_APPEND(NOW_SHORT + " main: del records: " + feed + " " + str(delta))
dump_file_changed = True
        # write the dump of old records
if dump_file_changed:
try:
dumpJson(dump_file, DUMP_F)
except:
LOG_APPEND(NOW_SHORT + " main: can't dump old file")
LOG_APPEND(NOW_SHORT + " *** Cleaned!")
        cleanState[config_name] = True # cleaned
if DEBUG:
result = formContent(new_file, "HEADER", config["TITLE_LENGTH_MAX"],
config["DESC_LENGTH_MAX"], config["FEEDS"])
try:
with open(config_name + "_test_index.html", "w", encoding=IO_CODING) as outFile:
outFile.write(result)
except:
LOG_APPEND(NOW_SHORT + " main: can't write debug html")
def dumpJson(data, name, human=DEBUG):
dumped = json.dumps(
data,
ensure_ascii=False,
separators=(",", ":") if not human else None,
indent=None if not human else 4,
)
with open(name, "w", encoding=IO_CODING) as outFile:
outFile.write(dumped)
def loadJson(name):
with open(name, encoding=IO_CODING) as inFile:
        # json.loads() ignores an encoding argument (and Python 3.9+ rejects it)
        return json.loads(inFile.read())
if __name__ == "__main__":
    # caching variables - for the _hot_ loops
TYPO = typo.typographize
URL_JOIN = urllib.parse.urljoin
    sendState = {} # state: whether the e-mail has already been sent
    cleanState = {} # state: whether cleanup has already been done
while 1:
        # re-read the config on every iteration
        # crashes without a config
CONFIG = loadJson("config.json")
for config in CONFIG:
log = []
LOG_APPEND = log.append
            try: #TODO crutch so the whole program does not crash
main(config, CONFIG[config])
except Exception as ex:
LOG_APPEND("EE: _loop_: error in main: " + str(ex))
if DEBUG:
raise
            # write the log
try:
with open("rss.log", "a", encoding=IO_CODING) as outFile:
outFile.write(JOIN_N(log) + "\n")
except Exception as ex:
print("EE: can't write log: " + str(ex))
if DEBUG:
print("end of job")
time.sleep(600)
| gpl-3.0 | -8,024,633,454,508,756,000 | 34.374408 | 115 | 0.531987 | false |
AdamBSteele/yournewhomepage | fbone/modules/frontend/forms.py | 1 | 4310 |
# -*- coding: utf-8 -*-
from flask import Markup, current_app
from flask.ext.wtf import Form
from flask.ext.wtf.html5 import URLField, EmailField, TelField
from wtforms import (ValidationError, BooleanField, TextField, HiddenField, PasswordField,
SubmitField, TextAreaField, IntegerField, RadioField,FileField,
DecimalField, SelectField, DateField, Field, widgets)
from wtforms.validators import (Required, Length, EqualTo, Email, NumberRange, AnyOf, Optional, URL)
from flask import current_app
from flask.ext.babel import lazy_gettext as _
from fbone.modules.user import User, UserDetail
from fbone.utils import (PASSWORD_LEN_MIN, PASSWORD_LEN_MAX,
USERNAME_LEN_MIN, USERNAME_LEN_MAX)
from fbone.extensions import db
class LoginForm(Form):
next = HiddenField()
login = TextField(_('Username or email'), [Required()])
password = PasswordField(_('Password'), [Required(), Length(PASSWORD_LEN_MIN, PASSWORD_LEN_MAX)])
remember = BooleanField(_('Remember me'))
submit = SubmitField(_('Sign in'))
class SignupForm(Form):
next = HiddenField()
email = EmailField(_('Email'), [Required(), Email()],
description=_("What's your email address?"))
password = PasswordField(_('Password'), [Required(), Length(PASSWORD_LEN_MIN, PASSWORD_LEN_MAX)],
description=_('%(minChar)s characters or more! Be tricky.', minChar = PASSWORD_LEN_MIN) )
name = TextField(_('Choose your username'), [Required(), Length(USERNAME_LEN_MIN, USERNAME_LEN_MAX)],
description=_("Don't worry. you can change it later."))
agree = BooleanField(_('Agree to the ') +
Markup('<a target="blank" href="/terms">'+_('Terms of Service')+'</a>'), [Required()])
submit = SubmitField('Sign up')
def validate_name(self, field):
if User.query.filter_by(name=field.data).first() is not None:
raise ValidationError(_('This username is taken'))
def validate_email(self, field):
if User.query.filter_by(email=field.data).first() is not None:
raise ValidationError(_('This email is taken'))
def signup(self):
user = User()
user.user_detail = UserDetail()
self.populate_obj(user)
db.session.add(user)
db.session.commit()
return user
class RecoverPasswordForm(Form):
email = EmailField(_('Your email'), [Email()])
submit = SubmitField(_('Send instructions'))
class ChangePasswordForm(Form):
activation_key = HiddenField()
password = PasswordField(_('Password'), [Required()])
password_again = PasswordField(_('Password again'), [EqualTo('password', message="Passwords don't match")])
submit = SubmitField(_('Save'))
class ReauthForm(Form):
next = HiddenField()
password = PasswordField(_('Password'), [Required(), Length(PASSWORD_LEN_MIN, PASSWORD_LEN_MAX)])
submit = SubmitField(_('Reauthenticate'))
class OpenIDForm(Form):
openid = TextField(_('Your OpenID'), [Required()])
submit = SubmitField(_('Log in with OpenID'))
def login(self,oid):
openid = self.openid.data
current_app.logger.debug('login with openid(%s)...' % openid)
return oid.try_login(openid, ask_for=['email', 'fullname', 'nickname'])
class CreateProfileForm(Form):
openid = HiddenField()
name = TextField(_('Choose your username'), [Required(), Length(USERNAME_LEN_MIN, USERNAME_LEN_MAX)],
description=_("Don't worry. you can change it later."))
email = EmailField(_('Email'), [Required(), Email()], description=_("What's your email address?"))
password = PasswordField(_('Password'), [Required(), Length(PASSWORD_LEN_MIN, PASSWORD_LEN_MAX)],
description=_('%(minChar)s characters or more! Be tricky.',minChar =PASSWORD_LEN_MIN))
submit = SubmitField(_('Create Profile'))
def validate_name(self, field):
if User.query.filter_by(name=field.data).first() is not None:
raise ValidationError(_('This username is taken.'))
def validate_email(self, field):
if User.query.filter_by(email=field.data).first() is not None:
raise ValidationError(_('This email is taken.'))
def create_profile(self):
user = User()
self.populate_obj(user)
db.session.add(user)
db.session.commit()
| bsd-3-clause | -8,060,816,719,266,212,000 | 38.181818 | 111 | 0.660325 | false |
AutorestCI/azure-sdk-for-python | azure-batch/azure/batch/models/pool_remove_nodes_options.py | 1 | 3080 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PoolRemoveNodesOptions(Model):
"""Additional parameters for remove_nodes operation.
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
:param if_match: An ETag value associated with the version of the resource
known to the client. The operation will be performed only if the
resource's current ETag on the service exactly matches the value specified
by the client.
:type if_match: str
:param if_none_match: An ETag value associated with the version of the
resource known to the client. The operation will be performed only if the
resource's current ETag on the service does not match the value specified
by the client.
:type if_none_match: str
:param if_modified_since: A timestamp indicating the last modified time of
the resource known to the client. The operation will be performed only if
the resource on the service has been modified since the specified time.
:type if_modified_since: datetime
:param if_unmodified_since: A timestamp indicating the last modified time
of the resource known to the client. The operation will be performed only
if the resource on the service has not been modified since the specified
time.
:type if_unmodified_since: datetime
"""
def __init__(self, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None):
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
self.if_match = if_match
self.if_none_match = if_none_match
self.if_modified_since = if_modified_since
self.if_unmodified_since = if_unmodified_since
| mit | -2,259,469,980,075,833,300 | 49.491803 | 191 | 0.692532 | false |
endlessm/chromium-browser | third_party/chromite/cbuildbot/afdo_unittest.py | 1 | 29295 |
# -*- coding: utf-8 -*-
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Sometimes we poke 'private' AFDO methods, since that's the most direct way to
# test what we're looking to test. That's OK.
#
# pylint: disable=protected-access
"""Unit tests for afdo module."""
from __future__ import print_function
import collections
import datetime
import json
import os
import sys
import time
import mock
from chromite.cbuildbot import afdo
from chromite.lib import (cros_build_lib, cros_test_lib, gs, osutils,
path_util, portage_util)
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
MockGsFile = collections.namedtuple('MockGsFile', ['url', 'creation_time'])
def _benchmark_afdo_profile_name(major=0,
minor=0,
build=0,
patch=0,
rev=1,
merged_suffix=False,
compression_suffix=True):
suffix = '-merged' if merged_suffix else ''
result = 'chromeos-chrome-amd64-%d.%d.%d.%d_rc-r%d%s' % (major, minor, build,
patch, rev, suffix)
result += afdo.AFDO_SUFFIX
if compression_suffix:
result += afdo.COMPRESSION_SUFFIX
return result
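# For example, _benchmark_afdo_profile_name(major=10, build=9) evaluates to
# 'chromeos-chrome-amd64-10.0.9.0_rc-r1.afdo.bz2', assuming AFDO_SUFFIX is
# '.afdo' and COMPRESSION_SUFFIX is '.bz2' (as the tests below imply).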
class AfdoTest(cros_test_lib.MockTempDirTestCase):
"""Unit test of afdo module."""
def testEnumerateMostRecentProfilesRaisesOnNoListing(self):
mock_gs = mock.Mock()
mock_gs.List = lambda *args, **kwargs: []
with self.assertRaises(ValueError):
afdo._EnumerateMostRecentProfiles(mock_gs, [1, 2, 3], 'some_url', None)
def testEnumerateMostRecentProfilesFindsTheNewestProfiles(self):
def mock_list(*_args, **_kwargs):
return [
MockGsFile(url='gs://foo/1_1', creation_time=None),
MockGsFile(url='gs://foo/2_2', creation_time=None),
MockGsFile(url='gs://foo/2_1', creation_time=None),
MockGsFile(url='gs://foo/1_2', creation_time=None),
MockGsFile(url='gs://foo/3_1', creation_time=None),
]
mock_gs = mock.Mock()
mock_gs.List = mock_list
Version = collections.namedtuple('Version', ['major', 'minor'])
def parse_name(name):
major, minor = name.split('_')
# Note that the version key uses the *negative* minor number. So _1
# should be considered the newest. This is to be sure that we're ordering
# using these Version keys, rather than the strings.
return Version(int(major), -int(minor))
milestones = (1, 2, 4)
most_recent = afdo._EnumerateMostRecentProfiles(mock_gs, milestones, '',
parse_name)
self.assertDictEqual(most_recent, {
1: 'gs://foo/1_1',
2: 'gs://foo/2_1',
})
def testParseProfileMatchesUncompressedProfiles(self):
# Local profiles will be uncompressed, and the profile parser needs to
# handle that.
profile_name = 'chromeos-chrome-amd64-76.0.3795.2_rc-r1.afdo'
profile_name_compressed = profile_name + '.bz2'
parsed = afdo._ParseBenchmarkProfileName(profile_name)
parsed_compressed = afdo._ParseBenchmarkProfileName(profile_name_compressed)
self.assertEqual(parsed, parsed_compressed)
def testEnumerateBenchmarkProfilesMatchesRealWorldNames(self):
enumerate_profiles = self.PatchObject(afdo, '_EnumerateMostRecentProfiles')
afdo._EnumerateMostRecentBenchmarkProfiles(object(), [1])
enumerate_profiles.assert_called_once()
parse_profile = enumerate_profiles.call_args_list[0][0][-1]
parsed = parse_profile('chromeos-chrome-amd64-57.0.2958.0_rc-r1.afdo.bz2')
self.assertEqual(parsed.major, 57)
parsed = parse_profile('chromeos-chrome-amd64-58.0.2959.0_rc-r1.afdo.bz2')
self.assertEqual(parsed.major, 58)
# ...Note that not all profiles have the _rc.
no_rc = parse_profile('chromeos-chrome-amd64-58.0.2959.0-r1.afdo.bz2')
self.assertEqual(no_rc, parsed)
# ...And we don't like merged profiles.
merged_profile = parse_profile(
'chromeos-chrome-amd64-58.0.2959.0-r1-merged.afdo.bz2')
self.assertIsNone(merged_profile)
profile_order = [
'chromeos-chrome-amd64-10.9.9.9_rc-r9.afdo.bz2',
'chromeos-chrome-amd64-9.10.9.9_rc-r9.afdo.bz2',
'chromeos-chrome-amd64-9.9.10.9_rc-r9.afdo.bz2',
'chromeos-chrome-amd64-9.9.9.10_rc-r9.afdo.bz2',
'chromeos-chrome-amd64-9.9.9.9_rc-r10.afdo.bz2',
'chromeos-chrome-amd64-9.9.9.9_rc-r9.afdo.bz2',
]
for higher, lower in zip(profile_order, profile_order[1:]):
self.assertGreater(parse_profile(higher), parse_profile(lower))
def testEnumerateCWPProfilesMatchesRealWorldNames(self):
enumerate_profiles = self.PatchObject(afdo, '_EnumerateMostRecentProfiles')
afdo._EnumerateMostRecentCWPProfiles(object(), [1])
enumerate_profiles.assert_called_once()
parse_profile = enumerate_profiles.call_args_list[0][0][-1]
parsed = parse_profile('R75-3759.4-1555926322.afdo.xz')
self.assertEqual(parsed.major, 75)
parsed = parse_profile('R76-3759.4-1555926322.afdo.xz')
self.assertEqual(parsed.major, 76)
profile_order = [
'R10-9.9-9.afdo.xz',
'R9-10.9-9.afdo.xz',
'R9-9.10-9.afdo.xz',
'R9-9.9-10.afdo.xz',
'R9-9.9-9.afdo.xz',
]
for higher, lower in zip(profile_order, profile_order[1:]):
self.assertGreater(parse_profile(higher), parse_profile(lower))
def testGenerateMergePlanMatchesProfilesAppropriately(self):
milestones = (1, 2, 3, 4)
gs_ctx = object()
def mock_enumerate(gs_context, milestones2, glob_url, _parse_profile_name):
self.assertIs(milestones, milestones2)
self.assertIs(gs_context, gs_ctx)
if afdo.GSURL_BASE_CWP in glob_url:
return {
1: 'gs://cwp/1',
2: 'gs://cwp/2',
4: 'gs://cwp/4',
}
assert afdo.GSURL_BASE_BENCH in glob_url
return {
1: 'gs://bench/1',
2: 'gs://bench/2',
3: 'gs://bench/3',
}
self.PatchObject(afdo, '_EnumerateMostRecentProfiles', mock_enumerate)
skipped, to_merge = afdo.GenerateReleaseProfileMergePlan(gs_ctx, milestones)
self.assertEqual(skipped, [3, 4])
self.assertDictEqual(to_merge, {
1: ('gs://cwp/1', 'gs://bench/1'),
2: ('gs://cwp/2', 'gs://bench/2'),
})
def testExecuteMergePlanWorks(self):
mock_gs = mock.Mock()
gs_copy = mock_gs.Copy
compress_file = self.PatchObject(cros_build_lib, 'CompressFile')
uncompress_file = self.PatchObject(cros_build_lib, 'UncompressFile')
merge_afdo_profiles = self.PatchObject(afdo, '_MergeAFDOProfiles')
# The only way to know for sure that we created a sufficient set of
# directories is a tryjob. Just make sure there are no side-effects.
self.PatchObject(osutils, 'SafeMakedirs')
merge_plan = {
1: ('gs://cwp/1.afdo.xz', 'gs://bench/1.afdo.bz2'),
}
build_root = '/build/root'
chroot = os.path.join(build_root, 'chroot')
merged_files = afdo.ExecuteReleaseProfileMergePlan(mock_gs, build_root,
merge_plan)
self.assertSetEqual(set(merged_files.keys()), {1})
merged_output = merged_files[1]
def assert_call_args(the_mock, call_args):
self.assertEqual(the_mock.call_count, len(call_args))
the_mock.assert_has_calls(call_args)
assert_call_args(gs_copy, [
mock.call('gs://bench/1.afdo.bz2',
chroot + '/tmp/afdo_data_merge/benchmark.afdo.bz2'),
mock.call('gs://cwp/1.afdo.xz',
chroot + '/tmp/afdo_data_merge/cwp.afdo.xz'),
])
assert_call_args(uncompress_file, [
mock.call(chroot + '/tmp/afdo_data_merge/benchmark.afdo.bz2',
chroot + '/tmp/afdo_data_merge/benchmark.afdo'),
mock.call(chroot + '/tmp/afdo_data_merge/cwp.afdo.xz',
chroot + '/tmp/afdo_data_merge/cwp.afdo'),
])
uncompressed_merged_output = os.path.splitext(merged_output)[0]
uncompressed_chroot_merged_output = uncompressed_merged_output[len(chroot):]
assert_call_args(merge_afdo_profiles, [
mock.call([('/tmp/afdo_data_merge/cwp.afdo', 75),
('/tmp/afdo_data_merge/benchmark.afdo', 25)],
uncompressed_chroot_merged_output,
use_compbinary=True),
])
assert_call_args(compress_file, [
mock.call(uncompressed_merged_output, merged_output),
])
def testUploadReleaseProfilesUploadsAsExpected(self):
mock_gs = mock.Mock()
gs_copy = mock_gs.Copy
write_file = self.PatchObject(osutils, 'WriteFile')
global_tmpdir = '/global/tmp'
self.PatchObject(osutils, 'GetGlobalTempDir', return_value=global_tmpdir)
merge_plan = {
1: ('gs://cwp/1.afdo.xz', 'gs://bench/1.afdo.bz2'),
2: ('gs://cwp/2.afdo.xz', 'gs://bench/2.afdo.bz2'),
}
merge_results = {
1: '/tmp/foo.afdo.bz2',
2: '/tmp/bar.afdo.bz2',
}
run_id = '1234'
afdo.UploadReleaseProfiles(mock_gs, run_id, merge_plan, merge_results)
write_file.assert_called_once()
meta_file_local_location, meta_file_data = write_file.call_args_list[0][0]
self.assertEqual(meta_file_data, json.dumps(merge_plan))
self.assertTrue(meta_file_local_location.startswith(global_tmpdir))
def expected_upload_location(profile_version):
return os.path.join(afdo.GSURL_BASE_RELEASE, run_id,
'profiles/m%d.afdo.bz2' % profile_version)
expected_copy_calls = [
mock.call(
merge_results[1],
expected_upload_location(1),
acl='public-read',
version=0),
mock.call(
merge_results[2],
expected_upload_location(2),
acl='public-read',
version=0),
mock.call(
meta_file_local_location,
os.path.join(afdo.GSURL_BASE_RELEASE, run_id, 'meta.json'),
acl='public-read',
version=0),
]
gs_copy.assert_has_calls(expected_copy_calls)
self.assertEqual(gs_copy.call_count, len(expected_copy_calls))
def runCreateAndUploadMergedAFDOProfileOnce(self, upload_ok=True, **kwargs):
if 'unmerged_name' not in kwargs:
# Match everything.
kwargs['unmerged_name'] = _benchmark_afdo_profile_name(major=9999)
Mocks = collections.namedtuple('Mocks', [
'gs_context',
'run_command',
'uncompress_file',
'compress_file',
'upload',
'remove_indirect_call_targets',
])
def MockList(*_args, **_kwargs):
files = [
_benchmark_afdo_profile_name(major=10, build=9),
_benchmark_afdo_profile_name(major=10, build=10),
_benchmark_afdo_profile_name(major=10, build=10, merged_suffix=True),
_benchmark_afdo_profile_name(major=10, build=11),
_benchmark_afdo_profile_name(major=10, build=12),
_benchmark_afdo_profile_name(major=10, build=13),
_benchmark_afdo_profile_name(major=10, build=13, merged_suffix=True),
_benchmark_afdo_profile_name(major=10, build=13, patch=1),
_benchmark_afdo_profile_name(major=10, build=13, patch=2),
_benchmark_afdo_profile_name(
major=10, build=13, patch=2, merged_suffix=True),
_benchmark_afdo_profile_name(major=11, build=14),
_benchmark_afdo_profile_name(major=11, build=14, merged_suffix=True),
_benchmark_afdo_profile_name(major=11, build=15),
]
results = []
for i, name in enumerate(files):
url = os.path.join(afdo.GSURL_BASE_BENCH, name)
now = datetime.datetime(year=1990, month=1, day=1 + i)
results.append(MockGsFile(url=url, creation_time=now))
return results
mock_gs = mock.Mock()
mock_gs.List = MockList
run_command = self.PatchObject(cros_build_lib, 'run')
uncompress_file = self.PatchObject(cros_build_lib, 'UncompressFile')
compress_file = self.PatchObject(cros_build_lib, 'CompressFile')
upload = self.PatchObject(afdo, 'GSUploadIfNotPresent')
remove_indirect_call_targets = self.PatchObject(
afdo, '_RemoveIndirectCallTargetsFromProfile')
upload.return_value = upload_ok
merged_name, uploaded = afdo.CreateAndUploadMergedAFDOProfile(
mock_gs, '/buildroot', **kwargs)
return merged_name, uploaded, Mocks(
gs_context=mock_gs,
run_command=run_command,
uncompress_file=uncompress_file,
compress_file=compress_file,
upload=upload,
remove_indirect_call_targets=remove_indirect_call_targets)
def testCreateAndUploadMergedAFDOProfileMergesBranchProfiles(self):
unmerged_name = _benchmark_afdo_profile_name(major=10, build=13, patch=99)
_, uploaded, mocks = \
self.runCreateAndUploadMergedAFDOProfileOnce(
recent_to_merge=5,
unmerged_name=unmerged_name)
self.assertTrue(uploaded)
def _afdo_name(major, build, patch=0, merged_suffix=False):
return _benchmark_afdo_profile_name(
major=major,
build=build,
patch=patch,
merged_suffix=merged_suffix,
compression_suffix=False)
expected_unordered_args = [
'-output=/tmp/raw-' +
_afdo_name(major=10, build=13, patch=2, merged_suffix=True),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=11),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=12),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=13),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=13, patch=1),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=13, patch=2),
]
# Note that these should all be in-chroot names.
expected_ordered_args = ['llvm-profdata', 'merge', '-sample']
args = mocks.run_command.call_args[0][0]
ordered_args = args[:len(expected_ordered_args)]
self.assertEqual(ordered_args, expected_ordered_args)
unordered_args = args[len(expected_ordered_args):]
self.assertCountEqual(unordered_args, expected_unordered_args)
self.assertEqual(mocks.gs_context.Copy.call_count, 5)
def testCreateAndUploadMergedAFDOProfileRemovesIndirectCallTargets(self):
unmerged_name = _benchmark_afdo_profile_name(major=10, build=13, patch=99)
merged_name, uploaded, mocks = \
self.runCreateAndUploadMergedAFDOProfileOnce(
recent_to_merge=2,
unmerged_name=unmerged_name)
self.assertTrue(uploaded)
def _afdo_name(major, build, patch=0, merged_suffix=False):
return _benchmark_afdo_profile_name(
major=major,
build=build,
patch=patch,
merged_suffix=merged_suffix,
compression_suffix=False)
merge_output_name = 'raw-' + _afdo_name(
major=10, build=13, patch=2, merged_suffix=True)
self.assertNotEqual(merged_name, merge_output_name)
expected_unordered_args = [
'-output=/tmp/' + merge_output_name,
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=13, patch=1),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=13, patch=2),
]
# Note that these should all be in-chroot names.
expected_ordered_args = ['llvm-profdata', 'merge', '-sample']
args = mocks.run_command.call_args[0][0]
ordered_args = args[:len(expected_ordered_args)]
self.assertEqual(ordered_args, expected_ordered_args)
unordered_args = args[len(expected_ordered_args):]
self.assertCountEqual(unordered_args, expected_unordered_args)
mocks.remove_indirect_call_targets.assert_called_once_with(
'/tmp/' + merge_output_name, '/tmp/' + merged_name)
def testCreateAndUploadMergedAFDOProfileWorksInTheHappyCase(self):
merged_name, uploaded, mocks = \
self.runCreateAndUploadMergedAFDOProfileOnce(recent_to_merge=5)
self.assertTrue(uploaded)
# Note that we always return the *basename*
self.assertEqual(
merged_name,
_benchmark_afdo_profile_name(
major=11, build=15, merged_suffix=True, compression_suffix=False))
self.assertTrue(uploaded)
mocks.run_command.assert_called_once()
# Note that these should all be in-chroot names.
expected_ordered_args = ['llvm-profdata', 'merge', '-sample']
def _afdo_name(major, build, patch=0, merged_suffix=False):
return _benchmark_afdo_profile_name(
major=major,
build=build,
patch=patch,
merged_suffix=merged_suffix,
compression_suffix=False)
input_afdo_names = [
_afdo_name(major=10, build=13),
_afdo_name(major=10, build=13, patch=1),
_afdo_name(major=10, build=13, patch=2),
_afdo_name(major=11, build=14),
_afdo_name(major=11, build=15),
]
output_afdo_name = _afdo_name(major=11, build=15, merged_suffix=True)
expected_unordered_args = ['-output=/tmp/raw-' + output_afdo_name]
expected_unordered_args += [
'-weighted-input=1,/tmp/' + n for n in input_afdo_names
]
args = mocks.run_command.call_args[0][0]
ordered_args = args[:len(expected_ordered_args)]
self.assertEqual(ordered_args, expected_ordered_args)
unordered_args = args[len(expected_ordered_args):]
self.assertCountEqual(unordered_args, expected_unordered_args)
self.assertEqual(mocks.gs_context.Copy.call_count, 5)
self.assertEqual(mocks.uncompress_file.call_count, 5)
def call_for(name):
basis = '/buildroot/chroot/tmp/' + name
return mock.call(basis + afdo.COMPRESSION_SUFFIX, basis)
mocks.uncompress_file.assert_has_calls(
any_order=True, calls=[call_for(n) for n in input_afdo_names])
compressed_output_afdo_name = output_afdo_name + afdo.COMPRESSION_SUFFIX
compressed_target = '/buildroot/chroot/tmp/' + compressed_output_afdo_name
mocks.compress_file.assert_called_once()
args = mocks.compress_file.call_args[0]
self.assertEqual(args, (
compressed_target[:-len(afdo.COMPRESSION_SUFFIX)],
compressed_target,
))
mocks.upload.assert_called_once()
args = mocks.upload.call_args[0]
self.assertEqual(args, (
mocks.gs_context,
compressed_target,
'%s/%s' % (afdo.GSURL_BASE_BENCH, compressed_output_afdo_name),
))
def testCreateAndUploadMergedAFDOProfileSucceedsIfUploadFails(self):
merged_name, uploaded, _ = \
self.runCreateAndUploadMergedAFDOProfileOnce(upload_ok=False)
self.assertIsNotNone(merged_name)
self.assertFalse(uploaded)
def testMergeIsOKIfWeFindFewerProfilesThanWeWant(self):
merged_name, uploaded, mocks = \
self.runCreateAndUploadMergedAFDOProfileOnce(recent_to_merge=1000,
max_age_days=1000)
self.assertTrue(uploaded)
self.assertIsNotNone(merged_name)
self.assertEqual(mocks.gs_context.Copy.call_count, 9)
def testNoProfileIsGeneratedIfNoFilesBeforeMergedNameExist(self):
merged_name, uploaded, _ = \
self.runCreateAndUploadMergedAFDOProfileOnce(
unmerged_name=_benchmark_afdo_profile_name())
self.assertIsNone(merged_name)
self.assertFalse(uploaded)
merged_name, uploaded, _ = \
self.runCreateAndUploadMergedAFDOProfileOnce(
unmerged_name=_benchmark_afdo_profile_name(major=10, build=8))
self.assertIsNone(merged_name)
self.assertFalse(uploaded)
merged_name, uploaded, _ = \
self.runCreateAndUploadMergedAFDOProfileOnce(
unmerged_name=_benchmark_afdo_profile_name(major=10, build=9))
self.assertIsNone(merged_name)
self.assertFalse(uploaded)
merged_name, uploaded, _ = \
self.runCreateAndUploadMergedAFDOProfileOnce(
unmerged_name=_benchmark_afdo_profile_name(major=10, build=10))
self.assertIsNotNone(merged_name)
self.assertTrue(uploaded)
def testNoFilesAfterUnmergedNameAreIncluded(self):
max_name = _benchmark_afdo_profile_name(major=10, build=11)
merged_name, uploaded, mocks = \
self.runCreateAndUploadMergedAFDOProfileOnce(unmerged_name=max_name)
self.assertEqual(
_benchmark_afdo_profile_name(
major=10, build=11, merged_suffix=True, compression_suffix=False),
merged_name)
self.assertTrue(uploaded)
def _afdo_name(major, build, merged_suffix=False):
return _benchmark_afdo_profile_name(
major=major,
build=build,
merged_suffix=merged_suffix,
compression_suffix=False)
# Note that these should all be in-chroot names.
expected_ordered_args = ['llvm-profdata', 'merge', '-sample']
expected_unordered_args = [
'-output=/tmp/raw-' +
_afdo_name(major=10, build=11, merged_suffix=True),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=9),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=10),
'-weighted-input=1,/tmp/' + _afdo_name(major=10, build=11),
]
args = mocks.run_command.call_args[0][0]
ordered_args = args[:len(expected_ordered_args)]
self.assertEqual(ordered_args, expected_ordered_args)
unordered_args = args[len(expected_ordered_args):]
self.assertCountEqual(unordered_args, expected_unordered_args)
self.assertEqual(mocks.gs_context.Copy.call_count, 3)
self.assertEqual(mocks.uncompress_file.call_count, 3)
def testMergeDoesntHappenIfNoProfilesAreMerged(self):
runs = [
self.runCreateAndUploadMergedAFDOProfileOnce(recent_to_merge=1),
self.runCreateAndUploadMergedAFDOProfileOnce(max_age_days=0),
]
for merged_name, uploaded, mocks in runs:
self.assertIsNone(merged_name)
self.assertFalse(uploaded)
mocks.gs_context.Copy.assert_not_called()
mocks.run_command.assert_not_called()
mocks.uncompress_file.assert_not_called()
mocks.compress_file.assert_not_called()
mocks.upload.assert_not_called()
def testRemoveIndirectCallTargetsActuallyAppearsToWork(self):
run_command = self.PatchObject(cros_build_lib, 'run')
path_exists = self.PatchObject(os.path, 'exists', return_value=False)
input_path = '/input/path'
input_path_txt = input_path + '.txt'
output_path = '/output/path'
output_path_txt = output_path + '.txt'
afdo._RemoveIndirectCallTargetsFromProfile(input_path, output_path)
self.assertEqual(run_command.call_count, 3)
merge_to_text, removal, merge_to_bin = run_command.call_args_list
    path_exists.assert_called_with(os.path.join('/chroot', input_path_txt))
self.assertEqual(
merge_to_text,
mock.call(
[
'llvm-profdata',
'merge',
'-sample',
'-output=%s' % input_path_txt,
'-text',
input_path,
],
enter_chroot=True,
print_cmd=True,
))
# Probably no value in checking for the actual script name.
script_name = removal[0][0][0]
    self.assertEqual(
        removal,
mock.call(
[
script_name,
'--input=%s' % input_path_txt,
'--output=%s' % output_path_txt,
],
enter_chroot=True,
print_cmd=True,
))
self.assertEqual(
merge_to_bin,
mock.call(
[
'llvm-profdata',
'merge',
'-sample',
'-output=' + output_path,
output_path_txt,
],
enter_chroot=True,
print_cmd=True,
))
def testFindLatestProfile(self):
versions = [[1, 0, 0, 0], [1, 2, 3, 4], [2, 2, 2, 2]]
self.assertEqual(afdo.FindLatestProfile([0, 0, 0, 0], versions), None)
self.assertEqual(
afdo.FindLatestProfile([1, 0, 0, 0], versions), [1, 0, 0, 0])
self.assertEqual(
afdo.FindLatestProfile([1, 2, 0, 0], versions), [1, 0, 0, 0])
self.assertEqual(
afdo.FindLatestProfile([9, 9, 9, 9], versions), [2, 2, 2, 2])
def testPatchKernelEbuild(self):
before = [
'The following line contains the version:',
'AFDO_PROFILE_VERSION="R63-9901.21-1506581597"', 'It should be changed.'
]
after = [
'The following line contains the version:',
'AFDO_PROFILE_VERSION="R12-3456.78-9876543210"', 'It should be changed.'
]
tf = os.path.join(self.tempdir, 'test.ebuild')
osutils.WriteFile(tf, '\n'.join(before))
afdo.PatchKernelEbuild(tf, [12, 3456, 78, 9876543210])
x = osutils.ReadFile(tf).splitlines()
self.assertEqual(after, x)
def testGetAvailableKernelProfiles(self):
def MockGsList(path):
unused = {
'content_length': None,
'creation_time': None,
'generation': None,
'metageneration': None
}
path = path.replace('*', '%s')
return [
gs.GSListResult(
url=(path % ('4.4', 'R63-9901.21-1506581597')), **unused),
gs.GSListResult(
url=(path % ('3.8', 'R61-9765.70-1506575230')), **unused),
]
self.PatchObject(gs.GSContext, 'List',
lambda _, path, **kwargs: MockGsList(path))
profiles = afdo.GetAvailableKernelProfiles()
self.assertIn([63, 9901, 21, 1506581597], profiles['4.4'])
self.assertIn([61, 9765, 70, 1506575230], profiles['3.8'])
def testFindKernelEbuilds(self):
ebuilds = [(os.path.basename(ebuild[0]), ebuild[1])
for ebuild in afdo.FindKernelEbuilds()]
self.assertIn(('chromeos-kernel-4_4-9999.ebuild', '4.4'), ebuilds)
self.assertIn(('chromeos-kernel-3_8-9999.ebuild', '3.8'), ebuilds)
def testProfileAge(self):
self.assertEqual(0, afdo.ProfileAge([0, 0, 0, int(time.time())]))
self.assertEqual(1, afdo.ProfileAge([0, 0, 0, int(time.time() - 86400)]))
def testGetCWPProfile(self):
profiles = [
'R62-3202.43-320243.afdo.xz', 'R63-3223.0-233200.afdo.xz',
'R63-3239.20-323920.afdo.xz', 'R63-3239.42-323942.afdo.xz',
'R63-3239.50-323950.afdo.xz', 'R63-3239.50-323999.afdo.xz',
'R64-3280.5-328005.afdo.xz', 'R64-3282.41-328241.afdo.xz',
'R65-3299.0-329900.afdo.xz'
]
def MockGsList(path):
unused = {
'content_length': None,
'creation_time': None,
'generation': None,
'metageneration': None
}
return [
gs.GSListResult(url=os.path.join(path, f), **unused) for f in profiles
]
self.PatchObject(gs.GSContext, 'List',
lambda _, path, **kwargs: MockGsList(path))
def _test(version, idx):
unused = {
'pv': None,
'package': None,
'version_no_rev': None,
'rev': None,
'category': None,
'cpv': None,
'cp': None,
'cpf': None
}
cpv = portage_util.CPV(version=version, **unused)
profile = afdo.GetCWPProfile(cpv, 'silvermont', 'unused', gs.GSContext())
# Expect the most recent profile on the same branch.
self.assertEqual(profile, profiles[idx][:-3])
_test('66.0.3300.0_rc-r1', 8)
_test('65.0.3283.0_rc-r1', 7)
_test('65.0.3283.1_rc-r1', 7)
_test('64.0.3282.42_rc-r1', 7)
_test('64.0.3282.40_rc-r1', 6)
_test('63.0.3239.30_rc-r1', 2)
_test('63.0.3239.42_rc-r0', 2)
_test('63.0.3239.10_rc-r1', 1)
def testCWPProfileToVersionTuple(self):
self.assertEqual(
afdo.CWPProfileToVersionTuple('gs://chromeos-prebuilt/afdo-job/cwp/'
'chrome/R66-3325.65-1519321598.afdo.xz'),
[66, 3325, 65, 1519321598])
self.assertEqual(
afdo.CWPProfileToVersionTuple('R66-3325.65-1519321598.afdo.xz'),
[66, 3325, 65, 1519321598])
def testPatchChromeEbuildAFDOFile(self):
before = [
'The following line contains the version:',
'AFDO_FILE["benchmark"]="chromeos-chrome-amd64-67.0.3379.0_rc-r1.afdo"',
'AFDO_FILE["silvermont"]="R67-3359.31-1522059092.afdo"',
'AFDO_FILE["airmont"]="airmont_before.afdo"',
'AFDO_FILE["broadwell"]="broadwell_before.afdo"',
'It should be changed.'
]
after = [
'The following line contains the version:',
'AFDO_FILE["benchmark"]="chromeos-chrome-amd64-67.0.3388.0_rc-r1.afdo"',
'AFDO_FILE["silvermont"]="R67-3360.42-153456789.afdo"',
'AFDO_FILE["airmont"]="airmont_after.afdo"',
'AFDO_FILE["broadwell"]="broadwell_after.afdo"', 'It should be changed.'
]
self.PatchObject(path_util, 'FromChrootPath', lambda x: x)
tf = os.path.join(self.tempdir, 'test.ebuild')
osutils.WriteFile(tf, '\n'.join(before))
afdo.PatchChromeEbuildAFDOFile(
tf, {
'benchmark': 'chromeos-chrome-amd64-67.0.3388.0_rc-r1.afdo',
'broadwell': 'broadwell_after.afdo',
'airmont': 'airmont_after.afdo',
'silvermont': 'R67-3360.42-153456789.afdo'
})
x = osutils.ReadFile(tf).splitlines()
self.assertEqual(after, x)
| bsd-3-clause | 5,869,353,235,322,691,000 | 36.082278 | 80 | 0.625772 | false |
nakagami/reportlab | src/reportlab/lib/normalDate.py | 1 | 20963 | #!/usr/bin/env python
# normalDate.py - version 1.0 - 20000717
#hacked by Robin Becker 10/Apr/2001
#major changes include
# using Types instead of type(0) etc
# BusinessDate class
# __radd__, __rsub__ methods
# formatMS stuff
# derived from an original version created
# by Jeff Bauer of Rubicon Research and used
# with his kind permission
import re, time, datetime
from reportlab.lib.utils import fp_str, isStrType
__version__=''' $Id: normalDate.py 3677 2010-02-16 17:00:00Z rgbecker $ '''
__doc__="Jeff Bauer's lightweight date class, extended by us. Predates Python's datetime module."
_bigBangScalar = -4345732 # based on (-9999, 1, 1) BC/BCE minimum
_bigCrunchScalar = 2958463 # based on (9999,12,31) AD/CE maximum
_daysInMonthNormal = [31,28,31,30,31,30,31,31,30,31,30,31]
_daysInMonthLeapYear = [31,29,31,30,31,30,31,31,30,31,30,31]
_dayOfWeekName = ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday']
_monthName = ['January', 'February', 'March', 'April', 'May', 'June',
'July','August','September','October','November','December']
if hasattr(time,'struct_time'):
_DateSeqTypes = (list,tuple,time.struct_time)
else:
_DateSeqTypes = (list,tuple)
_fmtPat = re.compile('\\{(m{1,5}|yyyy|yy|d{1,4})\\}',re.MULTILINE|re.IGNORECASE)
_iso_re = re.compile(r'(\d\d\d\d|\d\d)-(\d\d)-(\d\d)')
def getStdMonthNames():
return [name.lower() for name in _monthName]
def getStdShortMonthNames():
    return [name[:3] for name in getStdMonthNames()]
def getStdDayNames():
    return [name.lower() for name in _dayOfWeekName]
def getStdShortDayNames():
    return [name[:3] for name in getStdDayNames()]
def isLeapYear(year):
"""determine if specified year is leap year, returns Python boolean"""
if year < 1600:
if year % 4:
return 0
else:
return 1
elif year % 4 != 0:
return 0
elif year % 100 != 0:
return 1
elif year % 400 != 0:
return 0
else:
return 1
class NormalDateException(Exception):
"""Exception class for NormalDate"""
pass
class NormalDate:
"""
NormalDate is a specialized class to handle dates without
all the excess baggage (time zones, daylight savings, leap
seconds, etc.) of other date structures. The minimalist
strategy greatly simplifies its implementation and use.
Internally, NormalDate is stored as an integer with values
in a discontinuous range of -99990101 to 99991231. The
integer value is used principally for storage and to simplify
the user interface. Internal calculations are performed by
a scalar based on Jan 1, 1900.
Valid NormalDate ranges include (-9999,1,1) B.C.E. through
(9999,12,31) C.E./A.D.
1.0
No changes, except the version number. After 3 years of use by
various parties I think we can consider it stable.
0.8
Added Prof. Stephen Walton's suggestion for a range method
- module author resisted the temptation to use lambda <0.5 wink>
0.7
Added Dan Winkler's suggestions for __add__, __sub__ methods
0.6
Modifications suggested by Kevin Digweed to fix:
- dayOfWeek, dayOfWeekAbbrev, clone methods
- Permit NormalDate to be a better behaved superclass
0.5
Minor tweaking
0.4
- Added methods __cmp__, __hash__
- Added Epoch variable, scoped to the module
- Added setDay, setMonth, setYear methods
0.3
Minor touch-ups
0.2
- Fixed bug for certain B.C.E leap years
- Added Jim Fulton's suggestions for short alias class name =ND
and __getstate__, __setstate__ methods
Special thanks: Roedy Green
"""
def __init__(self, normalDate=None):
"""
Accept 1 of 4 values to initialize a NormalDate:
1. None - creates a NormalDate for the current day
2. integer in yyyymmdd format
3. string in yyyymmdd format
4. tuple in (yyyy, mm, dd) - localtime/gmtime can also be used
"""
if normalDate is None:
self.setNormalDate(time.localtime(time.time()))
else:
self.setNormalDate(normalDate)
def add(self, days):
"""add days to date; use negative integers to subtract"""
if not isinstance(days,int):
raise NormalDateException( \
'add method parameter must be integer type')
self.normalize(self.scalar() + days)
def __add__(self, days):
"""add integer to normalDate and return a new, calculated value"""
if not isinstance(days,int):
raise NormalDateException( \
'__add__ parameter must be integer type')
cloned = self.clone()
cloned.add(days)
return cloned
def __radd__(self,days):
'''for completeness'''
return self.__add__(days)
def clone(self):
"""return a cloned instance of this normalDate"""
return self.__class__(self.normalDate)
def __cmp__(self, target):
if target is None:
return 1
elif not hasattr(target, 'normalDate'):
return 1
else:
return cmp(self.normalDate, target.normalDate)
def day(self):
"""return the day as integer 1-31"""
return int(repr(self.normalDate)[-2:])
def dayOfWeek(self):
"""return integer representing day of week, Mon=0, Tue=1, etc."""
return dayOfWeek(*self.toTuple())
def dayOfWeekAbbrev(self):
"""return day of week abbreviation for current date: Mon, Tue, etc."""
return _dayOfWeekName[self.dayOfWeek()][:3]
def dayOfWeekName(self):
"""return day of week name for current date: Monday, Tuesday, etc."""
return _dayOfWeekName[self.dayOfWeek()]
def dayOfYear(self):
"""day of year"""
if self.isLeapYear():
daysByMonth = _daysInMonthLeapYear
else:
daysByMonth = _daysInMonthNormal
priorMonthDays = 0
for m in range(self.month() - 1):
priorMonthDays = priorMonthDays + daysByMonth[m]
return self.day() + priorMonthDays
def daysBetweenDates(self, normalDate):
"""
return value may be negative, since calculation is
self.scalar() - arg
"""
if isinstance(normalDate,NormalDate):
return self.scalar() - normalDate.scalar()
else:
return self.scalar() - NormalDate(normalDate).scalar()
def equals(self, target):
if isinstance(target,NormalDate):
if target is None:
return self.normalDate is None
else:
return self.normalDate == target.normalDate
else:
return 0
def endOfMonth(self):
"""returns (cloned) last day of month"""
return self.__class__(self.__repr__()[-8:-2]+str(self.lastDayOfMonth()))
def firstDayOfMonth(self):
"""returns (cloned) first day of month"""
return self.__class__(self.__repr__()[-8:-2]+"01")
def formatUS(self):
"""return date as string in common US format: MM/DD/YY"""
d = self.__repr__()
return "%s/%s/%s" % (d[-4:-2], d[-2:], d[-6:-4])
def formatUSCentury(self):
"""return date as string in 4-digit year US format: MM/DD/YYYY"""
d = self.__repr__()
return "%s/%s/%s" % (d[-4:-2], d[-2:], d[-8:-4])
def _fmtM(self):
return str(self.month())
def _fmtMM(self):
return '%02d' % self.month()
def _fmtMMM(self):
return self.monthAbbrev()
def _fmtMMMM(self):
return self.monthName()
def _fmtMMMMM(self):
return self.monthName()[0]
def _fmtD(self):
return str(self.day())
def _fmtDD(self):
return '%02d' % self.day()
def _fmtDDD(self):
return self.dayOfWeekAbbrev()
def _fmtDDDD(self):
return self.dayOfWeekName()
def _fmtYY(self):
return '%02d' % (self.year()%100)
def _fmtYYYY(self):
return str(self.year())
def formatMS(self,fmt):
'''format like MS date using the notation
{YY} --> 2 digit year
{YYYY} --> 4 digit year
{M} --> month as digit
{MM} --> 2 digit month
{MMM} --> abbreviated month name
{MMMM} --> monthname
{MMMMM} --> first character of monthname
{D} --> day of month as digit
{DD} --> 2 digit day of month
        {DDD} --> abbreviated weekday name
{DDDD} --> weekday name
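        e.g. (illustrative) NormalDate(20010406).formatMS('{ddd} {d} {mmm} {yyyy}')
        returns 'Fri 6 Apr 2001'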
'''
r = fmt[:]
f = 0
while 1:
m = _fmtPat.search(r,f)
if m:
                y = getattr(self,'_fmt'+m.group()[1:-1].upper())()
i, j = m.span()
r = (r[0:i] + y) + r[j:]
f = i + len(y)
else:
return r
def __getstate__(self):
"""minimize persistent storage requirements"""
return self.normalDate
def __hash__(self):
return hash(self.normalDate)
def __int__(self):
return self.normalDate
def isLeapYear(self):
"""
determine if specified year is leap year, returning true (1) or
false (0)
"""
return isLeapYear(self.year())
def _isValidNormalDate(self, normalDate):
"""checks for date validity in [-]yyyymmdd format"""
if not isinstance(normalDate,int):
return 0
if len(repr(normalDate)) > 9:
return 0
if normalDate < 0:
dateStr = "%09d" % normalDate
else:
dateStr = "%08d" % normalDate
if len(dateStr) < 8:
return 0
elif len(dateStr) == 9:
if (dateStr[0] != '-' and dateStr[0] != '+'):
return 0
year = int(dateStr[:-4])
if year < -9999 or year > 9999 or year == 0:
return 0 # note: zero (0) is not a valid year
month = int(dateStr[-4:-2])
if month < 1 or month > 12:
return 0
if isLeapYear(year):
maxDay = _daysInMonthLeapYear[month - 1]
else:
maxDay = _daysInMonthNormal[month - 1]
day = int(dateStr[-2:])
if day < 1 or day > maxDay:
return 0
if year == 1582 and month == 10 and day > 4 and day < 15:
return 0 # special case of 10 days dropped: Oct 5-14, 1582
return 1
def lastDayOfMonth(self):
"""returns last day of the month as integer 28-31"""
if self.isLeapYear():
return _daysInMonthLeapYear[self.month() - 1]
else:
return _daysInMonthNormal[self.month() - 1]
def localeFormat(self):
"""override this method to use your preferred locale format"""
return self.formatUS()
def month(self):
"""returns month as integer 1-12"""
return int(repr(self.normalDate)[-4:-2])
def monthAbbrev(self):
"""returns month as a 3-character abbreviation, i.e. Jan, Feb, etc."""
return _monthName[self.month() - 1][:3]
def monthName(self):
"""returns month name, i.e. January, February, etc."""
return _monthName[self.month() - 1]
def normalize(self, scalar):
"""convert scalar to normalDate"""
if scalar < _bigBangScalar:
msg = "normalize(%d): scalar below minimum" % \
_bigBangScalar
raise NormalDateException(msg)
if scalar > _bigCrunchScalar:
msg = "normalize(%d): scalar exceeds maximum" % \
_bigCrunchScalar
raise NormalDateException(msg)
from math import floor
if scalar >= -115860:
year = 1600 + int(floor((scalar + 109573) / 365.2425))
elif scalar >= -693597:
year = 4 + int(floor((scalar + 692502) / 365.2425))
else:
year = -4 + int(floor((scalar + 695058) / 365.2425))
days = scalar - firstDayOfYear(year) + 1
if days <= 0:
year = year - 1
days = scalar - firstDayOfYear(year) + 1
daysInYear = 365
if isLeapYear(year):
daysInYear = daysInYear + 1
if days > daysInYear:
year = year + 1
days = scalar - firstDayOfYear(year) + 1
# add 10 days if between Oct 15, 1582 and Dec 31, 1582
if (scalar >= -115860 and scalar <= -115783):
days = days + 10
if isLeapYear(year):
daysByMonth = _daysInMonthLeapYear
else:
daysByMonth = _daysInMonthNormal
dc = 0; month = 12
for m in range(len(daysByMonth)):
dc = dc + daysByMonth[m]
if dc >= days:
month = m + 1
break
# add up the days in prior months
priorMonthDays = 0
for m in range(month - 1):
priorMonthDays = priorMonthDays + daysByMonth[m]
day = days - priorMonthDays
self.setNormalDate((year, month, day))
def range(self, days):
"""Return a range of normalDates as a list. Parameter
may be an int or normalDate."""
if not isinstance(days,int):
days = days - self # if not int, assume arg is normalDate type
r = []
for i in range(days):
r.append(self + i)
return r
def __repr__(self):
"""print format: [-]yyyymmdd"""
# Note: When disassembling a NormalDate string, be sure to
# count from the right, i.e. epochMonth = int(repr(Epoch)[-4:-2]),
# or the slice won't work for dates B.C.
if self.normalDate < 0:
return "%09d" % self.normalDate
else:
return "%08d" % self.normalDate
def scalar(self):
"""days since baseline date: Jan 1, 1900"""
(year, month, day) = self.toTuple()
days = firstDayOfYear(year) + day - 1
if self.isLeapYear():
for m in range(month - 1):
days = days + _daysInMonthLeapYear[m]
else:
for m in range(month - 1):
days = days + _daysInMonthNormal[m]
if year == 1582:
if month > 10 or (month == 10 and day > 4):
days = days - 10
return days
def setDay(self, day):
"""set the day of the month"""
maxDay = self.lastDayOfMonth()
if day < 1 or day > maxDay:
msg = "day is outside of range 1 to %d" % maxDay
raise NormalDateException(msg)
(y, m, d) = self.toTuple()
self.setNormalDate((y, m, day))
def setMonth(self, month):
"""set the month [1-12]"""
if month < 1 or month > 12:
raise NormalDateException('month is outside range 1 to 12')
(y, m, d) = self.toTuple()
self.setNormalDate((y, month, d))
def setNormalDate(self, normalDate):
"""
accepts date as scalar string/integer (yyyymmdd) or tuple
(year, month, day, ...)"""
if isinstance(normalDate,int):
self.normalDate = normalDate
elif isStrType(normalDate):
try:
self.normalDate = int(normalDate)
except:
m = _iso_re.match(normalDate)
if m:
self.setNormalDate(m.group(1)+m.group(2)+m.group(3))
else:
raise NormalDateException("unable to setNormalDate(%s)" % repr(normalDate))
elif isinstance(normalDate,_DateSeqTypes):
self.normalDate = int("%04d%02d%02d" % normalDate[:3])
elif isinstance(normalDate,NormalDate):
self.normalDate = normalDate.normalDate
elif isinstance(normalDate,(datetime.datetime,datetime.date)):
self.normalDate = (normalDate.year*100+normalDate.month)*100+normalDate.day
if not self._isValidNormalDate(self.normalDate):
raise NormalDateException("unable to setNormalDate(%s)" % repr(normalDate))
def setYear(self, year):
if year == 0:
raise NormalDateException('cannot set year to zero')
elif year < -9999:
raise NormalDateException('year cannot be less than -9999')
elif year > 9999:
raise NormalDateException('year cannot be greater than 9999')
(y, m, d) = self.toTuple()
self.setNormalDate((year, m, d))
__setstate__ = setNormalDate
def __sub__(self, v):
if isinstance(v,int):
return self.__add__(-v)
return self.scalar() - v.scalar()
def __rsub__(self,v):
if isinstance(v,int):
return NormalDate(v) - self
else:
return v.scalar() - self.scalar()
def toTuple(self):
"""return date as (year, month, day) tuple"""
return (self.year(), self.month(), self.day())
def year(self):
"""return year in yyyy format, negative values indicate B.C."""
return int(repr(self.normalDate)[:-4])
################# Utility functions #################
def bigBang():
"""return lower boundary as a NormalDate"""
return NormalDate((-9999, 1, 1))
def bigCrunch():
"""return upper boundary as a NormalDate"""
return NormalDate((9999, 12, 31))
def dayOfWeek(y, m, d):
"""return integer representing day of week, Mon=0, Tue=1, etc."""
if m == 1 or m == 2:
m = m + 12
y = y - 1
    return (d + 2*m + 3*(m+1)//5 + y + y//4 - y//100 + y//400) % 7
def firstDayOfYear(year):
"""number of days to the first of the year, relative to Jan 1, 1900"""
if not isinstance(year,int):
msg = "firstDayOfYear() expected integer, got %s" % type(year)
raise NormalDateException(msg)
if year == 0:
raise NormalDateException('first day of year cannot be zero (0)')
elif year < 0: # BCE calculation
firstDay = (year * 365) + int((year - 1) / 4) - 693596
else: # CE calculation
leapAdjust = int((year + 3) / 4)
if year > 1600:
leapAdjust = leapAdjust - int((year + 99 - 1600) / 100) + \
int((year + 399 - 1600) / 400)
firstDay = year * 365 + leapAdjust - 693963
if year > 1582:
firstDay = firstDay - 10
return firstDay
def FND(d):
'''convert to ND if required'''
return isinstance(d,NormalDate) and d or ND(d)
Epoch=bigBang()
ND=NormalDate
BDEpoch=ND(15821018)
BDEpochScalar = -115857
class BusinessDate(NormalDate):
"""
Specialised NormalDate
"""
def add(self, days):
"""add days to date; use negative integers to subtract"""
if not isinstance(days,int):
raise NormalDateException('add method parameter must be integer')
self.normalize(self.scalar() + days)
def __add__(self, days):
"""add integer to BusinessDate and return a new, calculated value"""
if not isinstance(days,int):
raise NormalDateException('__add__ parameter must be integer')
cloned = self.clone()
cloned.add(days)
return cloned
def __sub__(self, v):
return isinstance(v,int) and self.__add__(-v) or self.scalar() - v.scalar()
def asNormalDate(self):
return ND(self.normalDate)
def daysBetweenDates(self, normalDate):
        return self.asNormalDate().daysBetweenDates(normalDate)
def _checkDOW(self):
if self.dayOfWeek()>4: raise NormalDateException("%r isn't a business day" % self.normalDate)
def normalize(self, i):
i = int(i)
        NormalDate.normalize(self,(i//5)*7+i%5+BDEpochScalar)
def scalar(self):
d = self.asNormalDate()
i = d - BDEpoch #luckily BDEpoch is a Monday so we don't have a problem
#concerning the relative weekday
        return 5*(i//7) + i%7
def setNormalDate(self, normalDate):
NormalDate.setNormalDate(self,normalDate)
self._checkDOW()
if __name__ == '__main__':
today = NormalDate()
print("NormalDate test:")
print(" Today (%s) is: %s %s" % (today, today.dayOfWeekAbbrev(), today.localeFormat()))
yesterday = today - 1
print(" Yesterday was: %s %s" % (yesterday.dayOfWeekAbbrev(), yesterday.localeFormat()))
tomorrow = today + 1
print(" Tomorrow will be: %s %s" % (tomorrow.dayOfWeekAbbrev(), tomorrow.localeFormat()))
print(" Days between tomorrow and yesterday: %d" % (tomorrow - yesterday))
print(today.formatMS('{d}/{m}/{yy}'))
print(today.formatMS('{dd}/{m}/{yy}'))
print(today.formatMS('{ddd} {d}/{m}/{yy}'))
print(today.formatMS('{dddd} {d}/{m}/{yy}'))
print(today.formatMS('{d}/{mm}/{yy}'))
print(today.formatMS('{d}/{mmm}/{yy}'))
print(today.formatMS('{d}/{mmmm}/{yy}'))
print(today.formatMS('{d}/{m}/{yyyy}'))
b = BusinessDate('20010116')
print('b=',b,'b.scalar()', b.scalar())
| bsd-3-clause | -4,366,325,312,494,363,000 | 32.81129 | 101 | 0.569146 | false |
GoogleCloudPlatform/sap-deployment-automation | third_party/github.com/ansible/awx/awx/main/tests/functional/commands/test_oauth2_token_revoke.py | 2 | 2720 | # Python
import datetime
import pytest
import string
import random
from io import StringIO
# Django
from django.core.management import call_command
from django.core.management.base import CommandError
# AWX
from awx.main.models import RefreshToken
from awx.main.models.oauth import OAuth2AccessToken
from awx.api.versioning import reverse
@pytest.mark.django_db
class TestOAuth2RevokeCommand:
def test_non_existing_user(self):
out = StringIO()
fake_username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
arg = '--user=' + fake_username
with pytest.raises(CommandError) as excinfo:
call_command('revoke_oauth2_tokens', arg, stdout=out)
assert 'A user with that username does not exist' in str(excinfo.value)
out.close()
def test_revoke_all_access_tokens(self, post, admin, alice):
url = reverse('api:o_auth2_token_list')
for user in (admin, alice):
post(
url,
{'description': 'test token', 'scope': 'read'},
user
)
assert OAuth2AccessToken.objects.count() == 2
call_command('revoke_oauth2_tokens')
assert OAuth2AccessToken.objects.count() == 0
def test_revoke_access_token_for_user(self, post, admin, alice):
url = reverse('api:o_auth2_token_list')
post(
url,
{'description': 'test token', 'scope': 'read'},
alice
)
assert OAuth2AccessToken.objects.count() == 1
call_command('revoke_oauth2_tokens', '--user=admin')
assert OAuth2AccessToken.objects.count() == 1
call_command('revoke_oauth2_tokens', '--user=alice')
assert OAuth2AccessToken.objects.count() == 0
def test_revoke_all_refresh_tokens(self, post, admin, oauth_application):
url = reverse('api:o_auth2_token_list')
post(
url,
{
'description': 'test token for',
'scope': 'read',
'application': oauth_application.pk
},
admin
)
assert OAuth2AccessToken.objects.count() == 1
assert RefreshToken.objects.count() == 1
call_command('revoke_oauth2_tokens')
assert OAuth2AccessToken.objects.count() == 0
assert RefreshToken.objects.count() == 1
for r in RefreshToken.objects.all():
assert r.revoked is None
call_command('revoke_oauth2_tokens', '--all')
assert RefreshToken.objects.count() == 1
for r in RefreshToken.objects.all():
assert r.revoked is not None
assert isinstance(r.revoked, datetime.datetime)
| apache-2.0 | 7,834,790,525,911,286,000 | 33.43038 | 104 | 0.610294 | false |
wglass/lighthouse | tests/haproxy/control_tests.py | 1 | 15691 | import errno
import socket
import subprocess
import sys
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch, Mock, mock_open
from lighthouse.haproxy.control import (
HAProxyControl,
UnknownCommandError, PermissionError, UnknownServerError
)
if sys.version_info[0] == 3:
builtin_module = "builtins"
else:
builtin_module = "__builtin__"
class HAProxyControlTests(unittest.TestCase):
def setUp(self):
self.stub_commands = {}
self.command_patcher = patch.object(HAProxyControl, "send_command")
mock_send_command = self.command_patcher.start()
def stop_command_patcher():
try:
self.command_patcher.stop()
except RuntimeError:
pass
self.addCleanup(stop_command_patcher)
def get_stub_response(command):
if command not in self.stub_commands:
raise AssertionError("Got un-stubbed command '%s'" % command)
return self.stub_commands[command]
mock_send_command.side_effect = get_stub_response
@patch("lighthouse.haproxy.control.Peer")
def test_gets_current_peer(self, Peer):
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(ctl.peer, Peer.current.return_value)
@patch("lighthouse.haproxy.control.subprocess")
def test_get_version(self, mock_subprocess):
mock_subprocess.check_output.return_value = "\n".join((
"HA-Proxy version 1.5.9 2014/11/25",
"Copyright 2000-2014 Willy Tarreau <[email protected]>"
))
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(ctl.get_version(), (1, 5, 9))
@patch("lighthouse.haproxy.control.subprocess")
def test_get_version__error(self, mock_subprocess):
mock_subprocess.CalledProcessError = subprocess.CalledProcessError
error = subprocess.CalledProcessError(-1, "haproxy")
mock_subprocess.check_output.side_effect = error
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(ctl.get_version(), None)
@patch("lighthouse.haproxy.control.subprocess")
def test_get_version__weird_output(self, mock_subprocess):
mock_subprocess.check_output.return_value = "\n".join((
"HA-Proxy version wefaewlfkjalwekja;kj",
))
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(ctl.get_version(), None)
def test_enable_node(self):
self.stub_commands = {
"enable server rediscache/redis01": "OK"
}
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
result = ctl.enable_node("rediscache", "redis01")
self.assertEqual(result, "OK")
def test_disable_node(self):
self.stub_commands = {
"disable server rediscache/redis02": "OK"
}
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
result = ctl.disable_node("rediscache", "redis02")
self.assertEqual(result, "OK")
def test_get_active_nodes(self):
self.stub_commands = {
"show stat -1 4 -1":
"""# pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq
rediscache,redis01,,,0,0,1000,0,0,500,0
rediscache,redis02,0,0,0,0,1000,0,0,0,1000
web,app03,0,0,0,0,1000,0,0,0,0"""
}
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(
ctl.get_active_nodes(),
{
"rediscache": [
{
'bin': '0', 'smax': '0', 'scur': '0', 'stot': '0',
'slim': '1000', 'qmax': '', 'dreq': '0', 'qcur': '',
'bout': '500', 'svname': 'redis01'
},
{
'bin': '0', 'smax': '0', 'scur': '0', 'stot': '0',
'slim': '1000', 'qmax': '0', 'dreq': '1000', 'qcur':
'0', 'bout': '0', 'svname': 'redis02'
},
],
"web": [
{
"svname": "app03",
'bin': '0', 'smax': '0', 'scur': '0', 'stot': '0',
'slim': '1000', 'qmax': '0', 'dreq': '0', 'qcur': '0',
'bout': '0'
},
]
}
)
def test_get_active_nodes__no_response(self):
self.stub_commands = {
"show stat -1 4 -1": ""
}
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(
ctl.get_active_nodes(),
[]
)
def test_get_info(self):
self.stub_commands = {
"show info": """Name: HAProxy
Version: 1.4-dev2-49
Release_date: 2009/09/23
Nbproc: 1
Process_num: 1
Pid: 12334"""
}
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(
ctl.get_info(),
{
"name": "HAProxy",
"nbproc": "1",
"pid": "12334",
"process_num": "1",
"version": "1.4-dev2-49",
"release_date": "2009/09/23"
}
)
def test_info__no_response(self):
self.stub_commands = {
"show info": ""
}
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertEqual(ctl.get_info(), {})
@patch.object(HAProxyControl, "get_version")
@patch("lighthouse.haproxy.control.Peer")
@patch("lighthouse.haproxy.control.os")
@patch("lighthouse.haproxy.control.subprocess")
@patch(builtin_module + ".open", mock_open(read_data="12355"))
def test_restart_with_peer(
self, mock_subprocess, mock_os, Peer, get_version
):
get_version.return_value = (1, 5, 11)
mock_os.path.exists.return_value = True
peer = Mock(host="app08", port=8888)
peer.name = "app08"
Peer.current.return_value = peer
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
ctl.restart()
mock_subprocess.check_output.assert_called_once_with([
"haproxy", "-f", "/etc/haproxy.cfg", "-p", "/var/run/haproxy.pid",
"-L", "app08", "-sf", "12355"
])
@patch.object(HAProxyControl, "get_version")
@patch("lighthouse.haproxy.control.Peer")
@patch("lighthouse.haproxy.control.os")
@patch("lighthouse.haproxy.control.subprocess")
@patch(builtin_module + ".open", mock_open(read_data="12355"))
def test_restart_without_peer(
self, mock_subprocess, mock_os, Peer, get_version
):
get_version.return_value = (1, 4, 9)
mock_os.path.exists.return_value = True
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
ctl.restart()
mock_subprocess.check_output.assert_called_once_with([
"haproxy", "-f", "/etc/haproxy.cfg", "-p", "/var/run/haproxy.pid",
"-sf", "12355"
])
@patch.object(HAProxyControl, "get_version")
@patch("lighthouse.haproxy.control.Peer")
@patch("lighthouse.haproxy.control.os")
@patch("lighthouse.haproxy.control.subprocess")
def test_restart_without_peer_or_pid_file(
self, mock_subprocess, mock_os, Peer, get_version
):
get_version.return_value = (1, 4, 9)
mock_os.path.exists.return_value = False
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
ctl.restart()
mock_subprocess.check_output.assert_called_once_with([
"haproxy", "-f", "/etc/haproxy.cfg", "-p", "/var/run/haproxy.pid",
])
@patch("lighthouse.haproxy.control.Peer")
@patch("lighthouse.haproxy.control.os")
@patch("lighthouse.haproxy.control.subprocess")
@patch(builtin_module + ".open", mock_open(read_data="12355"))
def test_restart__process_error(self, mock_subprocess, mock_os, Peer):
mock_subprocess.CalledProcessError = subprocess.CalledProcessError
mock_os.path.exists.return_value = True
peer = Mock(host="app08", port=8888)
peer.name = "app08:8888"
Peer.current.return_value = peer
error = subprocess.CalledProcessError(-1, "haproxy")
mock_subprocess.check_output.side_effect = error
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
ctl.restart()
mock_subprocess.check_output.assert_called_with([
"haproxy", "-f", "/etc/haproxy.cfg", "-p", "/var/run/haproxy.pid",
"-sf", "12355"
])
@patch.object(HAProxyControl, "get_info")
@patch.object(HAProxyControl, "get_version", Mock(return_value=(1, 4, 12)))
@patch("lighthouse.haproxy.control.subprocess")
def test_restart__get_info_error(self, mock_subprocess, mock_get_info):
mock_get_info.side_effect = Exception("oh no!")
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
ctl.restart()
mock_subprocess.check_output.assert_called_with([
"haproxy", "-f", "/etc/haproxy.cfg", "-p", "/var/run/haproxy.pid"
])
@patch("lighthouse.haproxy.control.socket")
def test_send_command_uses_sendall_and_closes_socket(self, mock_socket):
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
mock_sock.recv.return_value = ""
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
ctl.send_command("show foobar")
mock_socket.socket.assert_called_once_with(
mock_socket.AF_UNIX, mock_socket.SOCK_STREAM
)
mock_sock.connect.assert_called_once_with("/var/run/haproxy.sock")
mock_sock.sendall.assert_called_once_with(b"show foobar\n")
mock_sock.close.assert_called_once_with()
@patch("lighthouse.haproxy.control.socket")
def test_send_command_error_connection_refused(self, mock_socket):
mock_socket.error = socket.error
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
mock_sock.connect.side_effect = socket.error(
errno.ECONNREFUSED, ""
)
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
result = ctl.send_command("show info")
self.assertEqual(result, None)
@patch("lighthouse.haproxy.control.socket")
def test_send_command_error_other_connection_error(self, mock_socket):
mock_socket.error = socket.error
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
mock_sock.connect.side_effect = socket.error(
errno.ENOMEM, ""
)
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertRaises(
socket.error,
ctl.send_command, "show info"
)
@patch("lighthouse.haproxy.control.socket")
def test_send_command_handles_flaky_socket(self, mock_socket):
mock_socket.error = socket.error
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
response_chunks = [
b"EVERYTHING",
socket.error(errno.EAGAIN, ""),
b" A-",
socket.error(errno.EINTR, ""),
socket.error(errno.EINTR, ""),
b"OK\n",
b"\n"
]
def get_next_chunk(bufsize):
try:
chunk = response_chunks.pop(0)
except IndexError:
return ""
if isinstance(chunk, Exception):
raise chunk
return chunk
mock_sock.recv.side_effect = get_next_chunk
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
result = ctl.send_command("show foobar")
self.assertEqual(result, "EVERYTHING A-OK")
@patch("lighthouse.haproxy.control.socket")
def test_send_command_really_flaky_socket(self, mock_socket):
mock_socket.error = socket.error
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
response_chunks = [
b"EVERYTHING",
socket.error(errno.ECONNREFUSED, ""),
]
def get_next_chunk(bufsize):
try:
chunk = response_chunks.pop(0)
except IndexError:
return ""
if isinstance(chunk, Exception):
raise chunk
return chunk
mock_sock.recv.side_effect = get_next_chunk
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertRaises(
socket.error,
ctl.send_command, "show foobar"
)
@patch("lighthouse.haproxy.control.socket")
def test_unknown_command_response(self, mock_socket):
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
chunks = [b"Unknown command.", b"\n", ""]
def get_next_chunk(bufsize):
return chunks.pop(0)
mock_sock.recv.side_effect = get_next_chunk
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertRaises(
UnknownCommandError,
ctl.send_command, "show foobar"
)
@patch("lighthouse.haproxy.control.socket")
def test_permission_denied_response(self, mock_socket):
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
chunks = [b"Permission denied.\n", ""]
def get_next_chunk(bufsize):
return chunks.pop(0)
mock_sock.recv.side_effect = get_next_chunk
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertRaises(
PermissionError,
ctl.send_command, "show foobar"
)
@patch("lighthouse.haproxy.control.socket")
def test_no_such_backend_response(self, mock_socket):
self.command_patcher.stop()
mock_sock = mock_socket.socket.return_value
chunks = [b"No such backend.\n", None]
def get_next_chunk(bufsize):
return chunks.pop(0)
mock_sock.recv.side_effect = get_next_chunk
ctl = HAProxyControl(
"/etc/haproxy.cfg", "/var/run/haproxy.sock", "/var/run/haproxy.pid"
)
self.assertRaises(
UnknownServerError,
ctl.send_command, "disable server foobar/bazz"
)
| apache-2.0 | 8,366,356,421,827,712,000 | 29.408915 | 79 | 0.558983 | false |
nschaetti/EchoTorch | echotorch/transforms/text/Transformer.py | 1 | 2127 | # -*- coding: utf-8 -*-
#
# Imports
import torch
# Base class for text transformers
class Transformer(object):
"""
Base class for text transformers
"""
# Constructor
def __init__(self):
"""
Constructor
"""
# Properties
self.symbols = self.generate_symbols()
# end __init__
##############################################
# Properties
##############################################
# Get the number of inputs
@property
def input_dim(self):
"""
Get the number of inputs.
:return: The input size.
"""
return len(self.get_tags())
# end input_dim
##############################################
# Public
##############################################
# Get tags
def get_tags(self):
"""
Get tags.
:return: A list of tags.
"""
return []
# end get_tags
# Get symbol from tag
def tag_to_symbol(self, tag):
"""
Get symbol from tag.
:param tag: Tag.
:return: The corresponding symbols.
"""
if tag in self.symbols.keys():
return self.symbols[tag]
return None
# end word_to_symbol
# Generate symbols
def generate_symbols(self):
"""
Generate word symbols.
:return: Dictionary of tag to symbols.
"""
result = dict()
for index, p in enumerate(self.get_tags()):
result[p] = torch.zeros(1, self.input_dim)
result[p][0, index] = 1.0
# end for
return result
# end generate_symbols
##############################################
# Override
##############################################
# Convert a string
def __call__(self, tokens):
"""
Convert a string to a ESN input
:param tokens: Text to convert
:return: A list of symbols
"""
pass
# end convert
##############################################
# Static
##############################################
# end TextTransformer
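# Example subclass (illustrative sketch only -- 'ExampleTagTransformer' and
# its tag set are not part of the original module; tokens are assumed to be
# tags drawn from the vocabulary below)
class ExampleTagTransformer(Transformer):
    """
    One-hot encodes tokens drawn from a fixed tag vocabulary
    """
    # Get tags
    def get_tags(self):
        """
        Get tags.
        :return: A list of tags.
        """
        return ['DET', 'NOUN', 'VERB']
    # end get_tags
    # Convert a list of tags
    def __call__(self, tokens):
        """
        Convert a list of tags to a tensor of one-hot rows.
        :param tokens: List of tags (each must appear in get_tags()).
        :return: A (len(tokens) x input_dim) tensor.
        """
        return torch.cat([self.tag_to_symbol(t) for t in tokens], dim=0)
    # end __call__
# end ExampleTagTransformer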
| gpl-3.0 | 7,657,291,632,524,109,000 | 21.62766 | 54 | 0.41796 | false |
dstufft/warehouse | warehouse/migrations/versions/f404a67e0370_disable_legacy_file_types_unless_a_.py | 1 | 1518 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Disable legacy file types unless a project has used them previously
Revision ID: f404a67e0370
Revises: b8fda0d7fbb5
Create Date: 2016-12-17 02:58:55.328035
"""
from alembic import op
revision = "f404a67e0370"
down_revision = "b8fda0d7fbb5"
def upgrade():
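    # Disable legacy file types for every project except those that have
    # already uploaded at least one file with a legacy extension or
    # package type.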
op.execute(
r""" UPDATE packages
SET allow_legacy_files = 'f'
WHERE name NOT IN (
SELECT DISTINCT ON (packages.name) packages.name
FROM packages, release_files
WHERE packages.name = release_files.name
AND (
filename !~* '.+?\.(tar\.gz|zip|whl|egg)$'
OR packagetype NOT IN (
'sdist',
'bdist_wheel',
'bdist_egg'
)
)
)
"""
)
def downgrade():
    raise RuntimeError("Order No. 227 - Not one step back!")
| apache-2.0 | 6,441,794,969,805,481,000 | 29.755102 | 74 | 0.58925 | false |
wangyum/beam | sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py | 1 | 10147 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the apiclient module."""
import unittest
import mock
import pkg_resources
from apache_beam.metrics.cells import DistributionData
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.runners.dataflow.internal import dependency
from apache_beam.runners.dataflow.internal.clients import dataflow
# Protect against environments where apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from apache_beam.runners.dataflow.internal import apiclient
except ImportError:
apiclient = None
# pylint: enable=wrong-import-order, wrong-import-position
@unittest.skipIf(apiclient is None, 'GCP dependencies are not installed')
class UtilTest(unittest.TestCase):
@unittest.skip("Enable once BEAM-1080 is fixed.")
def test_create_application_client(self):
pipeline_options = PipelineOptions()
apiclient.DataflowApplicationClient(pipeline_options)
def test_set_network(self):
pipeline_options = PipelineOptions(
['--network', 'anetworkname',
'--temp_location', 'gs://any-location/temp'])
env = apiclient.Environment([], #packages
pipeline_options,
'2.0.0') #any environment version
self.assertEqual(env.proto.workerPools[0].network,
'anetworkname')
def test_set_subnetwork(self):
pipeline_options = PipelineOptions(
['--subnetwork', '/regions/MY/subnetworks/SUBNETWORK',
'--temp_location', 'gs://any-location/temp'])
env = apiclient.Environment([], #packages
pipeline_options,
'2.0.0') #any environment version
self.assertEqual(env.proto.workerPools[0].subnetwork,
'/regions/MY/subnetworks/SUBNETWORK')
def test_invalid_default_job_name(self):
# Regexp for job names in dataflow.
regexp = '^[a-z]([-a-z0-9]{0,61}[a-z0-9])?$'
job_name = apiclient.Job._build_default_job_name('invalid.-_user_n*/ame')
self.assertRegexpMatches(job_name, regexp)
job_name = apiclient.Job._build_default_job_name(
'invalid-extremely-long.username_that_shouldbeshortened_or_is_invalid')
self.assertRegexpMatches(job_name, regexp)
def test_default_job_name(self):
job_name = apiclient.Job.default_job_name(None)
regexp = 'beamapp-.*-[0-9]{10}-[0-9]{6}'
self.assertRegexpMatches(job_name, regexp)
def test_split_int(self):
number = 12345
split_number = apiclient.to_split_int(number)
self.assertEqual((split_number.lowBits, split_number.highBits),
(number, 0))
shift_number = number << 32
split_number = apiclient.to_split_int(shift_number)
self.assertEqual((split_number.lowBits, split_number.highBits),
(0, number))
def test_translate_distribution(self):
metric_update = dataflow.CounterUpdate()
distribution_update = DistributionData(16, 2, 1, 15)
apiclient.translate_distribution(distribution_update, metric_update)
self.assertEqual(metric_update.distribution.min.lowBits,
distribution_update.min)
self.assertEqual(metric_update.distribution.max.lowBits,
distribution_update.max)
self.assertEqual(metric_update.distribution.sum.lowBits,
distribution_update.sum)
self.assertEqual(metric_update.distribution.count.lowBits,
distribution_update.count)
def test_translate_means(self):
metric_update = dataflow.CounterUpdate()
accumulator = mock.Mock()
accumulator.sum = 16
accumulator.count = 2
apiclient.MetricUpdateTranslators.translate_scalar_mean_int(accumulator,
metric_update)
self.assertEqual(metric_update.integerMean.sum.lowBits, accumulator.sum)
self.assertEqual(metric_update.integerMean.count.lowBits, accumulator.count)
accumulator.sum = 16.0
accumulator.count = 2
apiclient.MetricUpdateTranslators.translate_scalar_mean_float(accumulator,
metric_update)
self.assertEqual(metric_update.floatingPointMean.sum, accumulator.sum)
self.assertEqual(
metric_update.floatingPointMean.count.lowBits, accumulator.count)
def test_default_ip_configuration(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp'])
env = apiclient.Environment([], pipeline_options, '2.0.0')
self.assertEqual(env.proto.workerPools[0].ipConfiguration, None)
def test_public_ip_configuration(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp',
'--use_public_ips'])
env = apiclient.Environment([], pipeline_options, '2.0.0')
self.assertEqual(
env.proto.workerPools[0].ipConfiguration,
dataflow.WorkerPool.IpConfigurationValueValuesEnum.WORKER_IP_PUBLIC)
def test_private_ip_configuration(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp',
'--no_use_public_ips'])
env = apiclient.Environment([], pipeline_options, '2.0.0')
self.assertEqual(
env.proto.workerPools[0].ipConfiguration,
dataflow.WorkerPool.IpConfigurationValueValuesEnum.WORKER_IP_PRIVATE)
def test_harness_override_present_in_dataflow_distributions(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp', '--streaming'])
override = ''.join(
['runner_harness_container_image=',
dependency.DATAFLOW_CONTAINER_IMAGE_REPOSITORY,
'/harness:2.2.0'])
distribution = pkg_resources.Distribution(version='2.2.0')
with mock.patch(
'apache_beam.runners.dataflow.internal.dependency.pkg_resources'
'.get_distribution',
mock.MagicMock(return_value=distribution)):
env = apiclient.Environment([], #packages
pipeline_options,
'2.0.0') #any environment version
self.assertIn(override, env.proto.experiments)
@mock.patch('apache_beam.runners.dataflow.internal.dependency.'
'beam_version.__version__', '2.2.0')
def test_harness_override_present_in_beam_releases(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp', '--streaming'])
override = ''.join(
['runner_harness_container_image=',
dependency.DATAFLOW_CONTAINER_IMAGE_REPOSITORY,
'/harness:2.2.0'])
with mock.patch(
'apache_beam.runners.dataflow.internal.dependency.pkg_resources'
'.get_distribution',
mock.Mock(side_effect=pkg_resources.DistributionNotFound())):
env = apiclient.Environment([], #packages
pipeline_options,
'2.0.0') #any environment version
self.assertIn(override, env.proto.experiments)
@mock.patch('apache_beam.runners.dataflow.internal.dependency.'
'beam_version.__version__', '2.2.0-dev')
def test_harness_override_absent_in_unreleased_sdk(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp', '--streaming'])
with mock.patch(
'apache_beam.runners.dataflow.internal.dependency.pkg_resources'
'.get_distribution',
mock.Mock(side_effect=pkg_resources.DistributionNotFound())):
env = apiclient.Environment([], #packages
pipeline_options,
'2.0.0') #any environment version
if env.proto.experiments:
for experiment in env.proto.experiments:
self.assertNotIn('runner_harness_container_image=', experiment)
def test_labels(self):
pipeline_options = PipelineOptions(
['--project', 'test_project', '--job_name', 'test_job_name',
'--temp_location', 'gs://test-location/temp'])
job = apiclient.Job(pipeline_options)
self.assertIsNone(job.proto.labels)
pipeline_options = PipelineOptions(
['--project', 'test_project', '--job_name', 'test_job_name',
'--temp_location', 'gs://test-location/temp',
'--label', 'key1=value1',
'--label', 'key2',
'--label', 'key3=value3',
'--labels', 'key4=value4',
'--labels', 'key5'])
job = apiclient.Job(pipeline_options)
self.assertEqual(5, len(job.proto.labels.additionalProperties))
self.assertEqual('key1', job.proto.labels.additionalProperties[0].key)
self.assertEqual('value1', job.proto.labels.additionalProperties[0].value)
self.assertEqual('key2', job.proto.labels.additionalProperties[1].key)
self.assertEqual('', job.proto.labels.additionalProperties[1].value)
self.assertEqual('key3', job.proto.labels.additionalProperties[2].key)
self.assertEqual('value3', job.proto.labels.additionalProperties[2].value)
self.assertEqual('key4', job.proto.labels.additionalProperties[3].key)
self.assertEqual('value4', job.proto.labels.additionalProperties[3].value)
self.assertEqual('key5', job.proto.labels.additionalProperties[4].key)
self.assertEqual('', job.proto.labels.additionalProperties[4].value)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -1,636,396,044,485,334,300 | 43.504386 | 80 | 0.666995 | false |
Azure/azure-sdk-for-python | sdk/recoveryservices/azure-mgmt-recoveryservices/azure/mgmt/recoveryservices/aio/operations/_replication_usages_operations.py | 1 | 5558 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ReplicationUsagesOperations:
"""ReplicationUsagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
vault_name: str,
**kwargs
) -> AsyncIterable["_models.ReplicationUsageList"]:
"""Fetches the replication usages of the vault.
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ReplicationUsageList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.recoveryservices.models.ReplicationUsageList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ReplicationUsageList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ReplicationUsageList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/replicationUsages'} # type: ignore
| mit | -4,773,754,817,970,686,000 | 46.504274 | 187 | 0.644476 | false |
half0wl/simon | simon/simon.py | 1 | 4207 | from Foundation import NSTimer, NSRunLoop
from AppKit import NSApplication, NSStatusBar, NSMenu, NSMenuItem, \
NSEventTrackingRunLoopMode
from .stats import cpu_usage, ram_usage, available_memory, disk_read, \
disk_written, network_recv, network_sent
class Simon(NSApplication):
def finishLaunching(self):
self._setup_menuBar()
# Create a timer which fires the update_ method every 1second,
# and add it to the runloop
NSRunLoop.currentRunLoop().addTimer_forMode_(
NSTimer
.scheduledTimerWithTimeInterval_target_selector_userInfo_repeats_(
1, self, 'update:', '', True
),
NSEventTrackingRunLoopMode
)
print('Simon is now running.')
print('CTRL+C does not work here.')
print('You can quit through the menubar (Simon -> Quit).')
def update_(self, timer):
# System
self.CPU_USAGE.setTitle_('CPU Usage: {}%'.format(cpu_usage()))
self.RAM_USAGE.setTitle_('RAM Usage: {}%'.format(ram_usage()))
self.RAM_AVAILABLE.setTitle_('Available Memory: {}'.format(
available_memory())
)
# Disk I/O
self.DATA_READ.setTitle_('Read: {}'.format(disk_read()))
self.DATA_WRITTEN.setTitle_('Written: {}'.format(disk_written()))
# Network
self.NETWORK_RECV.setTitle_('Received: {}'.format(network_recv()))
self.NETWORK_SENT.setTitle_('Sent: {}'.format(network_sent()))
def _setup_menuBar(self):
statusBar = NSStatusBar.systemStatusBar()
self.statusItem = statusBar.statusItemWithLength_(-1)
self.menuBar = NSMenu.alloc().init()
self.statusItem.setTitle_('Simon')
# Labels/buttons
self.SYSTEM = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
'System', 'doNothing:', ''
)
self.DISKIO = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
'Disk I/O', 'doNothing:', ''
)
self.NETWORK = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
'Network', 'doNothing:', ''
)
self.QUIT = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
'Quit', 'terminate:', ''
)
# System
self.CPU_USAGE = self._create_empty_menu_item()
self.RAM_USAGE = self._create_empty_menu_item()
self.RAM_AVAILABLE = self._create_empty_menu_item()
# Disk I/O
self.DATA_READ = self._create_empty_menu_item()
self.DATA_WRITTEN = self._create_empty_menu_item()
# Network
self.NETWORK_RECV = self._create_empty_menu_item()
self.NETWORK_SENT = self._create_empty_menu_item()
'''
Add our items to the menuBar - yields the following output:
Simon
System
CPU Usage
RAM Usage
Available Memory
Disk I/O
Read
Written
Network
Received
Sent
-----------------------
Quit
'''
self.menuBar.addItem_(self.SYSTEM) # system label
self.menuBar.addItem_(self.CPU_USAGE)
self.menuBar.addItem_(self.RAM_USAGE)
self.menuBar.addItem_(self.RAM_AVAILABLE)
self.menuBar.addItem_(self.DISKIO) # disk I/O label
self.menuBar.addItem_(self.DATA_READ)
self.menuBar.addItem_(self.DATA_WRITTEN)
self.menuBar.addItem_(self.NETWORK) # network label
self.menuBar.addItem_(self.NETWORK_RECV)
self.menuBar.addItem_(self.NETWORK_SENT)
        self.menuBar.addItem_(NSMenuItem.separatorItem()) # separator
self.menuBar.addItem_(self.QUIT) # quit button
# Add menu to status bar
self.statusItem.setMenu_(self.menuBar)
def _create_empty_menu_item(self):
return NSMenuItem \
.alloc().initWithTitle_action_keyEquivalent_('', '', '')
def doNothing_(self, sender):
# hack to enable menuItems by passing them this method as action
# setEnabled_ isn't working, so this should do for now (achieves
# the same thing)
pass
| mit | -2,908,278,978,923,838,500 | 33.203252 | 78 | 0.589018 | false |
dmlc/tvm | tests/python/unittest/test_target_codegen_c_host.py | 1 | 4402 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import tvm.testing
from tvm import te
import numpy as np
from tvm.contrib import utils
def test_add():
nn = 1024
n = tvm.runtime.convert(nn)
A = te.placeholder((n,), name="A")
B = te.placeholder((n,), name="B")
C = te.compute(A.shape, lambda *i: A(*i) + B(*i), name="C")
s = te.create_schedule(C.op)
def check_c():
mhost = tvm.build(s, [A, B, C], "c", name="fadd")
temp = utils.tempdir()
path_dso = temp.relpath("temp.so")
mhost.export_library(path_dso)
m = tvm.runtime.load_module(path_dso)
fadd = m["fadd"]
ctx = tvm.cpu(0)
# launch the kernel.
n = nn
a = tvm.nd.array(np.random.uniform(size=n).astype(A.dtype), ctx)
b = tvm.nd.array(np.random.uniform(size=n).astype(B.dtype), ctx)
c = tvm.nd.array(np.zeros(n, dtype=C.dtype), ctx)
fadd(a, b, c)
tvm.testing.assert_allclose(c.asnumpy(), a.asnumpy() + b.asnumpy())
check_c()
def test_add_pipeline():
nn = 1024
n = tvm.runtime.convert(nn)
A = te.placeholder((n,), name="A")
B = te.placeholder((n,), name="B")
AA = te.compute((n,), lambda *i: A(*i), name="A")
BB = te.compute((n,), lambda *i: B(*i), name="B")
T = te.compute(A.shape, lambda *i: AA(*i) + BB(*i), name="T")
C = te.compute(A.shape, lambda *i: T(*i), name="C")
s = te.create_schedule(C.op)
xo, xi = s[C].split(C.op.axis[0], factor=4)
xo1, xo2 = s[C].split(xo, factor=13)
s[C].parallel(xo2)
s[C].pragma(xo1, "parallel_launch_point")
s[C].pragma(xo2, "parallel_stride_pattern")
s[C].pragma(xo2, "parallel_barrier_when_finish")
s[C].vectorize(xi)
def check_c():
# Specifically allow offset to test codepath when offset is available
Ab = tvm.tir.decl_buffer(
A.shape, A.dtype, elem_offset=te.size_var("Aoffset"), offset_factor=8, name="A"
)
binds = {A: Ab}
# BUILD and invoke the kernel.
f1 = tvm.lower(s, [A, B, C], name="fadd_pipeline")
mhost = tvm.build(f1, target="c")
temp = utils.tempdir()
path_dso = temp.relpath("temp.so")
mhost.export_library(path_dso)
m = tvm.runtime.load_module(path_dso)
fadd = m["fadd_pipeline"]
ctx = tvm.cpu(0)
# launch the kernel.
n = nn
a = tvm.nd.array(np.random.uniform(size=n).astype(A.dtype), ctx)
b = tvm.nd.array(np.random.uniform(size=n).astype(B.dtype), ctx)
c = tvm.nd.array(np.zeros(n, dtype=C.dtype), ctx)
fadd(a, b, c)
tvm.testing.assert_allclose(c.asnumpy(), a.asnumpy() + b.asnumpy())
check_c()
def test_reinterpret():
nn = 1024
n = tvm.runtime.convert(nn)
A = te.placeholder((n,), name="A", dtype="int32")
B = te.compute(
A.shape, lambda *i: tvm.tir.call_intrin("float32", "tir.reinterpret", 2 + A(*i)), name="B"
)
s = te.create_schedule(B.op)
def check_c():
mhost = tvm.build(s, [A, B], "c", name="reinterpret")
temp = utils.tempdir()
path_dso = temp.relpath("temp.so")
mhost.export_library(path_dso)
m = tvm.runtime.load_module(path_dso)
fadd = m["reinterpret"]
ctx = tvm.cpu(0)
n = nn
a = tvm.nd.array(np.random.randint(-(2 ** 30), 2 ** 30, size=n).astype(A.dtype), ctx)
b = tvm.nd.array(np.zeros(n, dtype=B.dtype), ctx)
fadd(a, b)
tvm.testing.assert_allclose(b.asnumpy(), (2 + a.asnumpy()).view("float32"))
check_c()
if __name__ == "__main__":
test_add()
test_add_pipeline()
test_reinterpret()
| apache-2.0 | 8,880,396,732,525,257,000 | 34.216 | 98 | 0.597001 | false |
openstack/designate | designate/scheduler/filters/fallback_filter.py | 1 | 1584 | # Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from designate.scheduler.filters import base
from designate import objects
cfg.CONF.register_opts([
cfg.StrOpt('default_pool_id',
default='794ccc2c-d751-44fe-b57f-8894c9f5c842',
help="The name of the default pool"),
], group='service:central')
class FallbackFilter(base.Filter):
"""If there is no zones available to schedule to, this filter will insert
the default_pool_id.
.. note::
This should be used as one of the last filters, if you want to preserve
behavior from before the scheduler existed.
"""
name = 'fallback'
"""Name to enable in the ``[designate:central:scheduler].filters`` option
list
"""
def filter(self, context, pools, zone):
if not pools:
pools = objects.PoolList()
pools.append(
objects.Pool(id=cfg.CONF['service:central'].default_pool_id)
)
return pools
| apache-2.0 | 272,423,216,452,741,820 | 32 | 79 | 0.684975 | false |
crslade/HomeAPI | devices.py | 1 | 8689 | from utilities import ParameterException, get_table_ref
import os
import boto3
import json
from datetime import datetime
from uuid import uuid4 as uuid
from boto3.dynamodb.conditions import Key
#Actions
#create
def create_device(deviceData,roomID):
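    # Expected deviceData shape (inferred from the validation below); the
    # field values here are illustrative only:
    #   {"deviceName": "Lamp", "deviceType": "light",
    #    "parameters": [{"paramName": "power", "paramType": "bool",
    #                    "paramActions": ["on", "off"]}]}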
print("Creating Device")
#ValidateInput
if not 'deviceName' in deviceData:
raise ParameterException(400, "Invalid Parameter: Missing deviceName")
if not 'deviceType' in deviceData:
raise ParameterException(400, "Invalid Parameter: Missing deviceType")
if not 'parameters' in deviceData:
raise ParameterException(400, "Invalid Parameter: Missing parameters")
uid = uuid().hex
nowtime = datetime.now().isoformat()
device = {
'uuid': uid,
'roomID': roomID,
'deviceName': deviceData['deviceName'],
'deviceType': deviceData['deviceType'],
'created_at': nowtime,
'updated_at': nowtime
}
params = create_parameters(deviceData['parameters'],uid)
devices_table().put_item(Item=device)
device['path'] = "/devices/"+uid
device['parameters'] = params
response = {
"isBase64Encoded": "false",
"statusCode": 200,
"body": json.dumps(device)
}
return response
#Get Devices
def get_all_devices():
print("Getting Devices")
table = devices_table()
result = table.scan()
devices = result['Items']
while 'LastEvaluateKey' in result:
result = table.scan(ExclusiveStartKey=result['LastEvaluateKey'])
devices += result['Items']
#load parameters
for device in devices:
params = get_parameters(device['uuid'])
device['parameters'] = params
response = {
"isBase64Encoded": "false",
"statusCode": 200,
"body": json.dumps(devices)
}
return response
def get_room_devices(roomID):
print("Getting Room Devices")
table = devices_table()
if not roomID:
raise ParameterException(400, "Invalid Parameter: Missing roomID")
    #This is inefficient because we are scanning the full table, and then removing results.
#A query would be better, but then we would need a local secondary index.
#Since there will be a limited number of devices (<50), I am not worrying about it.
result = table.scan(FilterExpression=Key('roomID').eq(roomID))
devices = result['Items']
while 'LastEvaluateKey' in result:
result = table.scan(ExclusiveStartKey=result['LastEvaluateKey'])
devices += result['Items']
for device in devices:
params = get_parameters(device['uuid'])
device['parameters'] = params
response = {
"isBase64Encoded": "false",
"statusCode": 200,
"body": json.dumps(devices)
}
return response
def get_device(deviceID):
device = load_device(deviceID)
device['parameters'] = get_parameters(deviceID)
response = {
"isBase64Encoded": "false",
"statusCode": 200,
"body": json.dumps(device)
}
return response
#Update Device
def update_device(deviceID, deviceData):
print("Updating Device")
if not deviceID:
raise ParameterException(404, "Missing Device ID")
nowtime = datetime.now().isoformat()
updateExpressions=[]
attributeValues={}
if 'deviceName' in deviceData:
updateExpressions.append("deviceName = :n")
attributeValues[':n'] = deviceData['deviceName']
if 'deviceType' in deviceData:
updateExpressions.append("deviceType = :t")
attributeValues[':t'] = deviceData['deviceType']
if 'roomID' in deviceData:
updateExpressions.append("roomID = :r")
attributeValues[':r'] = deviceData['roomID']
if len(updateExpressions) < 1:
#error if not updating anything
raise ParameterException(400, "Not updating any properties.")
#update time
updateExpressions.append("updated_at = :u")
attributeValues[':u'] = datetime.now().isoformat()
updateExpressionStr = "set "+(",".join(updateExpressions))
print(updateExpressionStr)
print(attributeValues)
result = devices_table().update_item(
Key={'uuid': deviceID},
UpdateExpression=updateExpressionStr,
ExpressionAttributeValues=attributeValues)
response = {
"isBase64Encoded": "false",
"statusCode": 200,
"body": "{\"message\": \"Device updated\"}"
}
return response
#Delete Device
def delete_device(deviceID):
print("Deleting Device")
delete_device_parameters(deviceID)
devices_table().delete_item(Key={'uuid': deviceID})
response = {
"isBase64Encoded": "false",
"statusCode": 200,
"body": "{\"message\": \"Device and its parameters Deleted.\"}"
}
return response
#Helper Methods
def get_deviceID(event):
if event['pathParameters'] and 'deviceID' in event['pathParameters']:
return event['pathParameters']['deviceID']
else:
return None
def load_device(uuid):
print("Getting Device")
response = devices_table().query(KeyConditionExpression=Key('uuid').eq(uuid))
if len(response['Items'])==1:
return response['Items'][0]
else:
return None
def create_parameters(parameters, deviceID):
print("Creating device parameters")
#Validate Parameters
for parameter in parameters:
if not ('paramName' in parameter and 'paramType' in parameter and 'paramActions' in parameter):
raise ParameterException(400, "Invalid Parameter: Device Parameters do not include all required fields. Need paramName, paramType, paramActions")
parameters_table = get_table_ref('PARAMETERS')
nowtime = datetime.now().isoformat()
newParams = []
with parameters_table.batch_writer() as batch:
for parameter in parameters:
uid = uuid().hex
paramItem = {
'uuid': uid,
'deviceID': deviceID,
'paramName': parameter['paramName'],
'paramType': parameter['paramType'],
'paramActions': parameter['paramActions'],
'created_at': nowtime,
'updated_at': nowtime
}
batch.put_item(Item=paramItem)
newParams.append(paramItem)
return newParams
def get_parameters(deviceID):
print("Getting Parameters")
ptable = params_table()
result = ptable.scan(FilterExpression=Key('deviceID').eq(deviceID))
parameters = result['Items']
while 'LastEvaluateKey' in result:
result = ptable.scan(ExclusiveStartKey=result['LastEvaluateKey'])
parameters += result['Items']
return parameters
def delete_device_parameters(deviceID):
print("Deleting Parameters")
parameters_table = params_table()
    #This is inefficient because we are scanning the full table, and then removing results.
#A query would be better, but then we would need a local secondary index.
#Since there will be a limited number of devices (<50), I am not worrying about it.
result = parameters_table.scan(FilterExpression=Key('deviceID').eq(deviceID))
parameters = result['Items']
while 'LastEvaluateKey' in result:
result = parameters_table.scan(ExclusiveStartKey=result['LastEvaluateKey'])
parameters += result['Items']
for parameter in parameters:
parameters_table.delete_item(Key={'uuid': parameter['uuid']})
def devices_table():
return get_table_ref('DEVICES')
def params_table():
return get_table_ref('PARAMETERS')
def lambda_handler(event, context):
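    """Route an API Gateway proxy event to the matching handler.

    Dispatch (mirrors the branches below): GET -> get_device /
    get_room_devices / get_all_devices, POST -> create_device,
    DELETE -> delete_device, PATCH -> update_device.
    """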
print("Starting Devices Lambda Function")
try:
if event['httpMethod'] == "GET":
deviceID = get_deviceID(event)
if deviceID:
return get_device(deviceID)
else:
if event['pathParameters'] and 'roomID' in event['pathParameters']:
return get_room_devices(event['pathParameters']['roomID'])
else:
return get_all_devices()
elif event['httpMethod'] == "POST":
roomID = event['pathParameters']['roomID'] if event['pathParameters'] and 'roomID' in event['pathParameters'] else None
return create_device(json.loads(event['body']),roomID)
elif event['httpMethod'] == "DELETE":
deviceID = get_deviceID(event)
return delete_device(deviceID)
elif event['httpMethod'] == 'PATCH':
deviceID = get_deviceID(event)
return update_device(deviceID,json.loads(event['body']))
except ParameterException as e:
response = {
"isBase64Encoded": "false",
"statusCode": e.args[0],
"body": "{\"errorMessage\": \""+e.args[1]+".\"}"
}
return response
except json.JSONDecodeError as e:
response = {
"isBase64Encoded": "false",
"statusCode": 400,
"body": "{\"errorMessage\": \"Malformed JSON: "+e.args[0]+"\"}"
}
return response | mit | -7,731,628,886,154,534,000 | 30.146953 | 151 | 0.659454 | false |
room77/py77 | pylib/monitor/haproxy/conditions.py | 1 | 1403 | """
Conditions for sending alert emails for HAProxy Monitor
A condition is a callable that takes a ProxyStats object and
returns an error string or None for success
"""
from datetime import datetime
__copyright__ = '2013, Room 77, Inc.'
__author__ = 'Kyle Konrad <[email protected]>'
def more_than_n_down(n):
"""
Args:
n (int): number of instances down to trigger an alert
"""
def func(stats):
if stats.down_count > n:
return '%d of %d %s instances are down' % (
stats.down_count, stats.down_count + stats.up_count, stats.name)
return func
def more_than_proportion_down(prop):
"""
Args:
    prop (float): proportion of instances down to trigger an alert
"""
def func(stats):
total_count = stats.down_count + stats.up_count
    # use true division; floor division would collapse the ratio to 0 or 1
    if total_count and stats.down_count / total_count > prop:
return '%d of %d %s instances are down' % (
stats.down_count, stats.down_count + stats.up_count, stats.name)
return func
def any_down_longer_than(duration):
"""
Args:
    duration (timedelta): how long an instance must be down to trigger an alert
"""
def func(stats):
now = datetime.utcnow()
for server, last_up in list(stats.last_up_times.items()):
downtime = now - last_up
if downtime > duration:
return '%s on %s has been down for %d minutes' % (
server, stats.name, downtime.total_seconds() // 60)
return func
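

# Usage sketch (illustrative, not part of the original module). A
# ProxyStats-like object only needs the attributes the conditions read:
# name, up_count, down_count and last_up_times. Hypothetical example:
#
#   from datetime import timedelta
#   checks = [more_than_n_down(2),
#             more_than_proportion_down(0.5),
#             any_down_longer_than(timedelta(minutes=5))]
#   errors = [msg for msg in (check(stats) for check in checks) if msg]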
| mit | -4,863,717,034,792,221,000 | 28.229167 | 72 | 0.655025 | false |
kernelci/kernelci-backend | app/handlers/tests/test_upload_handler.py | 1 | 3108 | # Copyright (C) Linaro Limited 2015
# Author: Milo Casagrande <[email protected]>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Test module for the UploadHandler."""
import tornado
import urls
from handlers.tests.test_handler_base import TestHandlerBase
class TestUploadHandler(TestHandlerBase):
def get_app(self):
return tornado.web.Application([urls._UPLOAD_URL], **self.settings)
def test_get(self):
headers = {"Authorization": "foo"}
response = self.fetch(
"/upload", method="GET", headers=headers)
self.assertEqual(response.code, 501)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
def test_get_no_token(self):
response = self.fetch("/upload", method="GET")
self.assertEqual(response.code, 403)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
def test_delete(self):
headers = {"Authorization": "foo"}
response = self.fetch(
"/upload", method="DELETE", headers=headers)
self.assertEqual(response.code, 501)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
def test_delete_no_token(self):
response = self.fetch("/upload", method="DELETE")
self.assertEqual(response.code, 403)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
def test_post_no_token(self):
response = self.fetch("/upload", method="POST", body="")
self.assertEqual(response.code, 403)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
def test_post_token_wrong_content(self):
headers = {
"Authorization": "foo",
"Content-Type": "application/json"
}
response = self.fetch(
"/upload", method="POST", body="", headers=headers)
self.assertEqual(response.code, 415)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
def test_post_token_missing_content(self):
headers = {
"Authorization": "foo"
}
response = self.fetch(
"/upload", method="POST", body="", headers=headers)
self.assertEqual(response.code, 415)
self.assertEqual(
response.headers["Content-Type"], self.content_type)
| lgpl-2.1 | -8,388,358,542,386,236,000 | 34.724138 | 79 | 0.651223 | false |
PrognosisML/ConvNet-Prostate | Malignancy Diagnosis/Process.py | 1 | 4723 | import os
import sys
import tensorflow as tf
import Input
import os, re
FLAGS = tf.app.flags.FLAGS
TOWER_NAME = 'tower'
tf.app.flags.DEFINE_integer('batch_size', 1, "Number of images per batch.")
def _activation_summary(x):
with tf.device('/cpu:0'):
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.summary.histogram(tensor_name + '/activations', x)
tf.summary.scalar(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
def inputs():
images, labels = Input.inputs(batch_size = FLAGS.batch_size)
return images, labels
def eval_inputs():
data_dir = 'VALIDATION'
images, labels = Input.eval_inputs(data_dir = data_dir, batch_size = 1)
return images, labels
def weight_variable(name, shape):
with tf.device('/gpu:0'):
initial = tf.random_normal(shape, stddev=0.035)
var = tf.Variable(initial, name)
return var
def bias_variable(shape):
with tf.device('/cpu:0'):
initial = tf.constant(0.1, shape = shape)
return tf.Variable(initial)
def conv(images, W):
return tf.nn.conv2d(images, W, strides = [1, 1, 1, 1], padding = 'SAME')
def forward_propagation(images, dropout_value):
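    # Layer stack built below (input 200x200x3):
    # conv1 11x11x10 + ReLU + 2x2 max-pool -> conv2 3x3x20 + pool ->
    # conv3 3x3x30 + pool -> conv4 3x3x30 -> conv5 3x3x15 ->
    # fully connected (25*25*15 units) -> dropout -> 4-way logits.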
with tf.variable_scope("conv1"):
with tf.device('/gpu:0'):
conv1_feature = weight_variable('conv1_feature', [11, 11, 3, 10])
conv1_bias = bias_variable([10])
image_matrix = tf.reshape(images, [-1, 200, 200, 3])
conv1_result = tf.nn.relu(conv(image_matrix, conv1_feature) + conv1_bias)
_activation_summary(conv1_result)
with tf.device('/cpu:0'):
kernel_transposed = tf.transpose(conv1_feature, [3, 0, 1, 2])
tf.summary.image('conv1/filters', kernel_transposed, max_outputs=10)
conv1_pool = tf.nn.max_pool(conv1_result, ksize = [1, 2, 2, 1], strides = [1, 2, 2, 1], padding = 'SAME')
with tf.variable_scope("conv2"):
conv2_feature = weight_variable('conv2_feature', [3, 3, 10, 20])
conv2_bias = bias_variable([20])
conv2_result = tf.nn.relu(conv(conv1_pool, conv2_feature) + conv2_bias)
_activation_summary(conv2_result)
conv2_pool = tf.nn.max_pool(conv2_result, ksize = [1, 2, 2, 1], strides = [1, 2, 2, 1], padding = 'SAME')
with tf.variable_scope("conv3"):
conv3_feature = weight_variable('conv3_feature', [3, 3, 20, 30])
conv3_bias = bias_variable([30])
conv3_result = tf.nn.relu(conv(conv2_pool, conv3_feature) + conv3_bias)
_activation_summary(conv3_result)
conv3_pool = tf.nn.max_pool(conv3_result, ksize = [1, 2, 2, 1], strides = [1, 2, 2, 1], padding = 'SAME')
with tf.variable_scope("conv4"):
conv4_feature = weight_variable('conv4_feature', [3, 3, 30, 30])
conv4_bias = bias_variable([30])
conv4_result = tf.nn.relu(conv(conv3_pool, conv4_feature) + conv4_bias)
with tf.variable_scope("conv5"):
conv5_feature = weight_variable('conv5_feature', [3, 3, 30, 15])
conv5_bias = bias_variable([15])
conv5_result = tf.nn.relu(conv(conv4_result, conv5_feature) + conv5_bias)
with tf.variable_scope("fcl"):
perceptron1_weight = weight_variable('perceptron1_weight', [25 * 25 * 15, 25 * 25 * 15])
perceptron1_bias = bias_variable([25 * 25 * 15])
flatten_dense_connect = tf.reshape(conv5_result, [-1, 25 * 25 * 15])
compute_perceptron1_layer = tf.nn.relu(tf.matmul(flatten_dense_connect, perceptron1_weight) + perceptron1_bias)
dropout = tf.nn.dropout(compute_perceptron1_layer, dropout_value)
_activation_summary(compute_perceptron1_layer)
perceptron2_weight = weight_variable('perceptron2_weight', [25 * 25 * 15, 4])
perceptron2_bias = bias_variable([4])
result1 = tf.matmul(dropout, perceptron2_weight) + perceptron2_bias
_activation_summary(result1)
return result1
def error(forward_propagation_results, labels):
with tf.device('/cpu:0'):
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=forward_propagation_results, labels=labels)
cost = tf.reduce_mean(cross_entropy)
tf.add_to_collection('loss', cost)
total_loss = tf.add_n(tf.get_collection('loss'), name='total_loss')
_activation_summary(total_loss)
return total_loss
def train(cost):
global_step = tf.Variable(0, trainable=False)
starter_learning_rate = 0.001
rate = tf.train.exponential_decay(starter_learning_rate, global_step, 100000, 0.95, staircase=True)
train_loss = tf.train.GradientDescentOptimizer(learning_rate = rate).minimize(cost, global_step=global_step)
return train_loss
| lgpl-3.0 | 7,402,761,261,317,179,000 | 40.069565 | 121 | 0.635613 | false |
hbp-brain-charting/public_protocols | spatial_navigation/protocol/py/genRndSequences.py | 1 | 2221 | # _*_ coding: utf-8 _*_
import itertools
import random
from csv_io import write_to_csv as writeToFile
import cPickle
'''
Sequences are generated as lists containing all possible combinations of intersections (4), directions (4), and targets (2).
Each entry is encoded as (intersection, direction (starting point), target); see defineOptions for the corresponding positions of each number.
'''
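# Example encoding (from the scheme above): (2, 0, 1) means intersection 2,
# approached from direction/starting point 0, with target 1.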
numSeq = 50
# experimental condition
sequences_exp = ()
while len(sequences_exp) <= numSeq:
seqFound = False
i = 1
print 'Start generating exp sequence...'
while seqFound == False:
sequence = list(itertools.product(range(4),range(4),range(2))) * 3
random.shuffle(sequence)
duplFound = False
i += 1
        for first, sec, third, fourth in zip(sequence, sequence[1:], sequence[2:], sequence[3:]):
if first[0:2] == sec[0:2] or first [0:2] == third[0:2] or first[0:3] == sec[0:3] or first [0:3] == third[0:3] or first[0:3] == fourth[0:3]:
duplFound = True
break
# if no adjacent duplicates are found, exit while loop
if duplFound == False:
seqFound = True
sequences_exp = sequences_exp + (sequence, )
writeToFile(sequence,'sequences_exp.csv')
print 'Done..., it took ', i
print len(sequences_exp)
output_exp = open('sequences_exp.pkl', 'wb')
cPickle.dump(sequences_exp,output_exp)
output_exp.close()
# control condition
sequences_ctrl = ()
while len(sequences_ctrl) <= numSeq:
seqFound = False
i = 1
print 'Start generating ctrl sequence...'
while seqFound == False:
sequence = list(itertools.product(range(4),range(4),range(4)))
random.shuffle(sequence)
duplFound = False
i += 1
for first, sec, third in zip(sequence, sequence[1:], sequence[2:]):
if first[0:2] == sec[0:2] or first [0:2] == third[0:2]:
duplFound = True
break
# if no adjacent duplicates are found, exit while loop
if duplFound == False:
seqFound = True
sequences_ctrl = sequences_ctrl + (sequence, )
writeToFile(sequence,'sequences_ctrl.csv')
print 'Done..., it took ', i
print len(sequences_ctrl)
output_ctrl = open('sequences_ctrl.pkl', 'wb')
cPickle.dump(sequences_ctrl,output_ctrl)
output_ctrl.close()
| bsd-3-clause | 7,523,443,422,153,598,000 | 21.642857 | 154 | 0.673727 | false |
ntymtsiv/tempest | tempest/tests/test_compute_xml_common.py | 1 | 2919 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
from tempest.services.compute.xml import common
from tempest.tests import base
class TestXMLParser(base.TestCase):
def test_xml_to_json_parser_bool_value(self):
node = etree.fromstring('''<health_monitor
xmlns="http://openstack.org/quantum/api/v2.0"
xmlns:quantum="http://openstack.org/quantum/api/v2.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<admin_state_up quantum:type="bool">False</admin_state_up>
<fake_state_up quantum:type="bool">True</fake_state_up>
</health_monitor>''')
body = common.xml_to_json(node)
self.assertEqual(body['admin_state_up'], False)
self.assertEqual(body['fake_state_up'], True)
def test_xml_to_json_parser_int_value(self):
node = etree.fromstring('''<health_monitor
xmlns="http://openstack.org/quantum/api/v2.0"
xmlns:quantum="http://openstack.org/quantum/api/v2.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<delay quantum:type="long">4</delay>
<max_retries quantum:type="int">3</max_retries>
</health_monitor>''')
body = common.xml_to_json(node)
self.assertEqual(body['delay'], 4L)
self.assertEqual(body['max_retries'], 3)
def test_xml_to_json_parser_text_value(self):
node = etree.fromstring('''<health_monitor
xmlns="http://openstack.org/quantum/api/v2.0"
xmlns:quantum="http://openstack.org/quantum/api/v2.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<status>ACTIVE</status>
</health_monitor>''')
body = common.xml_to_json(node)
self.assertEqual(body['status'], 'ACTIVE')
def test_xml_to_json_parser_list_as_value(self):
node = etree.fromstring('''<health_monitor
xmlns="http://openstack.org/quantum/api/v2.0"
xmlns:quantum="http://openstack.org/quantum/api/v2.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<elements>
<element>first_element</element>
<element>second_element</element>
</elements>
</health_monitor>''')
body = common.xml_to_json(node, 'elements')
self.assertEqual(body['elements'], ['first_element', 'second_element'])
| apache-2.0 | 8,565,960,721,395,748,000 | 42.567164 | 79 | 0.643714 | false |
yusufm/mobly | mobly/records.py | 1 | 11891 | #!/usr/bin/env python3.4
#
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module is where all the record definitions and record containers live.
"""
import json
import logging
import pprint
from mobly import signals
from mobly import utils
class TestResultEnums(object):
"""Enums used for TestResultRecord class.
    Includes the tokens to mark test results with, and the string names for each
field in TestResultRecord.
"""
RECORD_NAME = 'Test Name'
RECORD_CLASS = 'Test Class'
RECORD_BEGIN_TIME = 'Begin Time'
RECORD_END_TIME = 'End Time'
RECORD_RESULT = 'Result'
RECORD_UID = 'UID'
RECORD_EXTRAS = 'Extras'
RECORD_EXTRA_ERRORS = 'Extra Errors'
RECORD_DETAILS = 'Details'
TEST_RESULT_PASS = 'PASS'
TEST_RESULT_FAIL = 'FAIL'
TEST_RESULT_SKIP = 'SKIP'
TEST_RESULT_ERROR = 'ERROR'
class TestResultRecord(object):
"""A record that holds the information of a test case execution.
Attributes:
test_name: A string representing the name of the test case.
begin_time: Epoch timestamp of when the test case started.
end_time: Epoch timestamp of when the test case ended.
self.uid: Unique identifier of a test case.
self.result: Test result, PASS/FAIL/SKIP.
self.extras: User defined extra information of the test result.
self.details: A string explaining the details of the test case.
"""
def __init__(self, t_name, t_class=None):
self.test_name = t_name
self.test_class = t_class
self.begin_time = None
self.end_time = None
self.uid = None
self.result = None
self.extras = None
self.details = None
self.extra_errors = {}
def test_begin(self):
"""Call this when the test case it records begins execution.
Sets the begin_time of this record.
"""
self.begin_time = utils.get_current_epoch_time()
def _test_end(self, result, e):
"""Class internal function to signal the end of a test case execution.
Args:
result: One of the TEST_RESULT enums in TestResultEnums.
e: A test termination signal (usually an exception object). It can
be any exception instance or of any subclass of
mobly.signals.TestSignal.
"""
self.end_time = utils.get_current_epoch_time()
self.result = result
if self.extra_errors:
self.result = TestResultEnums.TEST_RESULT_ERROR
if isinstance(e, signals.TestSignal):
self.details = e.details
self.extras = e.extras
elif e:
self.details = str(e)
def test_pass(self, e=None):
"""To mark the test as passed in this record.
Args:
e: An instance of mobly.signals.TestPass.
"""
self._test_end(TestResultEnums.TEST_RESULT_PASS, e)
def test_fail(self, e=None):
"""To mark the test as failed in this record.
        Only test_fail does an instance check because we want 'assert xxx' to
        fail the test the same way assert_true does.
Args:
e: An exception object. It can be an instance of AssertionError or
mobly.base_test.TestFailure.
"""
self._test_end(TestResultEnums.TEST_RESULT_FAIL, e)
def test_skip(self, e=None):
"""To mark the test as skipped in this record.
Args:
e: An instance of mobly.signals.TestSkip.
"""
self._test_end(TestResultEnums.TEST_RESULT_SKIP, e)
def test_error(self, e=None):
"""To mark the test as error in this record.
Args:
e: An exception object.
"""
self._test_end(TestResultEnums.TEST_RESULT_ERROR, e)
def add_error(self, tag, e):
"""Add extra error happened during a test mark the test result as
ERROR.
If an error is added the test record, the record's result is equivalent
to the case where an uncaught exception happened.
Args:
tag: A string describing where this error came from, e.g. 'on_pass'.
e: An exception object.
"""
self.result = TestResultEnums.TEST_RESULT_ERROR
self.extra_errors[tag] = str(e)
def __str__(self):
d = self.to_dict()
l = ['%s = %s' % (k, v) for k, v in d.items()]
s = ', '.join(l)
return s
def __repr__(self):
"""This returns a short string representation of the test record."""
t = utils.epoch_to_human_time(self.begin_time)
return '%s %s %s' % (t, self.test_name, self.result)
def to_dict(self):
"""Gets a dictionary representating the content of this class.
Returns:
A dictionary representating the content of this class.
"""
d = {}
d[TestResultEnums.RECORD_NAME] = self.test_name
d[TestResultEnums.RECORD_CLASS] = self.test_class
d[TestResultEnums.RECORD_BEGIN_TIME] = self.begin_time
d[TestResultEnums.RECORD_END_TIME] = self.end_time
d[TestResultEnums.RECORD_RESULT] = self.result
d[TestResultEnums.RECORD_UID] = self.uid
d[TestResultEnums.RECORD_EXTRAS] = self.extras
d[TestResultEnums.RECORD_DETAILS] = self.details
d[TestResultEnums.RECORD_EXTRA_ERRORS] = self.extra_errors
return d
def json_str(self):
"""Converts this test record to a string in json format.
Format of the json string is:
{
'Test Name': <test name>,
'Begin Time': <epoch timestamp>,
'Details': <details>,
...
}
Returns:
A json-format string representing the test record.
"""
return json.dumps(self.to_dict())
class TestResult(object):
"""A class that contains metrics of a test run.
This class is essentially a container of TestResultRecord objects.
Attributes:
self.requested: A list of strings, each is the name of a test requested
by user.
self.failed: A list of records for tests failed.
self.executed: A list of records for tests that were actually executed.
self.passed: A list of records for tests passed.
self.skipped: A list of records for tests skipped.
self.error: A list of records for tests with error result token.
"""
def __init__(self):
self.requested = []
self.failed = []
self.executed = []
self.passed = []
self.skipped = []
self.error = []
self.controller_info = {}
def __add__(self, r):
"""Overrides '+' operator for TestResult class.
The add operator merges two TestResult objects by concatenating all of
their lists together.
Args:
r: another instance of TestResult to be added
Returns:
A TestResult instance that's the sum of two TestResult instances.
"""
if not isinstance(r, TestResult):
raise TypeError('Operand %s of type %s is not a TestResult.' %
(r, type(r)))
sum_result = TestResult()
for name in sum_result.__dict__:
r_value = getattr(r, name)
l_value = getattr(self, name)
if isinstance(r_value, list):
setattr(sum_result, name, l_value + r_value)
elif isinstance(r_value, dict):
# '+' operator for TestResult is only valid when multiple
# TestResult objs were created in the same test run, which means
# the controller info would be the same across all of them.
# TODO(angli): have a better way to validate this situation.
setattr(sum_result, name, l_value)
return sum_result
def add_record(self, record):
"""Adds a test record to test result.
A record is considered executed once it's added to the test result.
Args:
record: A test record object to add.
"""
self.executed.append(record)
if record.result == TestResultEnums.TEST_RESULT_FAIL:
self.failed.append(record)
elif record.result == TestResultEnums.TEST_RESULT_SKIP:
self.skipped.append(record)
elif record.result == TestResultEnums.TEST_RESULT_PASS:
self.passed.append(record)
else:
self.error.append(record)
def add_controller_info(self, name, info):
try:
json.dumps(info)
except TypeError:
logging.warning('Controller info for %s is not JSON serializable!'
' Coercing it to string.' % name)
self.controller_info[name] = str(info)
return
self.controller_info[name] = info
def fail_class(self, test_record):
"""Add a record to indicate a test class setup has failed and no test
in the class was executed.
Args:
test_record: A TestResultRecord object for the test class.
"""
self.executed.append(test_record)
self.failed.append(test_record)
@property
def is_all_pass(self):
"""True if no tests failed or threw errors, False otherwise."""
num_of_failures = len(self.failed) + len(self.error)
if num_of_failures == 0:
return True
return False
def json_str(self):
"""Converts this test result to a string in json format.
Format of the json string is:
{
'Results': [
{<executed test record 1>},
{<executed test record 2>},
...
],
'Summary': <summary dict>
}
Returns:
A json-format string representing the test results.
"""
d = {}
d['ControllerInfo'] = self.controller_info
d['Results'] = [record.to_dict() for record in self.executed]
d['Summary'] = self.summary_dict()
json_str = json.dumps(d, indent=4, sort_keys=True)
return json_str
def summary_str(self):
"""Gets a string that summarizes the stats of this test result.
        The summary provides the counts of how many test cases fall into each
category, like 'Passed', 'Failed' etc.
Format of the string is:
Requested <int>, Executed <int>, ...
Returns:
A summary string of this test result.
"""
l = ['%s %d' % (k, v) for k, v in self.summary_dict().items()]
# Sort the list so the order is the same every time.
msg = ', '.join(sorted(l))
return msg
def summary_dict(self):
"""Gets a dictionary that summarizes the stats of this test result.
        The summary provides the counts of how many test cases fall into each
category, like 'Passed', 'Failed' etc.
Returns:
A dictionary with the stats of this test result.
"""
d = {}
d['Requested'] = len(self.requested)
d['Executed'] = len(self.executed)
d['Passed'] = len(self.passed)
d['Failed'] = len(self.failed)
d['Skipped'] = len(self.skipped)
d['Error'] = len(self.error)
return d
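

# Usage sketch (illustrative, not part of the original module):
#
#   record = TestResultRecord('test_hello', 'HelloTest')
#   record.test_begin()
#   record.test_pass()
#   result = TestResult()
#   result.add_record(record)
#   result.summary_str()  # e.g. 'Error 0, Executed 1, Failed 0, ...'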
| apache-2.0 | 3,069,121,730,700,974,600 | 32.877493 | 80 | 0.593558 | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20170929C.py | 1 | 2063 | """
[2017-09-29] Challenge #333 [Hard] Build a Web API-driven Data Site
https://www.reddit.com/r/dailyprogrammer/comments/739j8c/20170929_challenge_333_hard_build_a_web_apidriven/
# Description
A common theme in present-day programming are web APIs. We've had a previous challenge where you had to _consume_ an
API, today's challenge is to _implement_ one. Today's is relatively simple: a single CSV file as input that can
probably be represented by a single database table.
Your solution may use whatever technologies you wish to build on:
* Web server software, e.g. Flask, Rails, Play!, etc
* Database software, e.g. MySQL, MongoDB, etc - or none, using a database is optional
* Database interaction layer, e.g. SQLAlchemy, ActiveRecord, Ecto, etc
This challenge focuses less on the guts of the server and more on routing requests, transforming a request into a data
extraction method, and returning those results.
Today's challenge will utilize the State of Iowa - Monthly Voter Registration Totals by County data set:
https://data.iowa.gov/Communities-People/State-of-Iowa-Monthly-Voter-Registration-Totals-by/cp55-uurs
Download the JSON, CSV or other and use that as your input. It contains 19 columns and over 20,000 rows. Now expose the
data via a web API.
Your solution **must** implement the following API behaviors:
* A "get_voters_where" endpoint that takes the following optional arguments: county, month, party affiliation,
active_status, and limit (the max number of results to return). The endpoint must return a JSON-formatted output, but
the schema is up to you.
* All APIs must be RESTful (see [The REST API in five minutes](https://developer.marklogic.com/try/rest/index) for some
background if you need it).
This challenge extends Wednesday's idea of practicality and real world scenarios. Wednesday was some basic data
science, today is some basic application development. It's open ended.
# Bonus
Ensure your API is immune to attack vectors like SQL injection.
"""
def main():
pass
if __name__ == "__main__":
main()
| mit | 5,035,554,185,857,980,000 | 53.289474 | 119 | 0.768299 | false |
Asurada2015/TFAPI_translation | NeuralNekworks_function/Activefunction/tf_sigmoid.py | 1 | 1199 | """tf.sigmoid(x, name = None)
Explanation: this function computes the sigmoid of x, using y = 1 / (1 + exp(-x)).
The return value lies in the interval [0.0, 1.0]: for large inputs tf.sigmoid returns a value close to 1.0, and for small inputs a value close to 0.0.
For neural networks trained on samples whose true outputs lie in [0.0, 1.0], sigmoid's ability to keep outputs within [0.0, 1.0] is very useful.
When the output approaches saturation or changes sharply, this squashing of the output can have some adverse effects.
When the input is 0, the sigmoid function outputs 0.5, the midpoint of its range.
Usage example:"""
import tensorflow as tf
a = tf.constant([[-1.0, -2.0], [1.0, 2.0], [0.0, 0.0]])
sess = tf.Session()
print(sess.run(tf.sigmoid(a)))
# [[ 0.26894143 0.11920292]
# [ 0.7310586 0.88079703]
# [ 0.5 0.5 ]]
"""
Input parameters:
    ● x: A Tensor. Its dtype must be float, double, int32, complex64, int64, or qint32.
    ● name: (optional) A name for the operation.
Output:
    ● A Tensor. If x.dtype != qint32, the returned dtype is the same as x; otherwise it is quint8."""
| apache-2.0 | 2,380,927,556,128,051,000 | 30.347826 | 75 | 0.676838 | false |
willingc/succulent-pups | config/settings/production.py | 1 | 5159 | # -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use mailgun to send emails
- Use Redis on Heroku
'''
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# django-secure
# ------------------------------------------------------------------------------
INSTALLED_APPS += ("djangosecure", )
SECURITY_MIDDLEWARE = (
'djangosecure.middleware.SecurityMiddleware',
)
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
MIDDLEWARE_CLASSES = SECURITY_MIDDLEWARE + MIDDLEWARE_CLASSES
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
AWS_S3_CALLING_FORMAT = OrdinaryCallingFormat()
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# Static Assests
# ------------------------
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='succulent-pups <[email protected]>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[succulent-pups] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db("DATABASE_URL")
# CACHING
# ------------------------------------------------------------------------------
# Heroku URL does not pass the DB number, so we parse it in
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "{0}/{1}".format(env.cache_url('REDIS_URL', default="redis://127.0.0.1:6379"), 0),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"IGNORE_EXCEPTIONS": True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
# Your production stuff: Below this line define 3rd party library settings
| bsd-3-clause | 3,801,262,370,812,564,500 | 36.115108 | 117 | 0.62706 | false |
InsightSoftwareConsortium/ITKExamples | src/Core/Common/ObserveAnEvent/Code.py | 1 | 1028 | #!/usr/bin/env python
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itk
Dimension = 2
PixelType = itk.UC
ImageType = itk.Image[PixelType, Dimension]
source = itk.GaussianImageSource[ImageType].New()
size = itk.Size[Dimension]()
size.Fill(128)
source.SetSize(size)
sigma = itk.FixedArray[itk.D, Dimension]()
sigma.Fill(45.0)
source.SetSigma(sigma)
def myCommand():
print("Progress: " + str(source.GetProgress()))
source.AddObserver(itk.ProgressEvent(), myCommand)
source.Update()
| apache-2.0 | 7,537,944,320,815,879,000 | 25.358974 | 74 | 0.747082 | false |
zasdfgbnm/tensorflow | tensorflow/python/estimator/util.py | 1 | 1811 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility to retrieve function args."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
def _is_bounded_method(fn):
_, fn = tf_decorator.unwrap(fn)
return tf_inspect.ismethod(fn) and (fn.__self__ is not None)
def _is_callable_object(obj):
return hasattr(obj, '__call__') and tf_inspect.ismethod(obj.__call__)
def fn_args(fn):
"""Get argument names for function-like object.
Args:
fn: Function, or function-like object (e.g., result of `functools.partial`).
Returns:
`tuple` of string argument names.
  """
if isinstance(fn, functools.partial):
args = fn_args(fn.func)
args = [a for a in args[len(fn.args):] if a not in (fn.keywords or [])]
else:
if _is_callable_object(fn):
fn = fn.__call__
args = tf_inspect.getfullargspec(fn).args
if _is_bounded_method(fn):
args.remove('self')
return tuple(args)
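# Illustrative examples (a sketch, not part of the public TensorFlow docs):
#
#   def f(a, b): pass
#   fn_args(f)                          # -> ('a', 'b')
#   fn_args(functools.partial(f, b=1))  # -> ('a',)
#   class C(object):
#     def m(self, x): pass
#   fn_args(C().m)                      # -> ('x',)  ('self' is removed)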
| apache-2.0 | -1,776,816,529,995,017,500 | 30.224138 | 80 | 0.680287 | false |
gkonstantyno/construct | construct/protocols/layer3/dhcpv4.py | 1 | 6429 | """
Dynamic Host Configuration Protocol for IPv4
http://www.networksorcery.com/enp/protocol/dhcp.htm
http://www.networksorcery.com/enp/protocol/bootp/options.htm
"""
from binascii import unhexlify
from construct import *
from ipv4 import IpAddress
dhcp_option = Struct("dhcp_option",
Enum(Byte("code"),
Pad = 0,
Subnet_Mask = 1,
Time_Offset = 2,
Router = 3,
Time_Server = 4,
Name_Server = 5,
Domain_Name_Server = 6,
Log_Server = 7,
Quote_Server = 8,
LPR_Server = 9,
Impress_Server = 10,
Resource_Location_Server = 11,
Host_Name = 12,
Boot_File_Size = 13,
Merit_Dump_File = 14,
Domain_Name = 15,
Swap_Server = 16,
Root_Path = 17,
Extensions_Path = 18,
IP_Forwarding_enabledisable = 19,
Nonlocal_Source_Routing_enabledisable = 20,
Policy_Filter = 21,
Maximum_Datagram_Reassembly_Size = 22,
Default_IP_TTL = 23,
Path_MTU_Aging_Timeout = 24,
Path_MTU_Plateau_Table = 25,
Interface_MTU = 26,
All_Subnets_are_Local = 27,
Broadcast_Address = 28,
Perform_Mask_Discovery = 29,
Mask_supplier = 30,
Perform_router_discovery = 31,
Router_solicitation_address = 32,
Static_routing_table = 33,
Trailer_encapsulation = 34,
ARP_cache_timeout = 35,
Ethernet_encapsulation = 36,
Default_TCP_TTL = 37,
TCP_keepalive_interval = 38,
TCP_keepalive_garbage = 39,
Network_Information_Service_domain = 40,
Network_Information_Servers = 41,
NTP_servers = 42,
Vendor_specific_information = 43,
NetBIOS_over_TCPIP_name_server = 44,
NetBIOS_over_TCPIP_Datagram_Distribution_Server = 45,
NetBIOS_over_TCPIP_Node_Type = 46,
NetBIOS_over_TCPIP_Scope = 47,
X_Window_System_Font_Server = 48,
X_Window_System_Display_Manager = 49,
Requested_IP_Address = 50,
IP_address_lease_time = 51,
Option_overload = 52,
DHCP_message_type = 53,
Server_identifier = 54,
Parameter_request_list = 55,
Message = 56,
Maximum_DHCP_message_size = 57,
Renew_time_value = 58,
Rebinding_time_value = 59,
Class_identifier = 60,
Client_identifier = 61,
NetWareIP_Domain_Name = 62,
NetWareIP_information = 63,
Network_Information_Service_Domain = 64,
Network_Information_Service_Servers = 65,
TFTP_server_name = 66,
Bootfile_name = 67,
Mobile_IP_Home_Agent = 68,
Simple_Mail_Transport_Protocol_Server = 69,
Post_Office_Protocol_Server = 70,
Network_News_Transport_Protocol_Server = 71,
Default_World_Wide_Web_Server = 72,
Default_Finger_Server = 73,
Default_Internet_Relay_Chat_Server = 74,
StreetTalk_Server = 75,
StreetTalk_Directory_Assistance_Server = 76,
User_Class_Information = 77,
SLP_Directory_Agent = 78,
SLP_Service_Scope = 79,
Rapid_Commit = 80,
Fully_Qualified_Domain_Name = 81,
Relay_Agent_Information = 82,
Internet_Storage_Name_Service = 83,
NDS_servers = 85,
NDS_tree_name = 86,
NDS_context = 87,
BCMCS_Controller_Domain_Name_list = 88,
BCMCS_Controller_IPv4_address_list = 89,
Authentication = 90,
Client_last_transaction_time = 91,
Associated_ip = 92,
Client_System_Architecture_Type = 93,
Client_Network_Interface_Identifier = 94,
Lightweight_Directory_Access_Protocol = 95,
Client_Machine_Identifier = 97,
Open_Group_User_Authentication = 98,
Autonomous_System_Number = 109,
NetInfo_Parent_Server_Address = 112,
NetInfo_Parent_Server_Tag = 113,
URL = 114,
Auto_Configure = 116,
Name_Service_Search = 117,
Subnet_Selection = 118,
DNS_domain_search_list = 119,
SIP_Servers_DHCP_Option = 120,
Classless_Static_Route_Option = 121,
CableLabs_Client_Configuration = 122,
GeoConf = 123,
),
Switch("value", lambda ctx: ctx.code,
{
# codes without any value
"Pad" : Pass,
},
# codes followed by length and value fields
default = Struct("value",
Byte("length"),
Field("data", lambda ctx: ctx.length),
)
)
)
dhcp_header = Struct("dhcp_header",
Enum(Byte("opcode"),
BootRequest = 1,
BootReply = 2,
),
Enum(Byte("hardware_type"),
Ethernet = 1,
Experimental_Ethernet = 2,
ProNET_Token_Ring = 4,
Chaos = 5,
IEEE_802 = 6,
ARCNET = 7,
Hyperchannel = 8,
Lanstar = 9,
),
Byte("hardware_address_length"),
Byte("hop_count"),
UBInt32("transaction_id"),
UBInt16("elapsed_time"),
BitStruct("flags",
Flag("boardcast"),
Padding(15),
),
IpAddress("client_addr"),
IpAddress("your_addr"),
IpAddress("server_addr"),
IpAddress("relay_addr"),
Bytes("client_hardware_addr", 16),
Bytes("server_host_name", 64),
Bytes("boot_filename", 128),
# BOOTP/DHCP options
# "The first four bytes contain the (decimal) values 99, 130, 83 and 99"
Const("magic", b"\x63\x82\x53\x63"),
Rename("options", OptionalGreedyRange(dhcp_option)),
)
if __name__ == "__main__":
test = unhexlify(
b"0101060167c05f5a00000000"
"0102030405060708090a0b0c"
"0d0e0f10"
"DEADBEEFBEEF"
"000000000000000000000000000000000000000000000000000000"
"000000000000000000000000000000000000000000000000000000"
"000000000000000000000000000000000000000000000000000000"
"000000000000000000000000000000000000000000000000000000"
"000000000000000000000000000000000000000000000000000000"
"000000000000000000000000000000000000000000000000000000"
"000000000000000000000000000000000000000000000000000000"
"00000000000000000000000000"
"63825363"
"3501083d0701DEADBEEFBEEF0c04417375733c084d53465420352e"
"30370d010f03062c2e2f1f2179f92bfc52210117566c616e333338"
"382b45746865726e6574312f302f32340206f8f0827348f9ff"
)
print(dhcp_header.parse(test))
| mit | -2,805,672,608,556,438,000 | 31.969231 | 76 | 0.605226 | false |
gtalarico/pyrevitplus | pyRevitPlus.tab/Smart Align.panel/smartalign.stack/Lib/smartalign/distribute.py | 1 | 3365 | """
Smart Align
Provides Aligning functionality for various Revit Objects.
TESTED REVIT API: 2015 | 2016
Copyright (c) 2014-2016 Gui Talarico
github.com/gtalarico | @gtalarico
This script is part of PyRevitPlus: Extensions for PyRevit
github.com/gtalarico | @gtalarico
--------------------------------------------------------
PyRevit Notice:
Copyright (c) 2014-2016 Ehsan Iran-Nejad
pyRevit: repository at https://github.com/eirannejad/pyRevit
"""
__author__ = '@gtalarico'
__version__ = '0.4.0'
import sys
import os
sys.path.append(os.path.dirname(__file__))
from Autodesk.Revit.DB import XYZ
from Autodesk.Revit.DB import Transaction
from core import logger
from core import Align
from core import PointElement, PointCollection, BoundingBoxElement
from core import get_location, get_selected_elements, move_element
from core import TOLERANCE
doc = __revit__.ActiveUIDocument.Document
uidoc = __revit__.ActiveUIDocument
def get_division_steps(delta, qty_items):
"""ADD DOC: Move to Point Collection"""
step = abs(delta/(qty_items-1))
steps = []
for i in range(0, qty_items):
steps.append(i*step)
logger.debug('Step is: {}'.format(step))
return steps
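# Example (illustrative): get_division_steps(10.0, 3) -> [0.0, 5.0, 10.0],
# i.e. delta is split into qty_items - 1 equal intervals.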
def main(ALIGN):
""" ADD DOCS
"""
align_axis = Align.axis[ALIGN]
align_method = Align.method[ALIGN]
logger.info('Align Class: {}'.format(ALIGN))
logger.debug('Align Axis: {}'.format(align_axis))
logger.debug('Align Methid: {}'.format(align_method))
elements = get_selected_elements()
point_collection = PointCollection()
for element in elements:
point_element = get_location(element, align_method)
if point_element:
point_element.element = element
point_collection.points.append(point_element)
point_collection.sort_points(align_axis)
qty_items = len(point_collection)
min_target = getattr(point_collection, 'min')
max_target = getattr(point_collection, 'max')
delta = getattr(max_target, align_axis) - getattr(min_target, align_axis)
steps = get_division_steps(delta, qty_items)
target_locations = [ getattr(min_target, align_axis) + step for step in steps]
logger.debug('Min Location Target is: {}'.format(min_target))
logger.debug('Max Location Target is: {}'.format(max_target))
logger.debug('delta is: {}'.format(str(delta)))
logger.debug('steps: {}'.format(steps))
logger.debug('targer_locations: {}'.format(target_locations))
t = Transaction(doc, 'Smart Align - Distribute')
t.Start()
for point_element, target_location in zip(point_collection, target_locations):
current_location = getattr(point_element, align_axis)
delta = current_location - target_location
delta_vector = PointElement(0, 0, 0)
setattr(delta_vector, align_axis,-delta)
translation = XYZ(*delta_vector.as_tuple)
move_element(point_element.element, translation)
logger.debug('current: {}'.format(current_location))
logger.debug('target: {}'.format(target_location))
logger.debug('delta: {}'.format(delta))
logger.debug('delta_vector: {}'.format(delta_vector))
logger.debug('Translation: {}'.format(str(translation)))
logger.info('Done.')
t.Commit()
| gpl-3.0 | -8,550,756,224,317,934,000 | 31.316832 | 82 | 0.653492 | false |
ludoo/wpkit | attic/wpfrontman/wp_frontman/tests/test_cache.py | 1 | 1227 | import unittest
import time
from django.core.cache import cache
from wp_frontman.cache import get_key, get_object_key, get_object_value, set_object_value, cache_timestamps
class DummyObject(object):
def __init__(self, id):
self.id = id
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
class CacheTestCase(unittest.TestCase):
def setUp(self):
cache.clear()
def testNotDummyCache(self):
self.assertNotEqual(repr(type(cache)), "<class 'django.core.cache.backends.dummy.CacheClass'>")
def testObjectCache(self):
obj = DummyObject(5)
obj_key = get_object_key(1, 'post', obj)
self.assertTrue(not get_object_value(1, obj_key, ('post', 'comment_post')))
set_object_value(obj_key, obj)
self.assertEqual(get_object_value(1, obj_key, ('post', 'comment_post')), obj)
cache_timestamps(1, 'comment', dict(id=1000, post_id=5), time.time())
self.assertEqual(get_object_value(1, obj_key, ('post', 'comment_post')), None)
| bsd-3-clause | 3,687,988,769,116,285,400 | 29.7 | 107 | 0.607172 | false |
boknilev/diacritization | extract_data.py | 1 | 7629 | __author__ = 'belinkov'
import sys
import os
import codecs
import re
from data_utils import DIACS, REGEX_DIACS
REGEX_SOLUTION_DIAC = re.compile(r'\((.+?)\)') # for gold diacritized word
class WordAnalysis(object):
"""
A simplified pos analysis from treebank pos/before-treebank files.
Attributes:
input_string (str): INPUT STRING from LDC file
lookup_word (str): LOOK-UP WORD from LDC file (if exists)
comment (str): Comment from LDC file
index (str): INDEX from LDC file
gold_solution (str): the gold * SOLUTION from LDC file
word (str): for Arabic words, same as lookup_word with diacritics removed;
for non-Arabic words, same as input_string
word_diac (str): for Arabic words, the diacritized lookup_word from gold_solution;
for non-Arabic words, same as input_string
"""
def __init__(self, input_string, comment, index, gold_solution=None, lookup_word=None):
self.input_string = input_string
self.comment = comment
self.index = index
self.gold_solution = gold_solution
self.lookup_word = lookup_word
# if this is an Arabic script word
if lookup_word:
self.word = REGEX_DIACS.sub('', lookup_word)
if gold_solution:
match = REGEX_SOLUTION_DIAC.match(gold_solution)
if not match:
sys.stderr.write('Warning: could not find diacritized solution in: ' + gold_solution + '. ' + \
'Writing lookup word as is: ' + lookup_word + '\n')
self.word_diac = lookup_word
else:
self.word_diac = match.groups()[0]
self.check_match()
# there may be no solution if the word is unknown, so just write the lookup word
else:
self.word_diac = lookup_word
# this is a non-Arabic script word
else:
            # TODO consider marking as Latin words (and exclude later)
self.word = input_string
self.word_diac = input_string
def check_match(self):
"""
Check match between word and word_diac
"""
if REGEX_DIACS.sub('', self.word_diac) != self.word:
sys.stderr.write('Warning: word ' + self.word + ' != word_diac ' + self.word_diac + \
' after removing diacritics. Attempting to correct\n')
self.unnormalize()
if REGEX_DIACS.sub('', self.word_diac) != self.word:
sys.stderr.write('Warning: could not correct, word ' + self.word + ' != word_diac ' + \
self.word_diac + '. Using undiacritized word_diac as word.\n')
self.word = REGEX_DIACS.sub('', self.word_diac)
if REGEX_DIACS.sub('', self.word_diac) != self.word:
sys.stderr.write('Warning: still word ' + self.word + ' != word_diac ' + self.word_diac + '\n')
def unnormalize(self):
"""
Try to reverse Buckwalter normalizations on diacritized word
"""
# first, remove "_" (elongation character)
self.word = self.word.replace('_', '')
self.word_diac = self.word_diac.replace('_', '')
# next, check for normalization mismatches
word_ind = 0
word_diac_ind = 0
new_word_diac = ''
while word_ind < len(self.word) and word_diac_ind < len(self.word_diac):
word_char = self.word[word_ind]
word_diac_char = self.word_diac[word_diac_ind]
if word_char == word_diac_char:
new_word_diac += word_diac_char
word_ind += 1
word_diac_ind += 1
elif word_diac_char in DIACS:
new_word_diac += word_diac_char
word_diac_ind += 1
else:
# this is probably a normalization
# print 'word_char:', word_char, 'word_diac_char:', word_diac_char
new_word_diac += word_char
word_ind += 1
word_diac_ind += 1
if word_ind == len(self.word) and word_diac_ind == len(self.word_diac) - 1:
# if we have one more char in word_diac
word_diac_char = self.word_diac[word_diac_ind]
if word_diac_char in DIACS:
new_word_diac += word_diac_char
self.word_diac = new_word_diac
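# Illustrative walk-through of unnormalize() (an assumption: DIACS covers the
# Buckwalter short-vowel/sukun characters such as 'i' and 'o'):
#   word = 'Abn' (normalized), word_diac = '<ibon'
#   '<' != 'A' and '<' is not a diacritic, so the normalized 'A' is kept,
#   giving new_word_diac = 'Aibon'; stripping diacritics then yields 'Abn',
#   which matches word.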
def process_treebank_file(treebank_filename, output_file, output_file_diac):
"""
Extract data from a treebank file
:param treebank_filename: pos/before-treebank file
:param output_file: file to write words without diacritics
:param output_file_diac: file to write words with diacritics
:return:
"""
print 'extracting data from file:', treebank_filename
f = codecs.open(treebank_filename, encoding='utf8')
input_string, comment, index, gold_solution, lookup_word = ['']*5
prev_index = '' # keep track of previous index
for line in f:
if line.strip() == '':
if input_string == '':
continue
word_analysis = WordAnalysis(input_string, comment, index, gold_solution, lookup_word)
# check for a new paragraph
if prev_index.startswith('P') and index.startswith('P') and not prev_index.startswith(index.split('W')[0]):
output_file.write('\n')
output_file_diac.write('\n')
output_file.write(word_analysis.word + '\n')
output_file_diac.write(word_analysis.word_diac + '\n')
prev_index = index
input_string, comment, index, gold_solution, lookup_word = ['']*5
else:
splt = line.strip().split(':', 1)
if len(splt) != 2:
sys.stderr.write('Warning: could not split line on :, in: ' + line + '\n')
continue
field_name, field_val = splt[0].strip(), splt[1].strip()
if field_name == 'INPUT STRING':
input_string = field_val
elif field_name == 'LOOK-UP WORD':
lookup_word = field_val
elif field_name == 'Comment':
comment = field_val
elif field_name == 'INDEX':
index = field_val
elif field_name.startswith('* SOLUTION'):
gold_solution = field_val
elif field_name.startswith('SOLUTION') or field_name == '(GLOSS)':
continue
else:
sys.stderr.write('Warning: unkown field: ' + field_name + '\n')
f.close()
def process_dir(treebank_dir, output_filename, output_filename_diac):
"""
Extract data from a treebank dir
:param treebank_dir: pos/before-treebank directory
    :param output_filename: file to write words without diacritics
    :param output_filename_diac: file to write words with diacritics
:return:
"""
print 'processing treebank dir:', treebank_dir
g = codecs.open(output_filename, 'w', encoding='utf8')
g_diac = codecs.open(output_filename_diac, 'w', encoding='utf8')
for f in os.listdir(treebank_dir):
process_treebank_file(treebank_dir + '/' + f, g, g_diac)
g.close()
g_diac.close()
print 'written words to:', output_filename
print 'written diacritized words to:', output_filename_diac
if __name__ == '__main__':
if len(sys.argv) == 4:
process_dir(sys.argv[1], sys.argv[2], sys.argv[3])
else:
print 'USAGE: python ' + sys.argv[0] + ' <treebank dir> <output word file> <output diacritized word file>'
| mit | 4,356,671,307,383,967,000 | 38.528497 | 119 | 0.565998 | false |
tpouyer/nova-lxd | nova_lxd/tests/session/test_profile.py | 1 | 2791 | # Copyright 2015 Canonical Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
import ddt
import mock
from nova import exception
from nova import test
from pylxd.deprecated import exceptions as lxd_exceptions
from nova_lxd.nova.virt.lxd import session
from nova_lxd.tests import fake_api
from nova_lxd.tests import stubs
@ddt.ddt
class SessionProfileTest(test.NoDBTestCase):
def setUp(self):
super(SessionProfileTest, self).setUp()
"""This is so we can mock out pylxd API calls."""
self.ml = stubs.lxd_mock()
lxd_patcher = mock.patch('pylxd.api.API',
mock.Mock(return_value=self.ml))
lxd_patcher.start()
self.addCleanup(lxd_patcher.stop)
self.session = session.LXDAPISession()
@stubs.annotated_data(
('empty', [], []),
('valid', ['test'], ['test']),
)
def test_profile_list(self, tag, side_effect, expected):
self.ml.profile_list.return_value = side_effect
self.assertEqual(expected,
self.session.profile_list())
def test_profile_list_fail(self):
self.ml.profile_list.side_effect = (
lxd_exceptions.APIError('Fake', 500))
self.assertRaises(
exception.NovaException,
self.session.profile_list)
def test_profile_create(self):
instance = stubs._fake_instance()
config = mock.Mock()
self.ml.profile_defined.return_value = True
self.ml.profile_create.return_value = \
(200, fake_api.fake_standard_return())
self.assertEqual((200, fake_api.fake_standard_return()),
self.session.profile_create(config,
instance))
calls = [mock.call.profile_list(),
mock.call.profile_create(config)]
self.assertEqual(calls, self.ml.method_calls)
def test_profile_delete(self):
instance = stubs._fake_instance()
self.ml.profile_defined.return_value = True
self.ml.profile_delete.return_value = \
(200, fake_api.fake_standard_return())
self.assertEqual(None,
self.session.profile_delete(instance))
| apache-2.0 | -6,601,287,437,658,835,000 | 34.329114 | 76 | 0.626657 | false |
Sout/sigrok-meter | sigrok_meter/util.py | 1 | 3572 | ##
## This file is part of the sigrok-meter project.
##
## Copyright (C) 2015 Jens Steinhauser <[email protected]>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
import sigrok.core as sr
def format_unit(u):
units = {
sr.Unit.VOLT: 'V',
sr.Unit.AMPERE: 'A',
sr.Unit.OHM: u'\u03A9',
sr.Unit.FARAD: 'F',
sr.Unit.KELVIN: 'K',
sr.Unit.CELSIUS: u'\u00B0C',
sr.Unit.FAHRENHEIT: u'\u00B0F',
sr.Unit.HERTZ: 'Hz',
sr.Unit.PERCENTAGE: '%',
# sr.Unit.BOOLEAN
sr.Unit.SECOND: 's',
sr.Unit.SIEMENS: 'S',
sr.Unit.DECIBEL_MW: 'dBm',
sr.Unit.DECIBEL_VOLT: 'dBV',
# sr.Unit.UNITLESS
sr.Unit.DECIBEL_SPL: 'dB',
# sr.Unit.CONCENTRATION
sr.Unit.REVOLUTIONS_PER_MINUTE: 'rpm',
sr.Unit.VOLT_AMPERE: 'VA',
sr.Unit.WATT: 'W',
sr.Unit.WATT_HOUR: 'Wh',
sr.Unit.METER_SECOND: 'm/s',
sr.Unit.HECTOPASCAL: 'hPa',
sr.Unit.HUMIDITY_293K: '%rF',
sr.Unit.DEGREE: u'\u00B0',
sr.Unit.HENRY: 'H'
}
return units.get(u, '')
def quantity_from_unit(u):
quantities = {
sr.Unit.VOLT: 'Voltage',
sr.Unit.AMPERE: 'Current',
sr.Unit.OHM: 'Resistance',
sr.Unit.FARAD: 'Capacity',
sr.Unit.KELVIN: 'Temperature',
sr.Unit.CELSIUS: 'Temperature',
sr.Unit.FAHRENHEIT: 'Temperature',
sr.Unit.HERTZ: 'Frequency',
sr.Unit.PERCENTAGE: 'Duty Cycle',
sr.Unit.BOOLEAN: 'Continuity',
sr.Unit.SECOND: 'Time',
sr.Unit.SIEMENS: 'Conductance',
sr.Unit.DECIBEL_MW: 'Power Ratio',
sr.Unit.DECIBEL_VOLT: 'Voltage Ratio',
sr.Unit.UNITLESS: 'Unitless Quantity',
sr.Unit.DECIBEL_SPL: 'Sound Pressure',
sr.Unit.CONCENTRATION: 'Concentration',
sr.Unit.REVOLUTIONS_PER_MINUTE: 'Revolutions',
sr.Unit.VOLT_AMPERE: 'Apparent Power',
sr.Unit.WATT: 'Power',
sr.Unit.WATT_HOUR: 'Energy',
sr.Unit.METER_SECOND: 'Velocity',
sr.Unit.HECTOPASCAL: 'Pressure',
sr.Unit.HUMIDITY_293K: 'Humidity',
sr.Unit.DEGREE: 'Angle',
sr.Unit.HENRY: 'Inductance'
}
return quantities.get(u, '')
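# Examples (illustrative, read straight from the mappings above):
#   format_unit(sr.Unit.VOLT)          -> 'V'
#   quantity_from_unit(sr.Unit.HERTZ)  -> 'Frequency'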
| gpl-3.0 | 1,200,301,977,577,231,000 | 41.023529 | 76 | 0.4972 | false |
RudolfCardinal/crate | crate_anon/nlp_manager/parse_biochemistry.py | 1 | 65975 | #!/usr/bin/env python
"""
crate_anon/nlp_manager/parse_biochemistry.py
===============================================================================
Copyright (C) 2015-2021 Rudolf Cardinal ([email protected]).
This file is part of CRATE.
CRATE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CRATE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CRATE. If not, see <http://www.gnu.org/licenses/>.
===============================================================================
**Python regex-based NLP processors for biochemistry data.**
All inherit from
:class:`crate_anon.nlp_manager.regex_parser.SimpleNumericalResultParser` and
are constructed with these arguments:
nlpdef:
a :class:`crate_anon.nlp_manager.nlp_definition.NlpDefinition`
cfgsection:
the name of a CRATE NLP config file section (from which we may
choose to get extra config information)
commit:
force a COMMIT whenever we insert data? You should specify this
in multiprocess mode, or you may get database deadlocks.
"""
import logging
from typing import List, Optional, Tuple, Union
from cardinal_pythonlib.logs import main_only_quicksetup_rootlogger
from crate_anon.common.regex_helpers import (
regex_or,
WORD_BOUNDARY,
)
from crate_anon.nlp_manager.nlp_definition import NlpDefinition
from crate_anon.nlp_manager.number import to_float
from crate_anon.nlp_manager.regex_parser import (
make_simple_numeric_regex,
OPTIONAL_POC,
SimpleNumericalResultParser,
ValidatorBase,
)
from crate_anon.nlp_manager.regex_read_codes import (
ReadCodes,
regex_components_from_read_codes,
)
from crate_anon.nlp_manager.regex_units import (
factor_micromolar_from_mg_per_dl,
factor_millimolar_from_mg_per_dl,
G,
G_PER_L,
MG,
MG_PER_DL,
MG_PER_L,
MICROEQ_PER_L,
MICROMOLAR,
micromolar_from_mg_per_dl,
MICROMOLES_PER_L,
MICROUNITS_PER_ML,
MILLIEQ_PER_L,
MILLIMOLAR,
millimolar_from_mg_per_dl,
MILLIMOLES_PER_L,
MILLIMOLES_PER_MOL,
MILLIUNITS_PER_L,
PERCENT,
UNITS_PER_L,
)
log = logging.getLogger(__name__)
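# For ad-hoc checking outside a full NLP pipeline, each parser below can be
# instantiated with nlpdef=None and cfg_processor_name=None (both are
# Optional) and exercised via its test() method. A minimal sketch; the exact
# logging setup is an assumption:
#
#     main_only_quicksetup_rootlogger(level=logging.DEBUG)
#     Crp(None, None).test(verbose=True)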
# =============================================================================
# C-reactive protein (CRP)
# =============================================================================
class Crp(SimpleNumericalResultParser):
"""
C-reactive protein (CRP).
CRP units:
- mg/L is commonest in the UK (or at least standard at Addenbrooke's,
Hinchingbrooke, and Dundee);
- values of <=6 mg/L or <10 mg/L are normal, and e.g. 70-250 mg/L in
pneumonia.
- Refs include:
- http://www.ncbi.nlm.nih.gov/pubmed/7705110
- http://emedicine.medscape.com/article/2086909-overview
- 1 mg/dL = 10 mg/L, so normal in mg/dL is <=1 roughly.
"""
CRP_BASE = fr"""
{WORD_BOUNDARY}
(?: (?: C [-\s]+ reactive [\s]+ protein ) | CRP )
{WORD_BOUNDARY}
"""
CRP = regex_or(
*regex_components_from_read_codes(
ReadCodes.CRP_PLASMA,
ReadCodes.CRP_SERUM,
),
CRP_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=CRP,
units=regex_or(
MG_PER_DL,
MG_PER_L
),
optional_ignorable_after_quantity=OPTIONAL_POC
)
NAME = "CRP"
PREFERRED_UNIT_COLUMN = "value_mg_L"
UNIT_MAPPING = {
MG_PER_L: 1, # preferred unit
MG_PER_DL: 10, # 1 mg/dL -> 10 mg/L
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
self.test_numerical_parser([
("CRP", []), # should fail; no values
("CRP 6", [6]),
("C-reactive protein 6", [6]),
("C reactive protein 6", [6]),
("CRP = 6", [6]),
("CRP 6 mg/dl", [60]),
("CRP: 6", [6]),
("CRP equals 6", [6]),
("CRP is equal to 6", [6]),
("CRP <1", [1]),
("CRP less than 1", [1]),
("CRP <1 mg/dl", [10]),
("CRP >250", [250]),
("CRP more than 1", [1]),
("CRP greater than 1", [1]),
("CRP >250 mg/dl", [2500]),
("CRP was 62", [62]),
("CRP was 62 mg/l", [62]),
("CRP was <1", [1]),
("CRP is 19.2", [19.2]),
("CRP is >250", [250]),
("CRP is 19 mg dl-1", [190]),
("CRP is 19 mg dl -1", [190]),
("CRP 1.9 mg/L", [1.9]),
("CRP-97", [97]),
("CRP 1.9 mg L-1", [1.9]),
("CRP | 1.9 (H) | mg/L", [1.9]),
("Plasma C-reactive protein level (XE2dy) 45 mg/L", [45]),
("Serum C reactive protein level (XaINL) 45 mg/L", [45]),
("CRP (mg/L) 62", [62]),
], verbose=verbose)
class CrpValidator(ValidatorBase):
"""
Validator for CRP
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Crp.NAME, [Crp.CRP]
# =============================================================================
# Sodium (Na)
# =============================================================================
# ... handy to check approximately expected distribution of results!
class Sodium(SimpleNumericalResultParser):
"""
Sodium (Na).
"""
SODIUM_BASE = fr"""
{WORD_BOUNDARY} (?: Na | Sodium ) {WORD_BOUNDARY}
"""
SODIUM = regex_or(
*regex_components_from_read_codes(
ReadCodes.SODIUM,
ReadCodes.SODIUM_BLOOD,
ReadCodes.SODIUM_PLASMA,
ReadCodes.SODIUM_SERUM,
),
SODIUM_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=SODIUM,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MILLIEQ_PER_L, # good
MG, # bad
),
optional_ignorable_after_quantity=OPTIONAL_POC
)
NAME = "Sodium"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MILLIEQ_PER_L: 1,
# but not MG
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
self.test_numerical_parser([
("Na", []), # should fail; no values
("Na 120", [120]),
("sodium 153", [153]),
("Na 135 mEq/L", [135]),
("Na 139 mM", [139]),
("docusate sodium 100mg", []),
("Present: Nicola Adams (NA). 1.0 Minutes of last meeting", []),
("Present: Nicola Adams (NA) 1.0 Minutes of last meeting", []),
("Na (H) 145 mM", [145]),
("Na (*) 145 mM", [145]),
("Na (X) 145 mM", []),
("blah (Na) 145 mM", []),
("Na (145) something", [145]),
("Na (145 mM), others", [145]),
("Na-145", [145]),
("Sodium level (X771T) 145", [145]),
("Blood sodium level (XaDva) 145", [145]),
("Plasma sodium level (XaIRf) 145", [145]),
("Serum sodium level (XE2q0) 145", [145]),
("Serum sodium level (mmol/L) 137", [137]),
], verbose=verbose)
class SodiumValidator(ValidatorBase):
"""
Validator for Sodium
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Sodium.NAME, [Sodium.SODIUM]
# =============================================================================
# Potassium (K)
# =============================================================================
class Potassium(SimpleNumericalResultParser):
"""
Potassium (K).
"""
POTASSIUM_BASE = fr"""
{WORD_BOUNDARY} (?: K | Potassium ) {WORD_BOUNDARY}
"""
POTASSIUM = regex_or(
POTASSIUM_BASE,
*regex_components_from_read_codes(
ReadCodes.POTASSIUM,
ReadCodes.POTASSIUM_BLOOD,
ReadCodes.POTASSIUM_PLASMA,
ReadCodes.POTASSIUM_SERUM,
),
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=POTASSIUM,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MILLIEQ_PER_L, # good
MG, # bad
),
optional_ignorable_after_quantity=OPTIONAL_POC
)
NAME = "Potassium"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MILLIEQ_PER_L: 1,
# but not MG
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
self.test_numerical_parser([
("K", []), # should fail; no values
("K 4", [4]),
("Potassium 4.3", [4.3]),
("K 4.5 mEq/L", [4.5]),
("K 4.5 mM", [4.5]),
("losartan potassium 50mg", []),
("Present: Kerry Smith (K). 1.0 Minutes of last meeting", []),
("Present: Kerry Smith (K) 1.0 Minutes of last meeting", []),
("K (H) 5.6 mM", [5.6]),
("K (*) 5.6 mM", [5.6]),
("K (X) 5.6 mM", []),
("blah (K) 5.6 mM", []),
("K (5.6) something", [5.6]),
("K (5.6 mM), others", [5.6]),
("K-3.2", [3.2]),
("Potassium level (X771S) 3.2", [3.2]),
("Blood potassium level (XaDvZ) 3.2", [3.2]),
("Plasma potassium level (XaIRl) 3.2", [3.2]),
("Serum potassium level (XE2pz) 3.2", [3.2]),
("Serum potassium level (XaIRl) 3.2", []), # wrong code
], verbose=verbose)
class PotassiumValidator(ValidatorBase):
"""
Validator for Potassium
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Potassium.NAME, [Potassium.POTASSIUM]
# =============================================================================
# Urea
# =============================================================================
class Urea(SimpleNumericalResultParser):
"""
Urea.
"""
UREA_BASE = fr"""
{WORD_BOUNDARY} U(?:r(?:ea)?)? {WORD_BOUNDARY}
"""
UREA = regex_or(
*regex_components_from_read_codes(
ReadCodes.UREA_BLOOD,
ReadCodes.UREA_PLASMA,
ReadCodes.UREA_SERUM,
),
UREA_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=UREA,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MILLIEQ_PER_L, # good
MG, # bad
),
optional_ignorable_after_quantity=OPTIONAL_POC
)
NAME = "Urea"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MILLIEQ_PER_L: 1,
# but not MG
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
self.test_numerical_parser([
("Urea", []), # should fail; no values
("U 4", [4]),
("Urea 4.3", [4.3]),
("U 4.5 mEq/L", [4.5]),
("Ur 4.5 mM", [4.5]),
("Present: Ursula Rogers (U). 1.0 Minutes of last meeting", []),
("Present: Ursula Rogers (UR) 1.0 Minutes of last meeting", []),
("U (H) 5.6 mM", [5.6]),
("Ur (*) 5.6 mM", [5.6]),
("Urea (X) 5.6 mM", []),
("blah (U) 5.6 mM", []),
("Urea (5.6) something", [5.6]),
("Urea (5.6 mM), others", [5.6]),
("U-3.2", [3.2]),
("Blood urea (X771P) 3.2", [3.2]),
("Plasma urea level (XaDvl) 3.2", [3.2]),
("Serum urea level (XM0lt) 3.2", [3.2]),
], verbose=verbose)
class UreaValidator(ValidatorBase):
"""
Validator for Urea
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Urea.NAME, [Urea.UREA]
# =============================================================================
# Creatinine
# =============================================================================
class Creatinine(SimpleNumericalResultParser):
"""
Creatinine. Default units are micromolar (SI).
"""
CREATININE_BASE = fr"""
{WORD_BOUNDARY} Cr(?:eat(?:inine)?)? {WORD_BOUNDARY}
"""
# ... Cr, Creat, Creatinine
# Possible that "creatine" is present as a typo... but it's wrong...
CREATININE = regex_or(
*regex_components_from_read_codes(
ReadCodes.CREATININE,
ReadCodes.CREATININE_PLASMA,
ReadCodes.CREATININE_PLASMA_CORRECTED,
ReadCodes.CREATININE_SERUM,
ReadCodes.CREATININE_SERUM_CORRECTED,
),
CREATININE_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=CREATININE,
units=regex_or(
MICROMOLAR, # good
MICROMOLES_PER_L, # good
MICROEQ_PER_L, # good
MG_PER_DL, # good but needs conversion
# ... note that MG_PER_DL must precede MG
MG, # bad
),
optional_ignorable_after_quantity=OPTIONAL_POC
)
CREATININE_MOLECULAR_MASS_G_PER_MOL = 113.12
# ... https://pubchem.ncbi.nlm.nih.gov/compound/creatinine
NAME = "Creatinine"
PREFERRED_UNIT_COLUMN = "value_micromol_L"
UNIT_MAPPING = {
MICROMOLAR: 1, # preferred unit
MICROMOLES_PER_L: 1,
MICROEQ_PER_L: 1,
MG_PER_DL: factor_micromolar_from_mg_per_dl(
CREATININE_MOLECULAR_MASS_G_PER_MOL
)
# but not MG
}
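    # Sanity check (illustrative): factor = 10 * 1000 / 113.12 ~= 88.4, i.e.
    # a creatinine of 1 mg/dL converts to ~88.4 micromol/L.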
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(mg_dl: float) -> float:
# Convert mg/dl to μM
return micromolar_from_mg_per_dl(
mg_dl, self.CREATININE_MOLECULAR_MASS_G_PER_MOL)
self.test_numerical_parser([
("Creatinine", []), # should fail; no values
("Cr 50", [50]),
("Creat 125.5", [125.5]),
("Creat 75 uEq/L", [75]),
("Cr 75 μM", [75]),
("Present: Chloe Rogers (CR). 1.0 Minutes of last meeting", []),
("Creatinine (H) 200 uM", [200]),
("Creatinine (*) 200 micromol/L", [200]),
("Creatinine (X) 200 uM", []),
("Creatinine 200 micromolar", [200]),
("Creatinine 200 micromolar, others", [200]),
("blah (creat) 5.6 uM", []),
("Creatinine (200) something", [200]),
("Creatinine (200 micromolar)", [200]),
("Creatinine (200 micromolar), others", [200]),
("Cr-75", [75]),
("creatinine 3 mg/dl", [convert(3)]),
("creatinine 3 mg", []),
("Creatinine level (X771Q) 75", [75]),
("Plasma creatinine level (XaETQ) 75", [75]),
("Cor plasma creatinine level (XaERX) 75", [75]),
("Serum creatinine level (XE2q5) 75", [75]),
("Cor serum creatinine level (XaERc) 75", [75]),
], verbose=verbose)
class CreatinineValidator(ValidatorBase):
"""
Validator for Creatinine
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Creatinine.NAME, [Creatinine.CREATININE]
# =============================================================================
# Lithium (Li)
# =============================================================================
class Lithium(SimpleNumericalResultParser):
"""
Lithium (Li) levels (for blood tests, not doses).
"""
LITHIUM_BASE = fr"""
{WORD_BOUNDARY} Li(?:thium)? {WORD_BOUNDARY}
"""
LITHIUM = regex_or(
*regex_components_from_read_codes(
ReadCodes.LITHIUM_SERUM,
),
LITHIUM_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=LITHIUM,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MILLIEQ_PER_L, # good
MG, # bad
G, # bad
)
)
NAME = "Lithium"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MILLIEQ_PER_L: 1,
# but not MG
# and not G
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
self.test_numerical_parser([
("Li", []), # should fail; no values
("Li 0.4", [0.4]),
("li 1200 mg", []), # that's a dose
("li 1.2 g", []), # that's a dose
("lithium 1200 mg", []), # that's a dose
("lithium 153", [153]), # an unhappy patient...
("Li 135 mEq/L", [135]),
("Li 139 mM", [139]),
("lithium carbonate 800mg", []),
("Present: Linda Ingles (LI). 1.0 Minutes of last meeting", []),
("Present: Linda Ingles (LI) 1.0 Minutes of last meeting", []),
("Li (H) 1.3 mM", [1.3]),
("Li (*) 1.3 mM", [1.3]),
("Li (X) 1.3 mM", []),
("blah (Li) 1.2 mM", []),
("Li (1.3) something", [1.3]),
("Li (0.4 mM), others", [0.4]),
("Li-0.4", [0.4]),
("Serum lithium level (XE25g) 0.4", [0.4]),
], verbose=verbose)
class LithiumValidator(ValidatorBase):
"""
Validator for Lithium
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Lithium.NAME, [Lithium.LITHIUM]
# =============================================================================
# Thyroid-stimulating hormone (TSH)
# =============================================================================
class Tsh(SimpleNumericalResultParser):
"""
Thyroid-stimulating hormone (TSH).
"""
TSH_BASE = fr"""
{WORD_BOUNDARY}
(?: TSH | thyroid [-\s]+ stimulating [-\s]+ hormone )
{WORD_BOUNDARY}
"""
TSH = regex_or(
*regex_components_from_read_codes(
ReadCodes.TSH_PLASMA,
ReadCodes.TSH_PLASMA_30_MIN,
ReadCodes.TSH_PLASMA_60_MIN,
ReadCodes.TSH_PLASMA_90_MIN,
ReadCodes.TSH_PLASMA_120_MIN,
ReadCodes.TSH_PLASMA_150_MIN,
ReadCodes.TSH_SERUM,
ReadCodes.TSH_SERUM_60_MIN,
ReadCodes.TSH_SERUM_90_MIN,
ReadCodes.TSH_SERUM_120_MIN,
ReadCodes.TSH_SERUM_150_MIN,
),
TSH_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=TSH,
units=regex_or(
MILLIUNITS_PER_L, # good
MICROUNITS_PER_ML, # good
)
)
NAME = "TSH"
PREFERRED_UNIT_COLUMN = "value_mU_L"
UNIT_MAPPING = {
MILLIUNITS_PER_L: 1, # preferred unit
MICROUNITS_PER_ML: 1
}
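    # Note: 1 microunit/mL == 1 milliunit/L (the factor of 1000 cancels in
    # numerator and denominator), hence both units map to 1.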
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in superclass
self.test_numerical_parser([
("TSH", []), # should fail; no values
("TSH 1.5", [1.5]),
("thyroid-stimulating hormone 1.5", [1.5]),
("TSH 1.5 mU/L", [1.5]),
("TSH 1.5 mIU/L", [1.5]),
("TSH 1.5 μU/mL", [1.5]),
("TSH 1.5 μIU/mL", [1.5]),
("TSH 1.5 uU/mL", [1.5]),
("TSH 1.5 uIU/mL", [1.5]),
("TSH-2.3", [2.3]),
("Plasma TSH level (XaELW) 2.3", [2.3]),
("Serum TSH level (XaELV) 2.3", [2.3]),
# etc.; not all Read codes tested here
], verbose=verbose)
class TshValidator(ValidatorBase):
"""
Validator for TSH
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Tsh.NAME, [Tsh.TSH]
# =============================================================================
# Alkaline phosphatase
# =============================================================================
class AlkPhos(SimpleNumericalResultParser):
"""
Alkaline phosphatase (ALP, AlkP, AlkPhos).
"""
ALKP_BASE = fr"""
{WORD_BOUNDARY}
(?:
(?: ALk?P (?:\. | {WORD_BOUNDARY}) ) |
(?:
alk(?:aline | \.)?
[-\s]*
phos(?:phatase{WORD_BOUNDARY} | \. | {WORD_BOUNDARY})
)
)
"""
ALKP = regex_or(
*regex_components_from_read_codes(
ReadCodes.ALKPHOS_PLASMA,
ReadCodes.ALKPHOS_SERUM,
ReadCodes.ALKPHOS, # least specific; at end
),
ALKP_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=ALKP,
units=UNITS_PER_L
)
NAME = "AlkPhos"
PREFERRED_UNIT_COLUMN = "value_U_L"
UNIT_MAPPING = {
UNITS_PER_L: 1 # preferred unit
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in superclass
self.test_numerical_parser([
("ALP", []), # should fail; no values
("was 7", []), # no quantity
("ALP 55", [55]),
("Alkaline-Phosphatase 55", [55]),
("Alkaline Phosphatase 55 U/L ", [55]),
("ALP 55 U/L", [55]),
("ALP-55", [55]),
("AlkP 55", [55]),
("alk.phos. 55", [55]),
("alk. phos. 55", [55]),
("alkphos 55", [55]),
("Alkaline phosphatase level (44F3.) 55", [55]),
("Alkaline phosphatase level (44F3x) 55", []), # test "." in regex
("Plasma alkaline phosphatase level (XaIRj) 55", [55]),
("Serum alkaline phosphatase level (XE2px) 55", [55]),
], verbose=verbose)
class AlkPhosValidator(ValidatorBase):
"""
Validator for ALP
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return AlkPhos.NAME, [AlkPhos.ALKP]
# =============================================================================
# Alanine aminotransferase (ALT)
# =============================================================================
class ALT(SimpleNumericalResultParser):
"""
Alanine aminotransferase (ALT), a.k.a. alanine transaminase (ALT).
A.k.a. serum glutamate-pyruvate transaminase (SGPT), or serum
glutamate-pyruvic transaminase (SGPT), but not a.k.a. those in recent
memory!
"""
ALT_BASE = fr"""
{WORD_BOUNDARY}
(?:
ALT |
alanine [-\s]+ (?: aminotransferase | transaminase )
)
{WORD_BOUNDARY}
"""
ALT = regex_or(
*regex_components_from_read_codes(
ReadCodes.ALT,
),
ALT_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=ALT,
units=UNITS_PER_L
)
NAME = "ALT"
PREFERRED_UNIT_COLUMN = "value_U_L"
UNIT_MAPPING = {
UNITS_PER_L: 1 # preferred unit
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in superclass
self.test_numerical_parser([
("ALT", []), # should fail; no values
("was 7", []), # no quantity
("ALT 55", [55]),
("alanine-aminotransferase 55", [55]),
("Alanine aminotransferase 55 U/L ", [55]),
("alanine transaminase 55 U/L ", [55]),
("ALT 55 U/L", [55]),
("ALT-55", [55]),
("ALP 55", []), # wrong thing
("ALT/SGPT serum level (44G3.) 55", [55]),
], verbose=verbose)
class ALTValidator(ValidatorBase):
"""
Validator for ALT
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return ALT.NAME, [ALT.ALT]
# =============================================================================
# Gamma GT (gGT)
# =============================================================================
class GammaGT(SimpleNumericalResultParser):
"""
Gamma-glutamyl transferase (gGT).
"""
GGT_BASE = fr"""
{WORD_BOUNDARY}
(?:
(?: γ | G | gamma)
[-\s]*
(?:
GT |
glutamyl [-\s]+ transferase
)
)
{WORD_BOUNDARY}
"""
GGT = regex_or(
*regex_components_from_read_codes(
ReadCodes.GAMMA_GT,
ReadCodes.GAMMA_GT_PLASMA,
ReadCodes.GAMMA_GT_SERUM,
),
GGT_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=GGT,
units=UNITS_PER_L
)
NAME = "GammaGT"
PREFERRED_UNIT_COLUMN = "value_U_L"
UNIT_MAPPING = {
UNITS_PER_L: 1 # preferred unit
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in superclass
self.test_numerical_parser([
("gGT", []), # should fail; no values
("was 7", []), # no quantity
("gGT 55", [55]),
("gamma Glutamyl Transferase 19 U/L", [19]),
("Gamma GT 55 U/L ", [55]),
("GGT 55 U/L", [55]),
("ggt-55", [55]),
("γGT 55", [55]),
("Gamma-glutamyl transferase lev (44G4.) 55", [55]),
("Plasma gamma-glutamyl transferase level (XaES4) 55", [55]),
("Serum gamma-glutamyl transferase level (XaES3) 55", [55]),
], verbose=verbose)
class GammaGTValidator(ValidatorBase):
"""
Validator for gGT
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return GammaGT.NAME, [GammaGT.GGT]
# =============================================================================
# Total bilirubin
# =============================================================================
class Bilirubin(SimpleNumericalResultParser):
"""
Total bilirubin.
"""
BILIRUBIN_BASE = fr"""
{WORD_BOUNDARY}
(?: t(?: ot(?:al | \.)? | \.) \s+ )?
bili?(?: \. | rubin{WORD_BOUNDARY})?
"""
BILIRUBIN = regex_or(
*regex_components_from_read_codes(
ReadCodes.BILIRUBIN_PLASMA_TOTAL,
ReadCodes.BILIRUBIN_SERUM,
ReadCodes.BILIRUBIN_SERUM_TOTAL,
ReadCodes.BILIRUBIN_TOTAL,
),
BILIRUBIN_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=BILIRUBIN,
units=regex_or(
MICROMOLAR, # good
MICROMOLES_PER_L, # good
)
)
NAME = "Bilirubin"
PREFERRED_UNIT_COLUMN = "value_micromol_L"
UNIT_MAPPING = {
MICROMOLAR: 1, # preferred unit
MICROMOLES_PER_L: 1
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in superclass
self.test_numerical_parser([
("tot Bil", []), # should fail; no values
("was 7", []), # no quantity
("tot Bil 6", [6]),
("Total Bilirubin: 6", [6]),
("Total Bilirubin 6 umol/L", [6]),
("bilirubin 17 μM", [17]),
("t.bilirubin 17 μM", [17]),
("t. bilirubin 17 μM", [17]),
("bili. 17 μM", [17]),
("bili 17 μM", [17]),
("Plasma total bilirubin level (XaETf) 17", [17]),
("Serum bilirubin level (44E..) 17", [17]),
("Serum total bilirubin level (XaERu) 17", [17]),
("Total bilirubin level (XE2qu) 17", [17]),
("Total bilirubin \t level \n (XE2qu) 17", [17]), # test whitespace # noqa
("xTotal bilirubin level (XE2qu) 17", []), # test word boundary
("Serum total bilirubin level (XaERu) 6 umol/L", [6]),
], verbose=verbose)
class BilirubinValidator(ValidatorBase):
"""
Validator for bilirubin.
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Bilirubin.NAME, [Bilirubin.BILIRUBIN]
# =============================================================================
# Albumin (Alb)
# =============================================================================
class Albumin(SimpleNumericalResultParser):
"""
Albumin (Alb).
"""
ALBUMIN_BASE = fr"""
{WORD_BOUNDARY}
(?:
alb(?:\. | umin{WORD_BOUNDARY})?
(?: \s+ level{WORD_BOUNDARY})?
)
"""
ALBUMIN = regex_or(
*regex_components_from_read_codes(
ReadCodes.ALBUMIN_PLASMA,
ReadCodes.ALBUMIN_SERUM,
),
ALBUMIN_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=ALBUMIN,
units=G_PER_L
)
NAME = "Albumin"
PREFERRED_UNIT_COLUMN = "value_g_L"
UNIT_MAPPING = {
G_PER_L: 1 # preferred unit
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in superclass
self.test_numerical_parser([
("Alb", []), # should fail; no values
("was 7", []), # no quantity
("ALP 6", []), # wrong quantity
("Alb 6", [6]),
("Albumin: 48", [48]),
("Albumin 48 g/L", [48]),
("alb. 48", [48]),
("albumin level 48", [48]),
("Plasma albumin level (XaIRc) 48", [48]),
("Serum albumin level (XE2eA) 48", [48]),
], verbose=verbose)
class AlbuminValidator(ValidatorBase):
"""
Validator for Albumin
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Albumin.NAME, [Albumin.ALBUMIN]
# =============================================================================
# Glucose
# =============================================================================
class Glucose(SimpleNumericalResultParser):
"""
Glucose.
- By Emanuele Osimo, Feb 2019.
- Some modifications by Rudolf Cardinal, Feb 2019.
"""
GLUCOSE_BASE = fr"""
{WORD_BOUNDARY} glu(?:c(?:ose)?)? {WORD_BOUNDARY}
# glu, gluc, glucose
"""
GLUCOSE = regex_or(
*regex_components_from_read_codes(
ReadCodes.GLUCOSE,
ReadCodes.GLUCOSE_BLOOD,
ReadCodes.GLUCOSE_BLOOD_2H_POSTPRANDIAL,
ReadCodes.GLUCOSE_BLOOD_150_MIN,
ReadCodes.GLUCOSE_PLASMA_RANDOM,
ReadCodes.GLUCOSE_PLASMA_FASTING,
ReadCodes.GLUCOSE_PLASMA_30_MIN,
ReadCodes.GLUCOSE_PLASMA_60_MIN,
ReadCodes.GLUCOSE_PLASMA_90_MIN,
ReadCodes.GLUCOSE_PLASMA_120_MIN,
ReadCodes.GLUCOSE_PLASMA_2H_POSTPRANDIAL,
ReadCodes.GLUCOSE_PLASMA_150_MIN,
ReadCodes.GLUCOSE_SERUM,
ReadCodes.GLUCOSE_SERUM_RANDOM,
ReadCodes.GLUCOSE_SERUM_FASTING,
ReadCodes.GLUCOSE_SERUM_30_MIN,
ReadCodes.GLUCOSE_SERUM_60_MIN,
ReadCodes.GLUCOSE_SERUM_90_MIN,
ReadCodes.GLUCOSE_SERUM_120_MIN,
ReadCodes.GLUCOSE_SERUM_2H_POSTPRANDIAL,
ReadCodes.GLUCOSE_SERUM_150_MIN,
# !
),
GLUCOSE_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=GLUCOSE,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MG_PER_DL, # good but needs conversion
),
optional_ignorable_after_quantity=OPTIONAL_POC
)
GLUCOSE_MOLECULAR_MASS_G_PER_MOL = 180.156
# ... https://pubchem.ncbi.nlm.nih.gov/compound/D-glucose
NAME = "Glucose"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MG_PER_DL: factor_millimolar_from_mg_per_dl(GLUCOSE_MOLECULAR_MASS_G_PER_MOL) # noqa
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(mg_dl: float) -> float:
# Convert mg/dl to mM
return millimolar_from_mg_per_dl(
mg_dl, self.GLUCOSE_MOLECULAR_MASS_G_PER_MOL)
self.test_numerical_parser([
("glu", []), # should fail; no values
("glucose 6 mM", [6]),
("glucose 6 mmol", [6]),
("glucose 6", [6]),
("glu 6", [6]),
("glucose 90 mg/dl", [convert(90)]), # unit conversion
("gluc = 6", [6]),
("glucose: 6", [6]),
("glu equals 6", [6]),
("glucose is equal to 6", [6]),
("glu <4", [4]),
("glucose less than 1", [1]), # would be bad news...
("glu more than 20", [20]),
("glucose was 15", [15]),
("glucose was 90 mg/dl", [convert(90)]),
("glu is 90 mg dl-1", [convert(90)]),
("glucose is 90 mg dl -1", [convert(90)]),
("glu-5", [5]),
("glucose | 20.3 (H) | mmol/L", [20.3]),
("Glucose level (X772y) 5", [5]),
("Blood glucose level (X772z) 5", [5]),
# Not all Read codes tested.
], verbose=verbose)
class GlucoseValidator(ValidatorBase):
"""
Validator for glucose
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Glucose.NAME, [Glucose.GLUCOSE]
# =============================================================================
# LDL cholesterol
# =============================================================================
class LDLCholesterol(SimpleNumericalResultParser):
"""
Low density lipoprotein (LDL) cholesterol.
- By Emanuele Osimo, Feb 2019.
- Some modifications by Rudolf Cardinal, Feb 2019.
"""
LDL_BASE = fr"""
{WORD_BOUNDARY}
LDL [-\s]*
(?:
chol(?:esterol)?{WORD_BOUNDARY} |
chol\. |
{WORD_BOUNDARY} # allows LDL by itself
)
"""
LDL = regex_or(
*regex_components_from_read_codes(
ReadCodes.LDL_PLASMA,
ReadCodes.LDL_PLASMA_FASTING,
ReadCodes.LDL_PLASMA_RANDOM,
ReadCodes.LDL_SERUM,
ReadCodes.LDL_SERUM_FASTING,
ReadCodes.LDL_SERUM_RANDOM,
),
LDL_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=LDL,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MG_PER_DL, # good but needs conversion
)
)
NAME = "LDL cholesterol"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
FACTOR_MG_DL_TO_MMOL_L = 0.02586
# ... https://www.ncbi.nlm.nih.gov/books/NBK33478/
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MG_PER_DL: FACTOR_MG_DL_TO_MMOL_L,
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(mg_dl: float) -> float:
# Convert mg/dl to mM
return self.FACTOR_MG_DL_TO_MMOL_L * mg_dl
self.test_numerical_parser([
("LDL", []), # should fail; no values
("LDL 4 mM", [4]),
("LDL chol 4 mmol", [4]),
("LDL chol. 4 mmol", [4]),
("LDL 4", [4]),
("chol 4", []), # that's total cholesterol
("HDL chol 4", []), # that's HDL cholesterol
("LDL cholesterol 140 mg/dl", [convert(140)]), # unit conversion
("LDL = 4", [4]),
("LDL: 4", [4]),
("LDL equals 4", [4]),
("LDL is equal to 4", [4]),
("LDL <4", [4]),
("LDLchol less than 4", [4]),
("LDL cholesterol more than 20", [20]),
("LDL was 4", [4]),
("LDL chol was 140 mg/dl", [convert(140)]),
("chol was 140 mg/dl", []),
("LDL is 140 mg dl-1", [convert(140)]),
("ldl chol is 140 mg dl -1", [convert(140)]),
("ldl-4", [4]),
("LDL chol | 6.2 (H) | mmol/L", [6.2]),
("Plasma LDL cholesterol level (XaEVs) 4", [4]),
("Plasma rndm LDL cholest level (44d4.) 4", [4]),
("Plasma fast LDL cholest level (44d5.) 4", [4]),
("Serum LDL cholesterol level (44P6.) 4", [4]),
("Serum fast LDL cholesterol lev (44PD.) 4", [4]),
("Ser random LDL cholesterol lev (44PE.) 4", [4]),
], verbose=verbose)
class LDLCholesterolValidator(ValidatorBase):
"""
Validator for LDL cholesterol
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return LDLCholesterol.NAME, [LDLCholesterol.LDL]
# =============================================================================
# HDL cholesterol
# =============================================================================
class HDLCholesterol(SimpleNumericalResultParser):
"""
High-density lipoprotein (HDL) cholesterol.
- By Emanuele Osimo, Feb 2019.
- Some modifications by Rudolf Cardinal, Feb 2019.
"""
HDL_BASE = fr"""
{WORD_BOUNDARY}
HDL [-\s]*
(?:
chol(?:esterol)?{WORD_BOUNDARY} |
chol\. |
{WORD_BOUNDARY} # allows HDL by itself
)
"""
HDL = regex_or(
*regex_components_from_read_codes(
ReadCodes.HDL_PLASMA,
ReadCodes.HDL_PLASMA_FASTING,
ReadCodes.HDL_PLASMA_RANDOM,
ReadCodes.HDL_SERUM,
ReadCodes.HDL_SERUM_FASTING,
ReadCodes.HDL_SERUM_RANDOM,
),
HDL_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=HDL,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MG_PER_DL, # good but needs conversion
)
)
NAME = "HDL cholesterol"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
FACTOR_MG_DL_TO_MMOL_L = 0.02586
# ... https://www.ncbi.nlm.nih.gov/books/NBK33478/
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MG_PER_DL: FACTOR_MG_DL_TO_MMOL_L,
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(mg_dl: float) -> float:
# Convert mg/dl to mM
return self.FACTOR_MG_DL_TO_MMOL_L * mg_dl
self.test_numerical_parser([
("HDL", []), # should fail; no values
("HDL 4 mM", [4]),
("HDL chol 4 mmol", [4]),
("HDL chol. 4 mmol", [4]),
("HDL 4", [4]),
("chol 4", []), # that's total cholesterol
("LDL chol 4", []), # that's LDL cholesterol
("HDL cholesterol 140 mg/dl", [convert(140)]), # unit conversion
("HDL = 4", [4]),
("HDL: 4", [4]),
("HDL equals 4", [4]),
("HDL is equal to 4", [4]),
("HDL <4", [4]),
("HDLchol less than 4", [4]),
("HDL cholesterol more than 20", [20]),
("HDL was 4", [4]),
("HDL chol was 140 mg/dl", [convert(140)]),
("chol was 140 mg/dl", []),
("HDL is 140 mg dl-1", [convert(140)]),
("Hdl chol is 140 mg dl -1", [convert(140)]),
("hdl-4", [4]),
("HDL chol | 6.2 (H) | mmol/L", [6.2]),
("Plasma HDL cholesterol level (XaEVr) 4", [4]),
("Plasma rndm HDL cholest level (44d2.) 4", [4]),
("Plasma fast HDL cholest level (44d3.) 4", [4]),
("Serum HDL cholesterol level (44P5.) 4", [4]),
("Serum fast HDL cholesterol lev (44PB.) 4", [4]),
("Ser random HDL cholesterol lev (44PC.) 4", [4]),
], verbose=verbose)
class HDLCholesterolValidator(ValidatorBase):
"""
Validator for HDL cholesterol
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return HDLCholesterol.NAME, [HDLCholesterol.HDL]
# =============================================================================
# Total cholesterol
# =============================================================================
class TotalCholesterol(SimpleNumericalResultParser):
"""
Total or undifferentiated cholesterol.
"""
CHOLESTEROL_BASE = fr"""
{WORD_BOUNDARY}
(?<!HDL[-\s]+) (?<!LDL[-\s]+) # not preceded by HDL or LDL
(?: tot(?:al) [-\s] )? # optional "total" prefix
(?:
chol(?:esterol)?{WORD_BOUNDARY} |
chol\.
)
"""
# ... (?<! something ) is a negative lookbehind assertion
CHOLESTEROL = regex_or(
*regex_components_from_read_codes(
ReadCodes.CHOLESTEROL_SERUM,
ReadCodes.CHOLESTEROL_TOTAL_PLASMA,
ReadCodes.CHOLESTEROL_TOTAL_SERUM,
),
CHOLESTEROL_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=CHOLESTEROL,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MG_PER_DL, # good but needs conversion
)
)
NAME = "Total cholesterol"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
FACTOR_MG_DL_TO_MMOL_L = 0.02586
# ... https://www.ncbi.nlm.nih.gov/books/NBK33478/
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MG_PER_DL: FACTOR_MG_DL_TO_MMOL_L,
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(mg_dl: float) -> float:
# Convert mg/dl to mM
return self.FACTOR_MG_DL_TO_MMOL_L * mg_dl
self.test_numerical_parser([
("chol", []), # should fail; no values
("chol 4 mM", [4]),
("total chol 4 mmol", [4]),
("chol. 4 mmol", [4]),
("chol 4", [4]),
("HDL chol 4", []), # that's HDL cholesterol
("LDL chol 4", []), # that's LDL cholesterol
("total cholesterol 140 mg/dl", [convert(140)]), # unit conversion
("chol = 4", [4]),
("chol: 4", [4]),
("chol equals 4", [4]),
("chol is equal to 4", [4]),
("chol <4", [4]),
("chol less than 4", [4]),
("cholesterol more than 20", [20]),
("chol was 4", [4]),
("chol was 140 mg/dl", [convert(140)]),
("chol was 140", [140]), # but probably wrong interpretation!
("chol is 140 mg dl-1", [convert(140)]),
("chol is 140 mg dl -1", [convert(140)]),
("chol-4", [4]),
("chol | 6.2 (H) | mmol/L", [6.2]),
("Serum cholesterol level (XE2eD) 4", [4]),
("Plasma total cholesterol level (XaIRd) 4", [4]),
("Serum total cholesterol level (XaJe9) 4", [4]),
], verbose=verbose)
class TotalCholesterolValidator(ValidatorBase):
"""
Validator for total cholesterol
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return TotalCholesterol.NAME, [TotalCholesterol.CHOLESTEROL]
# =============================================================================
# Triglycerides
# =============================================================================
class Triglycerides(SimpleNumericalResultParser):
"""
Triglycerides.
- By Emanuele Osimo, Feb 2019.
- Some modifications by Rudolf Cardinal, Feb 2019.
"""
TG_BASE = fr"""
{WORD_BOUNDARY}
(?: Triglyceride[s]? | TG )
{WORD_BOUNDARY}
"""
TG = regex_or(
*regex_components_from_read_codes(
ReadCodes.TG,
ReadCodes.TG_PLASMA,
ReadCodes.TG_PLASMA_FASTING,
ReadCodes.TG_PLASMA_RANDOM,
ReadCodes.TG_SERUM,
ReadCodes.TG_SERUM_FASTING,
ReadCodes.TG_SERUM_RANDOM,
),
TG_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=TG,
units=regex_or(
MILLIMOLAR, # good
MILLIMOLES_PER_L, # good
MG_PER_DL, # good but needs conversion
)
)
NAME = "Triglycerides"
PREFERRED_UNIT_COLUMN = "value_mmol_L"
FACTOR_MG_DL_TO_MMOL_L = 0.01129 # reciprocal of 88.57
# ... https://www.ncbi.nlm.nih.gov/books/NBK33478/
# ... https://www.ncbi.nlm.nih.gov/books/NBK83505/
UNIT_MAPPING = {
MILLIMOLAR: 1, # preferred unit
MILLIMOLES_PER_L: 1,
MG_PER_DL: FACTOR_MG_DL_TO_MMOL_L,
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(mg_dl: float) -> float:
# Convert mg/dl to mM
return self.FACTOR_MG_DL_TO_MMOL_L * mg_dl
self.test_numerical_parser([
("TG", []), # should fail; no values
("triglycerides", []), # should fail; no values
("TG 4 mM", [4]),
("triglycerides 4 mmol", [4]),
("triglyceride 4 mmol", [4]),
("TG 4", [4]),
("TG 140 mg/dl", [convert(140)]), # unit conversion
("TG = 4", [4]),
("TG: 4", [4]),
("TG equals 4", [4]),
("TG is equal to 4", [4]),
("TG <4", [4]),
("TG less than 4", [4]),
("TG more than 20", [20]),
("TG was 4", [4]),
("TG was 140 mg/dl", [convert(140)]),
("TG was 140", [140]), # but probably wrong interpretation!
("TG is 140 mg dl-1", [convert(140)]),
("TG is 140 mg dl -1", [convert(140)]),
("TG-4", [4]),
("triglycerides | 6.2 (H) | mmol/L", [6.2]),
("Triglyceride level (X772O) 4", [4]),
("Plasma triglyceride level (44e..) 4", [4]),
("Plasma rndm triglyceride level (44e0.) 4", [4]),
("Plasma fast triglyceride level (44e1.) 4", [4]),
("Serum triglyceride levels (XE2q9) 4", [4]),
("Serum fasting triglyceride lev (44Q4.) 4", [4]),
("Serum random triglyceride lev (44Q5.) 4", [4]),
], verbose=verbose)
class TriglyceridesValidator(ValidatorBase):
"""
Validator for triglycerides
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return Triglycerides.NAME, [Triglycerides.TG]
# =============================================================================
# HbA1c
# =============================================================================
def hba1c_mmol_per_mol_from_percent(percent: Union[float, str]) \
-> Optional[float]:
"""
Convert an HbA1c value from old percentage units -- DCCT (Diabetes Control
and Complications Trial), UKPDS (United Kingdom Prospective Diabetes Study)
or NGSP (National Glycohemoglobin Standardization Program) -- to newer IFCC
(International Federation of Clinical Chemistry) mmol/mol units (mmol HbA1c
/ mol Hb).
Args:
percent: DCCT value as a percentage
Returns:
IFCC value in mmol/mol
Example: 5% becomes 31.1 mmol/mol.
By Emanuele Osimo, Feb 2019.
Some modifications by Rudolf Cardinal, Feb 2019.
References:
- Emanuele had mmol_per_mol = (percent - 2.14) * 10.929 -- primary source
awaited.
- Jeppsson 2002, https://www.ncbi.nlm.nih.gov/pubmed/11916276 -- no, that's
the chemistry
- https://www.ifcchba1c.org/
- http://www.ngsp.org/ifccngsp.asp -- gives master equation of
      NGSP = [0.09148 × IFCC] + 2.152, therefore implying
IFCC = (NGSP – 2.152) × 10.93135.
- Little & Rohlfing 2013: https://www.ncbi.nlm.nih.gov/pubmed/23318564;
also gives NGSP = [0.09148 * IFCC] + 2.152.
Note also that you may see eAG values (estimated average glucose), in
mmol/L or mg/dl; see http://www.ngsp.org/A1ceAG.asp; these are not direct
measurements of HbA1c.
"""
if isinstance(percent, str):
percent = to_float(percent)
if not percent:
return None
percent = abs(percent) # deals with e.g. "HbA1c-8%" -> -8
return (percent - 2.152) * 10.93135
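# Editor's sanity check (illustrative, not in the original source): with the
# equation above, 5% maps to (5 - 2.152) * 10.93135 = 31.13 mmol/mol, which
# matches the docstring example of "5% becomes 31.1 mmol/mol".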
class HbA1c(SimpleNumericalResultParser):
"""
Glycosylated (glycated) haemoglobin (HbA1c).
- By Emanuele Osimo, Feb 2019.
- Some modifications by Rudolf Cardinal, Feb 2019.
Note: HbA1 is different
(https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2541274).
"""
HBA1C_BASE = fr"""
{WORD_BOUNDARY}
(?:
(?: Glyc(?:osyl)?ated [-\s]+ (?:ha?emoglobin|Hb) ) |
HbA1c
)
{WORD_BOUNDARY}
"""
HBA1C = regex_or(
*regex_components_from_read_codes(
ReadCodes.HBA1C,
ReadCodes.HBA1C_DCCT,
ReadCodes.HBA1C_IFCC,
),
HBA1C_BASE,
wrap_each_in_noncapture_group=True,
wrap_result_in_noncapture_group=False
)
REGEX = make_simple_numeric_regex(
quantity=HBA1C,
units=regex_or(
MILLIMOLES_PER_MOL, # standard
PERCENT, # good but needs conversion
MILLIMOLES_PER_L, # bad; may be an eAG value
MG_PER_DL, # bad; may be an eAG value
)
)
NAME = "HBA1C"
PREFERRED_UNIT_COLUMN = "value_mmol_mol"
UNIT_MAPPING = {
MILLIMOLES_PER_MOL: 1, # preferred unit
PERCENT: hba1c_mmol_per_mol_from_percent,
# but not MILLIMOLES_PER_L
# and not MG_PER_DL
}
def __init__(self,
nlpdef: Optional[NlpDefinition],
cfg_processor_name: Optional[str],
commit: bool = False) -> None:
# see documentation above
super().__init__(
nlpdef=nlpdef,
cfg_processor_name=cfg_processor_name,
regex_str=self.REGEX,
variable=self.NAME,
target_unit=self.PREFERRED_UNIT_COLUMN,
units_to_factor=self.UNIT_MAPPING,
commit=commit,
take_absolute=True
)
def test(self, verbose: bool = False) -> None:
# docstring in parent class
def convert(percent: float) -> float:
# Convert % to mmol/mol
return hba1c_mmol_per_mol_from_percent(percent)
self.test_numerical_parser([
("HbA1c", []), # should fail; no values
("glycosylated haemoglobin", []), # should fail; no values
("HbA1c 31", [31]),
("HbA1c 31 mmol/mol", [31]),
("HbA1c 31 mg/dl", []), # wrong units
("HbA1c 31 mmol/L", []), # wrong units
("glycosylated haemoglobin 31 mmol/mol", [31]),
("glycated hemoglobin 31 mmol/mol", [31]),
("HbA1c 8%", [convert(8)]),
("HbA1c = 8%", [convert(8)]),
("HbA1c: 31", [31]),
("HbA1c equals 31", [31]),
("HbA1c is equal to 31", [31]),
("HbA1c <31.2", [31.2]),
("HbA1c less than 4", [4]),
("HbA1c more than 20", [20]),
("HbA1c was 31", [31]),
("HbA1c was 15%", [convert(15)]),
("HbA1c-31", [31]),
("HbA1c-8%", [convert(8)]),
("HbA1c | 40 (H) | mmol/mol", [40]),
("Haemoglobin A1c level (X772q) 8%", [convert(8)]),
("HbA1c level (DCCT aligned) (XaERp) 8%", [convert(8)]),
("HbA1c levl - IFCC standardised (XaPbt) 31 mmol/mol", [31]),
], verbose=verbose)
class HbA1cValidator(ValidatorBase):
"""
Validator for HbA1c
(see :class:`crate_anon.nlp_manager.regex_parser.ValidatorBase` for
explanation).
"""
@classmethod
def get_variablename_regexstrlist(cls) -> Tuple[str, List[str]]:
return HbA1c.NAME, [HbA1c.HBA1C]
# =============================================================================
# All classes in this module
# =============================================================================
ALL_BIOCHEMISTRY_NLP_AND_VALIDATORS = [
(Albumin, AlbuminValidator),
(AlkPhos, AlkPhosValidator),
(ALT, ALTValidator),
(Bilirubin, BilirubinValidator),
(Creatinine, CreatinineValidator),
(Crp, CrpValidator),
(GammaGT, GammaGTValidator),
(Glucose, GlucoseValidator),
(HbA1c, HbA1cValidator),
(HDLCholesterol, HDLCholesterolValidator),
(LDLCholesterol, LDLCholesterolValidator),
(Lithium, LithiumValidator),
(Potassium, PotassiumValidator),
(Sodium, SodiumValidator),
(TotalCholesterol, TotalCholesterolValidator),
(Triglycerides, TriglyceridesValidator),
(Tsh, TshValidator),
(Urea, UreaValidator),
]
ALL_BIOCHEMISTRY_NLP, ALL_BIOCHEMISTRY_VALIDATORS = zip(*ALL_BIOCHEMISTRY_NLP_AND_VALIDATORS) # noqa
# =============================================================================
# Command-line entry point
# =============================================================================
def test_all(verbose: bool = False) -> None:
"""
Test all parsers in this module.
"""
for cls in ALL_BIOCHEMISTRY_NLP:
cls(None, None).test(verbose=verbose)
if __name__ == '__main__':
main_only_quicksetup_rootlogger(level=logging.DEBUG)
test_all(verbose=True)
| gpl-3.0 | -816,009,918,376,030,600 | 32.329965 | 101 | 0.502608 | false |
transientlunatic/minke | minke/antenna.py | 1 | 3841 | """
This module provides functions to calculate antenna factors for a given time, a given sky location and a given detector.
Adapted from the implementation in pylal
"""
import sys
from math import *
import lal
import lalsimulation
__author__ = "Alexander Dietz <[email protected]>; Daniel Williams <[email protected]>"
def response( gpsTime, rightAscension, declination, inclination,
polarization, unit, det ):
"""
response( gpsTime, rightAscension, declination, inclination,
polarization, unit, detector )
Calculates the antenna factors for a detector 'detector' (e.g. 'H1')
at a given gps time (as integer) for a given sky location
(rightAscension, declination) in some unit (degree/radians).
This computation also takes into account a specific inclination
and polarization.
The returned values are: (f-plus, f-cross, f-average, q-value).
Example: antenna.response( 854378604.780, 11.089, 42.308, 0, 0, 'radians', 'H1' )
"""
# check the input arguments
if unit =='radians':
ra_rad = rightAscension
de_rad = declination
psi_rad = polarization
iota_rad = inclination
elif unit =='degree':
ra_rad = rightAscension/180.0*pi
de_rad = declination/180.0*pi
psi_rad = polarization/180.0*pi
iota_rad = inclination/180.0*pi
else:
raise ValueError("Unknown unit %s" % unit)
    # calculate GMST from the GPS time
gps = lal.LIGOTimeGPS( gpsTime )
gmst_rad = lal.GreenwichMeanSiderealTime(gps)
# Get the detector from its prefix
try:
detector = lalsimulation.DetectorPrefixToLALDetector(det)
except KeyError:
raise ValueError("ERROR. Key %s is not a valid detector prefix." % (det))
# get the correct response data
response = detector.response
# actual computation of antenna factors
f_plus, f_cross = lal.ComputeDetAMResponse(response, ra_rad, de_rad,
psi_rad, gmst_rad)
f_ave=sqrt( (f_plus*f_plus + f_cross*f_cross)/2.0 );
ci=cos( iota_rad );
cc=ci*ci;
    # calculate q-value, i.e. the ratio of effective to real distance
# ref: Duncans PhD, eq. (4.3) on page 57
f_q=sqrt( f_plus*f_plus*(1+cc)*(1+cc)/4.0 + f_cross*f_cross*cc );
# output
return f_plus, f_cross, f_ave, f_q
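# Editor's note on the quantities above: f_ave is the RMS average of the two
# polarisation responses, while f_q folds in the inclination-dependent
# weights (1 + cos^2 iota)/2 for f_plus and cos(iota) for f_cross.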
def timeDelay( gpsTime, rightAscension, declination, unit, det1, det2 ):
"""
timeDelay( gpsTime, rightAscension, declination, unit, det1, det2 )
Calculates the time delay in seconds between the detectors
'det1' and 'det2' (e.g. 'H1') for a sky location at (rightAscension
and declination) which must be given in certain units
    ('radians' or 'degree'). The time is passed as GPS time.
A positive time delay means the GW arrives first at 'det2', then at 'det1'.
Example:
antenna.timeDelay( 877320548.000, 355.084,31.757, 'degree','H1','L1')
0.0011604683260994519
Given these values, the signal arrives first at detector L1,
    and 1.16 ms later at H1
"""
# check the input arguments
if unit =='radians':
ra_rad = rightAscension
de_rad = declination
elif unit =='degree':
ra_rad = rightAscension/180.0*pi
de_rad = declination/180.0*pi
else:
raise ValueError("Unknown unit %s" % unit)
# check input values
if ra_rad<0.0 or ra_rad> 2*pi:
raise ValueError( "ERROR. right ascension=%f "\
"not within reasonable range."\
% (rightAscension))
if de_rad<-pi or de_rad> pi:
raise ValueError( "ERROR. declination=%f not within reasonable range."\
% (declination))
if det1 == det2:
return 0.0
gps = lal.LIGOTimeGPS( gpsTime )
x1 = lalsimulation.DetectorPrefixToLALDetector(det1).location
x2 = lalsimulation.DetectorPrefixToLALDetector(det2).location
timedelay = lal.ArrivalTimeDiff(list(x1), list(x2), ra_rad, de_rad, gps)
return timedelay
| isc | 5,938,514,258,122,833,000 | 30.743802 | 120 | 0.67899 | false |
20tab/twentytab-seo | seo/template_context/context_processors.py | 1 | 2841 | from seo.models import MetaPage, MetaSite
class MetaContent(object):
"""
    MetaContent defines an object that contains meta information about a page
    or publication. This information is included in the template.
"""
def __init__(self):
self.title = ""
self.description = ""
self.keywords = ""
self.author = ""
self.content_type = ""
self.robots = ""
self.generator = ""
self.html_head = ""
def fill_content(self, metaObject):
"""
        This method fills the MetaContent instance with the information
        contained in metaObject.
"""
self.title = metaObject.title
self.description = metaObject.description
self.keywords = metaObject.keywords
self.author = metaObject.author
self.content_type = metaObject.content_type
self.robots = metaObject.robots
self.html_head = metaObject.html_head
        try:  # because Page has no 'generator' attribute
            self.generator = metaObject.generator
        except AttributeError:
self.generator = ''
def check_attr(self, item):
"""
        It checks whether 'item' is defined on this object with a non-empty value
"""
if hasattr(self, item):
if not getattr(self, item) or getattr(self, item) == "":
return False
return True
def jsonToMeta(self, json):
"""
        It sets all items from the given JSON dictionary as attributes on self
"""
for k, v in json.items():
setattr(self, k, v)
def get_fields(self):
"""
It returns this object as a dictionary
"""
return self.__dict__
def __str__(self):
return "%s" % self.title
def set_meta(request):
"""
    This context processor returns the meta information contained in cached files.
    If there is no cache, it computes the dictionary to return.
"""
context_extras = {}
if not request.is_ajax() and hasattr(request, 'upy_context') and request.upy_context['PAGE']:
try:
site = MetaSite.objects.get(default=True)
except MetaSite.DoesNotExist:
site = None
try:
page = MetaPage.objects.get(page=request.upy_context['PAGE'])
except MetaPage.DoesNotExist:
page = None
meta_temp = MetaContent()
attr_list = ('title', 'description', 'keywords', 'author', 'content_type', 'robots', 'generator', 'html_head')
if page:
for item in attr_list:
if hasattr(page, item):
setattr(meta_temp, item, getattr(page, item, ""))
if site:
for item in attr_list:
if hasattr(site, item) and not meta_temp.check_attr(item):
setattr(meta_temp, item, getattr(site, item, ""))
context_extras['META'] = meta_temp
return context_extras
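# Illustrative template usage (editor's sketch; assumes this processor is
# registered among the project's context processors):
#   <title>{{ META.title }}</title>
#   <meta name="description" content="{{ META.description }}">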
| mit | -8,941,388,902,364,435,000 | 31.284091 | 118 | 0.571278 | false |
dogukantufekci/supersalon | config/wsgi.py | 1 | 1452 | """
WSGI config for supersalon project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause | -7,566,110,440,660,778,000 | 36.230769 | 79 | 0.791322 | false |
citrix-openstack-build/python-saharaclient | saharaclient/openstack/common/importutils.py | 1 | 2367 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Import related utilities and helper functions.
"""
import sys
import traceback
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition('.')
__import__(mod_str)
try:
return getattr(sys.modules[mod_str], class_str)
except AttributeError:
raise ImportError('Class %s cannot be found (%s)' %
(class_str,
traceback.format_exception(*sys.exc_info())))
def import_object(import_str, *args, **kwargs):
"""Import a class and return an instance of it."""
return import_class(import_str)(*args, **kwargs)
def import_object_ns(name_space, import_str, *args, **kwargs):
"""Tries to import object from default namespace.
    Imports a class and returns an instance of it, first by trying
    to find the class in a default namespace, then falling back to
a full path if not found in the default namespace.
"""
import_value = "%s.%s" % (name_space, import_str)
try:
return import_class(import_value)(*args, **kwargs)
except ImportError:
return import_class(import_str)(*args, **kwargs)
def import_module(import_str):
"""Import a module."""
__import__(import_str)
return sys.modules[import_str]
def import_versioned_module(version, submodule=None):
module = 'saharaclient.v%s' % version
if submodule:
module = '.'.join((module, submodule))
return import_module(module)
def try_import(import_str, default=None):
"""Try to import a module and if it fails return default."""
try:
return import_module(import_str)
except ImportError:
return default
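# Minimal usage sketch (editor's illustration; the module names below are
# examples, not part of this API):
if __name__ == '__main__':
    json_mod = try_import('json')             # stdlib module: import succeeds
    missing = try_import('no_such_module_x')  # unknown module: returns default
    assert json_mod is not None and missing is None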
| apache-2.0 | 5,951,684,899,121,669,000 | 31.424658 | 78 | 0.667934 | false |
rfyiamcool/TimeCost | timecost.py | 1 | 1310 | import time
import functools
class TimeCost(object):
def __init__(self, unit='s', precision=4, logger=None):
self.start = None
self.end = None
self.total = 0
self.unit = unit
self.precision = precision
self.__unitfactor = {'s': 1,
'ms': 1000,
'us': 1000000}
self.logger = logger
def __call__(self, f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
with self:
return f(*args, **kwargs)
return wrapped
def __enter__(self):
if self.unit not in self.__unitfactor:
raise KeyError('Unsupported time unit.')
if self.precision < 0:
            raise ValueError('precision must be >= 0')
self.start = time.time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.end = time.time()
self.total = (self.end - self.start) * self.__unitfactor[self.unit]
if self.precision != 0:
self.total = round(self.total, self.precision)
else:
self.total = int(self.total)
if self.logger:
self.logger.info('this cost {0}{1}'.format(self.total, self.unit))
def __str__(self):
return 'this cost {0}{1}'.format(self.total, self.unit)
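# Usage sketch (editor's illustration, not part of the original module):
if __name__ == '__main__':
    # As a context manager:
    with TimeCost(unit='ms', precision=2) as tc:
        sum(range(100000))
    print(tc)  # e.g. "this cost 1.23ms"
    # As a decorator; keep a reference to read the elapsed time afterwards:
    timer = TimeCost(unit='us')
    @timer
    def work():
        return sum(range(1000))
    work()
    print(timer)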
| mit | 9,133,844,000,565,606,000 | 29.465116 | 78 | 0.529008 | false |
ahartoto/lendingclub2 | lendingclub2/request.py | 1 | 2731 | # Filename: request.py
"""
LendingClub2 Request Module
Interface functions:
get
post
"""
# Standard libraries
import datetime
import time
# Requests
import requests
# Lending Club
from lendingclub2.authorization import Authorization
from lendingclub2.config import REQUEST_LIMIT_PER_SEC
from lendingclub2.error import LCError
__LAST_REQUEST_TIMESTAMP = None
# pylint: disable=global-statement
def get(*args, **kwargs):
"""
Wrapper around :py:func:`requests.get` function.
:param args: tuple - positional arguments for :py:func:`requests.get`.
:param kwargs: dict - keyword arguments for :py:func:`requests.get`.
:returns: instance of :py:class:`requests.Response`.
"""
global __LAST_REQUEST_TIMESTAMP
__add_headers_to_kwargs(kwargs)
__wait_request()
try:
response = requests.get(*args, **kwargs)
__LAST_REQUEST_TIMESTAMP = datetime.datetime.now()
return response
except requests.ConnectionError as exc:
fstr = "Cannot connect correctly"
raise LCError(fstr, details=str(exc))
# pylint: enable=global-statement
# pylint: disable=global-statement
def post(*args, **kwargs):
"""
Wrapper around :py:func:`requests.post` function.
:param args: tuple - positional arguments for :py:func:`requests.post`.
:param kwargs: dict - keyword arguments for :py:func:`requests.post`.
:returns: instance of :py:class:`requests.Response`.
"""
global __LAST_REQUEST_TIMESTAMP
__add_headers_to_kwargs(kwargs)
__wait_request()
try:
response = requests.post(*args, **kwargs)
__LAST_REQUEST_TIMESTAMP = datetime.datetime.now()
return response
except requests.ConnectionError as exc:
fstr = "Cannot connect correctly"
raise LCError(fstr, details=str(exc))
# pylint: enable=global-statement
# Internal functions
def __add_headers_to_kwargs(kwargs):
"""
Add authorization key to the headers in keyword arguments.
:param kwargs: dict
"""
auth = Authorization()
if 'headers' in kwargs:
for key, value in auth.header.items():
kwargs['headers'][key] = value
else:
kwargs['headers'] = auth.header
def __wait_request():
"""
    Ensure that requests are not sent faster than the allowed rate.
"""
if __LAST_REQUEST_TIMESTAMP is None:
return
now = datetime.datetime.now()
delta = now - __LAST_REQUEST_TIMESTAMP
total_seconds = delta.total_seconds()
wait_time_between_requests = 1.0 / REQUEST_LIMIT_PER_SEC
if total_seconds < wait_time_between_requests:
wait_time = wait_time_between_requests - total_seconds
time.sleep(wait_time)
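# Throttling arithmetic (editor's note): with REQUEST_LIMIT_PER_SEC == 2, the
# minimum spacing between requests is 1.0 / 2 == 0.5 s; a call issued 0.2 s
# after the previous one therefore sleeps for the remaining 0.3 s.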
| mit | -9,145,021,347,564,711,000 | 26.585859 | 75 | 0.671183 | false |
zyga/arrowhead | xkcd518.py | 1 | 3193 | #!/usr/bin/env python3
from arrowhead import Flow, step, arrow, main
def ask(prompt):
answer = None
while answer not in ('yes', 'no'):
answer = input(prompt + ' ')
return answer
class XKCD518(Flow):
"""
https://xkcd.com/518/
"""
@step(initial=True, level=1)
@arrow('do_you_understand_flowcharts')
def start(step):
"""
START
"""
print(step.Meta.label)
# ---------------
@step(level=2)
@arrow(to='good', value='yes')
@arrow(to='okay_you_see_the_line_labeled_yes', value='no')
def do_you_understand_flowcharts(step):
"""
Do you understand flowcharts?
"""
return ask(step.Meta.label)
@step(level=2)
@arrow(to='lets_go_drink')
def good(step):
print(step.Meta.label)
# ---------------
@step(level=3)
@arrow(to='hey_I_should_try_installing_freebsd')
def lets_go_drink(step):
"""
Let's go drink.
"""
print(step.Meta.label)
@step(accepting=True, level=3)
def hey_I_should_try_installing_freebsd(step):
"""
Hey, I should try installing freeBSD!
"""
print(step.Meta.label)
# ---------------
@step(level=4)
@arrow(to='and_you_can_see_ones_labeled_no', value='yes')
@arrow(to='but_you_see_the_ones_labeled_no', value='no')
def okay_you_see_the_line_labeled_yes(step):
"""
Okay. You see the line labeled 'yes'?
"""
return ask(step.Meta.label)
@step(level=4)
@arrow(to='good', value='yes')
@arrow(to='but_you_just_followed_them_twice', value='no')
def and_you_can_see_ones_labeled_no(step):
"""
...and you can see the ones labeled 'no'?
"""
return ask(step.Meta.label)
# ---------------
@step(level=5)
@arrow(to='wait_what', value='yes')
@arrow(to='listen', value='no')
def but_you_see_the_ones_labeled_no(step):
"""
But you see the ones labeled "no"?
"""
return ask(step.Meta.label)
# ---------------
@step(accepting=True, level=5)
def wait_what(step):
"""
Wait, what!
"""
print(step.Meta.label)
# ---------------
@step(level=6)
@arrow(to='I_hate_you')
def listen(step):
"""
Listen
"""
print(step.Meta.label)
@step(accepting=True, level=6)
def I_hate_you(step):
"""
I hate you
"""
print(step.Meta.label)
# ---------------
@step(level=5)
@arrow(to='that_wasnt_a_question', value='yes')
@arrow(to='that_wasnt_a_question', value='no')
def but_you_just_followed_them_twice(step):
"""
But you just followed them twice!
"""
return ask(step.Meta.label)
@step(level=5)
@arrow(to='screw_it')
def that_wasnt_a_question(step):
"""
(That wasn't a question)
"""
print(step.Meta.label)
@step(level=4)
@arrow(to='lets_go_drink')
def screw_it(step):
"""
Screw it.
"""
print(step.Meta.label)
if __name__ == '__main__':
main(XKCD518)
| bsd-3-clause | -9,054,405,839,265,277,000 | 21.173611 | 62 | 0.509239 | false |
erykoff/redmapper | redmapper/calibration/prepmembers.py | 1 | 3678 | """Classes related to preparing members for the next calibration iteration
"""
import os
import numpy as np
import esutil
from ..catalog import Entry, Catalog
from ..galaxy import GalaxyCatalog
from ..utilities import read_members
from ..configuration import Configuration
class PrepMembers(object):
"""
Class to prepare members for input to the next calibration iteration.
"""
def __init__(self, conf):
"""
Instantiate a PrepMembers object.
Parameters
----------
conf: `str` or `redmapper.Configuration`
Config filename or configuration object
"""
if not isinstance(conf, Configuration):
self.config = Configuration(conf)
else:
self.config = conf
def run(self, mode):
"""
Run the member preparation.
Output members are put into self.config.zmemfile.
Parameters
----------
mode: `str`
May be "z_init": use initial cluster seed redshift as member redshift or
may be "cg": use the most likely central spec_z as member redshift
Raises
------
RuntimeError: If mode is not "z_init" or "cg".
"""
cat = Catalog.from_fits_file(self.config.catfile)
if mode == 'z_init':
cat_z = cat.z_init
elif mode == 'cg':
cat_z = cat.cg_spec_z
else:
raise RuntimeError("Unsupported mode %s" % (mode))
mem = read_members(self.config.catfile)
# Cut the clusters
use, = np.where((cat.Lambda / cat.scaleval > self.config.calib_minlambda) &
(cat.scaleval > 0.0) &
(np.abs(cat_z - cat.z_lambda) < self.config.calib_zlambda_clean_nsig * cat.z_lambda_e))
cat = cat[use]
cat_z = cat_z[use]
# Cut the members
use, = np.where((mem.p * mem.theta_i * mem.theta_r > self.config.calib_pcut) |
(mem.pcol > self.config.calib_pcut))
mem = mem[use]
# Match cut clusters to members
a, b = esutil.numpy_util.match(cat.mem_match_id, mem.mem_match_id)
newmem = Catalog(np.zeros(b.size, dtype=[('z', 'f4'),
('z_lambda', 'f4'),
('p', 'f4'),
('pcol', 'f4'),
('central', 'i2'),
('ra', 'f8'),
('dec', 'f8'),
('mag', 'f4', self.config.nmag),
('mag_err', 'f4', self.config.nmag),
('refmag', 'f4'),
('refmag_err', 'f4'),
('ebv', 'f4')]))
newmem.ra[:] = mem.ra[b]
newmem.dec[:] = mem.dec[b]
newmem.p[:] = mem.p[b]
newmem.pcol[:] = mem.pcol[b]
newmem.mag[:, :] = mem.mag[b, :]
newmem.mag_err[:, :] = mem.mag_err[b, :]
newmem.refmag[:] = mem.refmag[b]
newmem.refmag_err[:] = mem.refmag_err[b]
newmem.ebv[:] = mem.ebv[b]
cent, = np.where(mem.r[b] < 0.0001)
newmem.central[cent] = 1
newmem.z[:] = cat_z[a]
newmem.z_lambda = cat.z_lambda[a]
if self.config.calib_smooth > 0.0:
newmem.z[:] += self.config.calib_smooth * np.random.normal(size=newmem.size)
newmem.to_fits_file(self.config.zmemfile)
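# Typical invocation (editor's illustration; the config filename is only an
# example): PrepMembers('run_config.yml').run(mode='z_init') writes the
# prepared members to config.zmemfile for the next calibration iteration.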
| apache-2.0 | -1,909,251,253,028,138,500 | 32.743119 | 111 | 0.467374 | false |
rchatterjee/nocrack | newcode/dte/honey_vault.py | 1 | 12274 | import json
import math
import os
import sys
BASE_DIR = os.getcwd()
sys.path.append(BASE_DIR)
from .honey_enc import DTE, DTE_random
from pcfg.pcfg import TrainedGrammar
import honeyvault_config as hny_config
from Crypto.Cipher import AES
from Crypto.Hash import SHA256
from Crypto.Protocol.KDF import PBKDF1
from Crypto.Util import Counter
import copy, struct
from helper import (open_, print_err, process_parallel, random,
print_production)
from pcfg.pcfg import VaultDistPCFG
from collections import OrderedDict
# from IPython.core import ultratb
# sys.excepthook = ultratb.FormattedTB(color_scheme='Linux', call_pdb=1)
MAX_INT = hny_config.MAX_INT
# -------------------------------------------------------------------------------
def do_crypto_setup(mp, salt):
key = PBKDF1(mp, salt, 16, 100, SHA256)
ctr = Counter.new(128, initial_value=int(254))
aes = AES.new(key, AES.MODE_CTR, counter=ctr)
return aes
def copy_from_old_parallel(args):
odte, ndte, i, p = args
ret = []
pw = odte.decode_pw(p)
if not pw:
return i, pw, []
ret = ndte.encode_pw(pw)
if not ret:
print("Cool I failed in encoding!! Kudos to me. pw: {}, i: {}"
.format(pw, i))
ret = pw
else:
tpw = ndte.decode_pw(ret)
assert pw == tpw, "Encoding-Decoding password is wrong. Expecting {!r}, got {!r}"\
.format(pw, tpw)
return i, pw, ret
class HoneyVault:
s1 = hny_config.HONEY_VAULT_S1
s2 = hny_config.HONEY_VAULT_S2
s_g = hny_config.HONEY_VAULT_GRAMMAR_SIZE
s = hny_config.HONEY_VAULT_STORAGE_SIZE
vault_total_size = hny_config.HONEY_VAULT_ENCODING_SIZE
sample = [10, 19, 20, 31]
mpass_set_size = hny_config.HONEY_VAULT_MACHINE_PASS_SET_SIZE
def __init__(self, vault_fl, mp):
self.pcfg = TrainedGrammar() # Default large trained PCFG
domain_hash_map_fl = hny_config.STATIC_DOMAIN_HASH_LIST
self.domain_hash_map = json.load(open_(domain_hash_map_fl))
self.vault_fl = vault_fl
self.mp = mp
self.initialize_vault(mp)
self.dte = DTE(self.pcfg.decode_grammar(self.H))
def get_domain_index(self, d):
h = SHA256.new()
h.update(d.encode('utf-8'))
d_hash = h.hexdigest()[:32]
try:
i = self.domain_hash_map[d_hash]
if i > self.s1:
raise KeyError
else:
return i
except KeyError:
sys.stderr.write('WARNING! S1 miss for %s\n' % d)
x = struct.unpack('8I', h.digest())[0]
return self.s1 + x % self.s2
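    # Editor's note on the fallback above: domains missing from the static S1
    # table hash deterministically into the S2 region (index s1 + x mod s2),
    # so every domain always resolves to some slot in the vault.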
def initialize_vault(self, mp):
vd = VaultDistPCFG()
if not os.path.exists(self.vault_fl):
print_production("\nCould not find the vault file @ {}, so, sit tight, " \
"creating a dummy vault for you." \
"\nShould not take too long...\n".format(self.vault_fl))
t_s = random.randints(0, MAX_INT, hny_config.HONEY_VAULT_ENCODING_SIZE)
self.H = t_s[:hny_config.HONEY_VAULT_GRAMMAR_SIZE]
t_s = t_s[hny_config.HONEY_VAULT_GRAMMAR_SIZE:]
self.S = [t_s[i:i + hny_config.PASSWORD_LENGTH]
for i in range(0, self.s * hny_config.PASSWORD_LENGTH, hny_config.PASSWORD_LENGTH)]
assert all(len(s) == hny_config.PASSWORD_LENGTH for s in
self.S), "All passwords encodings are not of correct length.\n {}".format(
(len(s), hny_config.PASSWORD_LENGTH) for s in self.S)
self.machine_pass_set = list('0' * (self.mpass_set_size * 8))
k = int(math.ceil(hny_config.HONEY_VAULT_STORAGE_SIZE * \
hny_config.MACHINE_GENRATED_PASS_PROB / 1000.0))
for i in random.sample(list(range(hny_config.HONEY_VAULT_STORAGE_SIZE)), k):
self.machine_pass_set[i] = '1'
self.salt = os.urandom(8)
self.save(mp)
else:
self.load(mp)
def gen_password(self, mp, domain_list, size=10):
"""
        Generate random password strings for each of the domains
        specified, and save them in the corresponding locations.
Master password (@mp) is required for that.
"""
r_dte = DTE_random()
reply = []
for d in domain_list:
i = self.get_domain_index(d)
p, encoding = r_dte.generate_and_encode_password(size)
self.S[i] = encoding
self.machine_pass_set[i] = '1'
reply.append(p)
self.save()
return OrderedDict(list(zip(domain_list, reply)))
def add_password(self, domain_pw_map):
# print self.dte.G
nG = copy.deepcopy(self.dte.G)
print_production("Updating the grammar with new passwords..")
nG.update_grammar(*(list(domain_pw_map.values())))
ndte = DTE(nG)
        # TODO: fix this; currently it's a hack to work around my bad
        # parsing. A password can be generated in a different way than the
        # most probable way in which it is parsed. The code is supposed to
        # pick one parse tree at random, but currently picks the most
        # probable one. Needs fixing for security reasons. Will add a ticket.
new_encoding_of_old_pw = []
current_passwords = ['' for _ in range(hny_config.HONEY_VAULT_STORAGE_SIZE)]
if self.dte and (ndte != self.dte):
# if new dte is different then copy the existing human chosen passwords.
# Machine generated passwords are not necessary to re-encode. As their grammar
# does not change. NEED TO CHECK SECURITY.
print_production("Some new rules found, so adding them to the new grammar. " \
"Should not take too long...\n")
data = [(self.dte, ndte, i, p)
for i, p in enumerate(self.S)
if self.machine_pass_set[i] == '0']
if hny_config.DEBUG:
result = map(copy_from_old_parallel, data)
else:
result = process_parallel(copy_from_old_parallel, data, func_load=100)
for i, pw, pw_encodings in result:
if isinstance(pw_encodings, str):
new_encoding_of_old_pw.append((i, pw_encodings))
current_passwords[i] = pw
self.S[i] = pw_encodings
# print_err(self.H[:10])
# G_ = self.pcfg.decode_grammar(self.H)
# print_err("-"*50)
# print_err("Original: ", nG, '\n', '='*50)
# print_err("After Decoding:", G_)
# assert G_ == nG
        print_production("\nAdding new passwords..\n")
for domain, pw in list(domain_pw_map.items()):
i = self.get_domain_index(domain)
print(">>", i, pw, domain)
current_passwords[i] = pw
self.S[i] = ndte.encode_pw(pw)
self.machine_pass_set[i] = '0'
        # Clean up the mess caused by missed passwords: some passwords might
        # have failed in copy_from_old_parallel, so they need to be re-encoded.
if new_encoding_of_old_pw:
print_err("\n<<<<<<\nFixing Mess!!\n{}>>>>>>>".format(new_encoding_of_old_pw))
nG.update_grammar(*[p for i, p in new_encoding_of_old_pw])
for i, p in new_encoding_of_old_pw:
self.S[i] = ndte.encode_pw(p)
self.machine_pass_set[i] = '0'
if hny_config.DEBUG:
for i, pw_encodings in enumerate(self.S):
if self.machine_pass_set[i] == '0':
tpw = ndte.decode_pw(pw_encodings)
tpwold = current_passwords[i]
assert len(tpwold) <= 0 or tpw == tpwold, \
"The re-encoding is faulty. Expecting: '{}' at {}. Got '{}'.".format(tpwold, i, tpw)
try:
self.H = self.pcfg.encode_grammar(nG)
except ValueError as ex:
print(ex)
print("Sorry the grammar is not complete enough to encode your "
"passwords. This error will be fixed in future.")
exit(-1)
self.dte = ndte
def get_password(self, domain_list, send_raw=False):
pw_list = []
r_dte = DTE_random()
for d in domain_list:
i = self.get_domain_index(d)
if self.machine_pass_set[i] == '1':
pw = r_dte.decode_pw(self.S[i])
else:
pw = self.dte.decode_pw(self.S[i])
pw_list.append(pw)
return OrderedDict(list(zip(domain_list, pw_list)))
def get_sample_decoding(self):
"""
check some of the sample decoding to make sure you are
not accidentally spoiling the vault
"""
assert all(len(self.S[i]) == hny_config.PASSWORD_LENGTH for i in self.sample), \
"Corrupted Encoding!!"
return [self.dte.decode_pw(self.S[i]) for i in self.sample]
def get_all_pass(self):
"""
Returns all the passwords in the vault.
"""
r_dte = DTE_random()
return ((i, self.dte.decode_pw(s)) if self.machine_pass_set[i] == '0' \
else (i, r_dte.decode_pw(s))
for i, s in enumerate(self.S))
def save(self, mp=None):
if not mp:
mp = self.mp
with open(self.vault_fl, 'wb') as fvault:
fvault.write(self.salt)
buf = list(self.H[:])
for i, a in enumerate(self.S):
buf.extend(a)
aes = do_crypto_setup(mp, self.salt)
fvault.write(aes.encrypt(
struct.pack('!%sI' % \
hny_config.HONEY_VAULT_ENCODING_SIZE,
*buf))
)
for i in range(self.mpass_set_size):
fvault.write(struct.pack('!B', int(
''.join(self.machine_pass_set[i * 8:(i + 1) * 8]), 2)))
def load(self, mp):
with open(self.vault_fl, 'rb') as fvault:
self.salt = fvault.read(8)
size_of_int = struct.calcsize('I')
aes = do_crypto_setup(mp, self.salt)
buf = aes.decrypt(
fvault.read(hny_config.HONEY_VAULT_ENCODING_SIZE * size_of_int))
t_s = struct.unpack(
'!%sI' % hny_config.HONEY_VAULT_ENCODING_SIZE, buf)
self.H = t_s[:hny_config.HONEY_VAULT_GRAMMAR_SIZE]
t_s = t_s[hny_config.HONEY_VAULT_GRAMMAR_SIZE:]
self.S = [t_s[i * hny_config.PASSWORD_LENGTH:(i + 1) * hny_config.PASSWORD_LENGTH]
for i in range(self.s)]
buf = fvault.read(self.mpass_set_size)
self.machine_pass_set = \
list(''.join(["{0:08b}".format(x)
for x in struct.unpack(
"%sB" % self.mpass_set_size, buf)]))
assert len(self.machine_pass_set) >= len(self.S)
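    # On-disk layout (editor's summary of save()/load() above): an 8-byte
    # salt, then HONEY_VAULT_ENCODING_SIZE unsigned 32-bit ints encrypted
    # with AES-CTR (key derived from the master password via PBKDF1/SHA256),
    # then the machine-password bitmask packed as mpass_set_size bytes.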
# ----------------------------------------------------------------------
def main():
    if len(sys.argv) < 5 or sys.argv[1] in ['-h', '--help']:
print('''Taste the HoneyVault1.1 - a New Password Encrypting paradigm!
|| Encrypt with confidence ||
--encode vault_plain.txt masterpassword vault_cipher.txt
--decode vault_cipher.txt masterpassword stdout
''')
else:
f1 = sys.argv[2]
mp = sys.argv[3]
f2 = sys.argv[4]
if sys.argv[1] == '--encode':
vault = [l.strip().split(',')[2] for l in open(f1) if l[0] != '#']
cipher = vault_encrypt(vault, mp)
with open(f2, 'wb') as outf:
n = len(vault)
outf.write(struct.pack('<I', n))
outf.write(cipher)
            print("Your Vault is encrypted! Now you can delete the plaintext vault file.")
elif sys.argv[1] == '--decode':
dt = open(f1, 'rb').read()
n = struct.unpack('<I', dt[:4])[0]
vault = vault_decrypt(dt[4:], mp, n)
print(vault)
else:
print("Sorry Anthofila! Command not recognised.")
if __name__ == "__main__":
print("TODO: add main/test")
main()
| mit | -92,666,579,949,946,460 | 39.242623 | 108 | 0.538374 | false |
EconForge/dolo | dolo/linter.py | 1 | 17659 | # import ast
# import json
# import ruamel.yaml as ry
# from ruamel.yaml.comments import CommentedSeq
# from dolo.compiler.symbolic import check_expression
# from dolo.compiler.recipes import recipes
# from dolo.misc.termcolor import colored
# class Compare:
# def __init__(self):
# self.d = {}
# def compare(self, A, B):
# if isinstance(A, ast.Name) and (A.id[0] == '_'):
# if A.id not in self.d:
# self.d[A.id] = B
# return True
# else:
# return self.compare(self.d[A.id], B)
# if not (A.__class__ == B.__class__):
# return False
# if isinstance(A, ast.Name):
# return A.id == B.id
# elif isinstance(A, ast.Call):
# if not self.compare(A.func, B.func):
# return False
# if not len(A.args) == len(B.args):
# return False
# for i in range(len(A.args)):
# if not self.compare(A.args[i], B.args[i]):
# return False
# return True
# elif isinstance(A, ast.Num):
# return A.n == B.n
# elif isinstance(A, ast.Expr):
# return self.compare(A.value, B.value)
# elif isinstance(A, ast.Module):
# if not len(A.body) == len(B.body):
# return False
# for i in range(len(A.body)):
# if not self.compare(A.body[i], B.body[i]):
# return False
# return True
# elif isinstance(A, ast.BinOp):
# if not isinstance(A.op, B.op.__class__):
# return False
# if not self.compare(A.left, B.left):
# return False
# if not self.compare(A.right, B.right):
# return False
# return True
# elif isinstance(A, ast.UnaryOp):
# if not isinstance(A.op, B.op.__class__):
# return False
# return self.compare(A.operand, B.operand)
# elif isinstance(A, ast.Subscript):
# if not self.compare(A.value, B.value):
# return False
# return self.compare(A.slice, B.slice)
# elif isinstance(A, ast.Index):
# return self.compare(A.value, B.value)
# elif isinstance(A, ast.Compare):
# if not self.compare(A.left, B.left):
# return False
# if not len(A.ops) == len(B.ops):
# return False
# for i in range(len(A.ops)):
# if not self.compare(A.ops[i], B.ops[i]):
# return False
# if not len(A.comparators) == len(B.comparators):
# return False
# for i in range(len(A.comparators)):
# if not self.compare(A.comparators[i], B.comparators[i]):
# return False
# return True
# elif isinstance(A, ast.In):
# return True
# elif isinstance(A, (ast.Eq, ast.LtE)):
# return True
# else:
# print(A.__class__)
# raise Exception("Not implemented")
# def compare_strings(a, b):
# t1 = ast.parse(a)
# t2 = ast.parse(b)
# comp = Compare()
# val = comp.compare(t1, t2)
# return val
# def match(m, s):
# if isinstance(m, str):
# m = ast.parse(m).body[0].value
# if isinstance(s, str):
# s = ast.parse(s).body[0].value
# comp = Compare()
# val = comp.compare(m, s)
# d = comp.d
# if len(d) == 0:
# return val
# else:
# return d
# known_symbol_types = {
# 'dtcc': recipes['dtcc']['symbols'],
# }
# class ModelException(Exception):
# type = 'error'
# def check_symbol_validity(s):
# import ast
# val = ast.parse(s).body[0].value
# assert (isinstance(val, ast.Name))
# def check_symbols(data):
# # can raise three types of exceptions
# # - unknown symbol
# # - invalid symbol
# # - already declared
# # add: not declared if missing 'states', 'controls' ?
# exceptions = []
# symbols = data['symbols']
# cm_symbols = symbols
# model_type = 'dtcc'
# already_declared = {} # symbol: symbol_type, position
# for key, values in cm_symbols.items():
# # (start_line, start_column, end_line, end_column) of the key
# if key not in known_symbol_types[model_type]:
# l0, c0, l1, c1 = cm_symbols.lc.data[key]
# exc = ModelException(
# "Unknown symbol type '{}'".format(
# key, model_type))
# exc.pos = (l0, c0, l1, c1)
# # print(l0,c0,l1,c1)
# exceptions.append(exc)
# assert (isinstance(values, CommentedSeq))
# for i, v in enumerate(values):
# (l0, c0) = values.lc.data[i]
# length = len(v)
# l1 = l0
# c1 = c0 + length
# try:
# check_symbol_validity(v)
# except:
# exc = ModelException("Invalid symbol '{}'".format(v))
# exc.pos = (l0, c0, l1, c1)
# exceptions.append(exc)
# if v in already_declared:
# ll = already_declared[v]
# exc = ModelException(
# "Symbol '{}' already declared as '{}'. (pos {})".format(
# v, ll[0], (ll[1][0] + 1, ll[1][1])))
# exc.pos = (l0, c0, l1, c1)
# exceptions.append(exc)
# else:
# already_declared[v] = (key, (l0, c0))
# return exceptions
# def check_equations(data):
# model_type = data['model_type']
# pos0 = data.lc.data['equations']
# equations = data['equations']
# exceptions = []
# recipe = recipes[model_type]
# specs = recipe['specs']
# for eq_type in specs.keys():
# if (eq_type not in equations) and (not specs[eq_type].get(
# 'optional', True)):
# exc = ModelException("Missing equation type {}.".format(eq_type))
# exc.pos = pos0
# exceptions.append(exc)
# already_declared = {}
# unknown = []
# for eq_type in equations.keys():
# pos = equations.lc.data[eq_type]
# if eq_type not in specs:
# exc = ModelException("Unknown equation type {}.".format(eq_type))
# exc.pos = pos
# exceptions.append(exc)
# unknown.append(eq_type)
# # BUG: doesn't produce an error when a block is declared twice
# # should be raised by ruaml.yaml ?
# elif eq_type in already_declared.keys():
# exc = ModelException(
# "Equation type {} declared twice at ({})".format(eq_type, pos))
# exc.pos = pos
# exceptions.append(exc)
# else:
# already_declared[eq_type] = pos
# for eq_type in [k for k in equations.keys() if k not in unknown]:
# for n, eq in enumerate(equations[eq_type]):
# eq = eq.replace('<=', '<').replace('==',
# '=').replace('=', '==').replace(
# '<', '<=')
# # print(eq)
# pos = equations[eq_type].lc.data[n]
# try:
# ast.parse(eq)
# except SyntaxError as e:
# exc = ModelException("Syntax Error.")
# exc.pos = [
# pos[0], pos[1] + e.offset, pos[0], pos[1] + e.offset
# ]
# exceptions.append(exc)
# # TEMP: incorrect ordering
# if specs[eq_type].get('target'):
# for n, eq in enumerate(equations[eq_type]):
# eq = eq.replace('<=', '<').replace('==', '=').replace(
# '=', '==').replace('<', '<=')
# pos = equations[eq_type].lc.data[n]
# lhs_name = str.split(eq, '=')[0].strip()
# target = specs[eq_type]['target'][0]
# if lhs_name not in data['symbols'][target]:
# exc = ModelException(
# "Undeclared assignement target '{}'. Add it to '{}'.".
# format(lhs_name, target))
#                     "Undeclared assignment target '{}'. Add it to '{}'.".
# exceptions.append(exc)
# # if n>len(data['symbols'][target]):
# else:
# right_name = data['symbols'][target][n]
# if lhs_name != right_name:
# exc = ModelException(
# "Left hand side should be '{}' instead of '{}'.".
# format(right_name, lhs_name))
# exc.pos = [
# pos[0], pos[1], pos[0], pos[1] + len(lhs_name)
# ]
# exceptions.append(exc)
# # temp
# return exceptions
# def check_definitions(data):
# if 'definitions' not in data:
# return []
# definitions = data['definitions']
# if definitions is None:
# return []
# exceptions = []
# known_symbols = sum([[*v] for v in data['symbols'].values()], [])
# allowed_symbols = {v: (0, ) for v in known_symbols} # TEMP
# for p in data['symbols']['parameters']:
# allowed_symbols[p] = (0, )
# new_definitions = dict()
# for k, v in definitions.items():
# pos = definitions.lc.data[k]
# if k in known_symbols:
# exc = ModelException(
# 'Symbol {} has already been defined as a model symbol.'.format(
# k))
# exc.pos = pos
# exceptions.append(exc)
# continue
# if k in new_definitions:
# exc = ModelException(
# 'Symbol {} cannot be defined twice.'.format(k))
# exc.pos = pos
# exceptions.append(exc)
# continue
# try:
# check_symbol_validity(k)
# except:
# exc = ModelException("Invalid symbol '{}'".format(k))
# exc.pos = pos
# exceptions.append(exc)
# # pos = equations[eq_type].lc.data[n]
# try:
# expr = ast.parse(str(v))
# # print(allowed_symbols)
# check = check_expression(expr, allowed_symbols)
# # print(check['problems'])
# for pb in check['problems']:
# name, t, offset, err_type = [pb[0], pb[1], pb[2], pb[3]]
# if err_type == 'timing_error':
# exc = Exception(
# 'Timing for variable {} could not be determined.'.
# format(pb[0]))
# elif err_type == 'incorrect_timing':
# exc = Exception(
# 'Variable {} cannot have time {}. (Allowed: {})'.
# format(name, t, pb[4]))
# elif err_type == 'unknown_function':
# exc = Exception(
# 'Unknown variable/function {}.'.format(name))
# elif err_type == 'unknown_variable':
# exc = Exception(
# 'Unknown variable/parameter {}.'.format(name))
# else:
# print(err_type)
# exc.pos = (pos[0], pos[1] + offset, pos[0],
# pos[1] + offset + len(name))
# exc.type = 'error'
# exceptions.append(exc)
# new_definitions[k] = v
# allowed_symbols[k] = (0, ) # TEMP
# # allowed_symbols[k] = None
# except SyntaxError as e:
# pp = pos # TODO: find right mark for pp
# exc = ModelException("Syntax Error.")
# exc.pos = [pp[0], pp[1] + e.offset, pp[0], pp[1] + e.offset]
# exceptions.append(exc)
# return exceptions
# def check_calibration(data):
# # what happens here if symbols are not clean ?
# symbols = data['symbols']
# pos0 = data.lc.data['calibration']
# calibration = data['calibration']
# exceptions = []
# all_symbols = []
# for v in symbols.values():
# all_symbols += v
# for s in all_symbols:
# if (s not in calibration.keys()) and (s not in symbols["exogenous"]):
# # should skip invalid symbols there
# exc = ModelException(
# "Symbol {} has no calibrated value.".format(s))
# exc.pos = pos0
# exc.type = 'warning'
# exceptions.append(exc)
# for s in calibration.keys():
# val = str(calibration[s])
# try:
# ast.parse(val)
# except SyntaxError as e:
# pos = calibration.lc.data[s]
# exc = ModelException("Syntax Error.")
# exc.pos = [pos[0], pos[1] + e.offset, pos[0], pos[1] + e.offset]
# exceptions.append(exc)
# return exceptions
# def check_all(data):
# def serious(exsc):
# return ('error' in [e.type for e in exsc])
# exceptions = check_infos(data)
# if serious(exceptions):
# return exceptions
# exceptions = check_symbols(data)
# if serious(exceptions):
# return exceptions
# exceptions += check_definitions(data)
# if serious(exceptions):
# return exceptions
# exceptions += check_equations(data)
# if serious(exceptions):
# return exceptions
# exceptions += check_calibration(data)
# if serious(exceptions):
# return exceptions
# return exceptions
# def human_format(err):
# err_type = err['type']
# err_type = colored(
# err_type, color=('red' if err_type == 'error' else 'yellow'))
# err_range = str([e + 1 for e in err['range'][0]])[1:-1]
# return '{:7}: {:6}: {}'.format(err_type, err_range, err['text'])
# def check_infos(data):
# exceptions = []
# if 'model_type' in data:
# model_type = data['model_type']
# if model_type not in ['dtcc', 'dtmscc', 'dtcscc', 'dynare']:
#             exc = ModelException('Unknown model type: {}.'.format(
# str(model_type)))
# exc.pos = data.lc.data['model_type']
# exc.type = 'error'
# exceptions.append(exc)
# else:
# model_type = 'dtcc'
# data['model_type'] = 'dtcc'
# # exc = ModelException("Missing field: 'model_type'.")
# # exc.pos = (0,0,0,0)
# # exc.type='error'
# # exceptions.append(exc)
# if 'name' not in data:
# exc = ModelException("Missing field: 'name'.")
# exc.pos = (0, 0, 0, 0)
# exc.type = 'warning'
# exceptions.append(exc)
# return exceptions
# def lint(txt, source='<string>', format='human', catch_exception=False):
# # raise ModelException if it doesn't work correctly
# if isinstance(txt, str):
# try:
# data = ry.load(txt, ry.RoundTripLoader)
# except Exception as exc:
# if not catch_exception:
# raise exc
# return [] # should return parse error
# else:
# # txt is then assumed to be a ruamel structure
# data = txt
# if not ('symbols' in data or 'equations' in data or 'calibration' in data):
# # this is probably not a yaml filename
# output = []
# else:
# try:
# exceptions = check_all(data)
# except Exception as e:
# if not catch_exception:
# raise(e)
# exc = ModelException("Linter Error: Uncaught Exception.")
# exc.pos = [0, 0, 0, 0]
# exc.type = 'error'
# exceptions = [exc]
# output = []
# for k in exceptions:
# try:
# err_type = k.type
# except:
# err_type = 'error'
# output.append({
# 'type':
# err_type,
# 'source':
# source,
# 'range': ((k.pos[0], k.pos[1]), (k.pos[2], k.pos[3])),
# 'text':
# k.args[0]
# })
# if format == 'json':
# return (json.dumps(output))
# elif format == 'human':
# return (str.join("\n", [human_format(e) for e in output]))
# elif not format:
# return output
# else:
#         raise ModelException("Unknown format {}.".format(format))
# TODO:
# - check name (already defined by somebody else?)
# - description: ?
# - calibration:
# - incorrect key
# - warning if not a known symbol ?
# - not a recognized identifier
# - defined twice
# - impossible to solve in closed form (depends on ...)
# - incorrect equation
# - grammatically incorrect
# - contains timed variables
# - warnings:
# - missing values
# - equations: symbols already known (beware of speed issues)
# - unknown group of equations
# - incorrect syntax
# - undeclared variable (and not a function)
# - indexed parameter
# - incorrect order
# - incorrect complementarities
# - incorrect recipe: unexpected symbol type
# - nonzero residuals (warning, to be done without compiling)
# - options: if present
# - approximation_space:
# - inconsistent boundaries
# - must equal number of states
# - distribution:
# - same size as shocks
| bsd-2-clause | -5,633,091,538,059,207,000 | 33.693517 | 81 | 0.482474 | false |
smurfix/HomEvenT | irrigation/rainman/migrations/0031_auto.py | 1 | 20462 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding M2M table for field xdays on 'Group'
db.create_table('rainman_group_xdays', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('group', models.ForeignKey(orm['rainman.group'], null=False)),
('day', models.ForeignKey(orm['rainman.day'], null=False))
))
db.create_unique('rainman_group_xdays', ['group_id', 'day_id'])
def backwards(self, orm):
# Removing M2M table for field xdays on 'Group'
db.delete_table('rainman_group_xdays')
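    # The `models` dict below is South's frozen ORM snapshot; South uses it to
    # build the fake `orm` object passed into forwards()/backwards().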
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'rainman.controller': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'Controller'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'max_on': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'controllers'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'rainman.day': {
'Meta': {'object_name': 'Day'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'rainman.daytime': {
'Meta': {'unique_together': "(('day', 'descr'),)", 'object_name': 'DayTime'},
'day': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'times'", 'to': "orm['rainman.Day']"}),
'descr': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'rainman.environmenteffect': {
'Meta': {'object_name': 'EnvironmentEffect'},
'factor': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'param_group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'environment_effects'", 'to': "orm['rainman.ParamGroup']"}),
'sun': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'temp': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'wind': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
'rainman.feed': {
'Meta': {'object_name': 'Feed'},
'db_max_flow_wait': ('django.db.models.fields.PositiveIntegerField', [], {'default': '300', 'db_column': "'max_flow_wait'"}),
'flow': ('django.db.models.fields.FloatField', [], {'default': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'feed_meters'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'rainman.group': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'Group'},
'adj_rain': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'adj_sun': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'adj_temp': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'adj_wind': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'days': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'groups_y'", 'blank': 'True', 'to': "orm['rainman.Day']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['rainman.Site']"}),
'valves': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'groups'", 'symmetrical': 'False', 'to': "orm['rainman.Valve']"}),
'xdays': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'groups_n'", 'blank': 'True', 'to': "orm['rainman.Day']"})
},
'rainman.groupadjust': {
'Meta': {'unique_together': "(('group', 'start'),)", 'object_name': 'GroupAdjust'},
'factor': ('django.db.models.fields.FloatField', [], {}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'adjusters'", 'to': "orm['rainman.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'rainman.groupoverride': {
'Meta': {'unique_together': "(('group', 'name'), ('group', 'start'))", 'object_name': 'GroupOverride'},
'allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'db_duration': ('django.db.models.fields.PositiveIntegerField', [], {'db_column': "'duration'"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'overrides'", 'to': "orm['rainman.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'off_level': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'on_level': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'rainman.history': {
'Meta': {'unique_together': "(('site', 'time'),)", 'object_name': 'History'},
'feed': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rain': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'history'", 'to': "orm['rainman.Site']"}),
'sun': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'temp': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'wind': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
'rainman.level': {
'Meta': {'unique_together': "(('valve', 'time'),)", 'object_name': 'Level'},
'flow': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.FloatField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'valve': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'levels'", 'to': "orm['rainman.Valve']"})
},
'rainman.log': {
'Meta': {'object_name': 'Log'},
'controller': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'logs'", 'null': 'True', 'to': "orm['rainman.Controller']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': "orm['rainman.Site']"}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 5, 22, 0, 0)', 'db_index': 'True'}),
'valve': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'logs'", 'null': 'True', 'to': "orm['rainman.Valve']"})
},
'rainman.paramgroup': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'ParamGroup'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'factor': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'rain': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'param_groups'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'rainman.rainmeter': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'RainMeter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rain_meters'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'weight': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'})
},
'rainman.schedule': {
'Meta': {'unique_together': "(('valve', 'start'),)", 'object_name': 'Schedule'},
'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'max_length': '1'}),
'db_duration': ('django.db.models.fields.PositiveIntegerField', [], {'db_column': "'duration'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seen': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'max_length': '1'}),
'start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'valve': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'schedules'", 'to': "orm['rainman.Valve']"})
},
'rainman.site': {
'Meta': {'object_name': 'Site'},
'db_rain_delay': ('django.db.models.fields.PositiveIntegerField', [], {'default': '300', 'db_column': "'rain_delay'"}),
'db_rate': ('django.db.models.fields.FloatField', [], {'default': '2', 'db_column': "'rate'"}),
'host': ('django.db.models.fields.CharField', [], {'default': "'localhost'", 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'port': ('django.db.models.fields.PositiveIntegerField', [], {'default': '50005'}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'})
},
'rainman.sunmeter': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'SunMeter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sun_meters'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'weight': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'})
},
'rainman.tempmeter': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'TempMeter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'temp_meters'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'weight': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'})
},
'rainman.userforgroup': {
'Meta': {'object_name': 'UserForGroup'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'users'", 'to': "orm['rainman.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'rainman.valve': {
'Meta': {'unique_together': "(('controller', 'name'),)", 'object_name': 'Valve'},
'area': ('django.db.models.fields.FloatField', [], {}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'controller': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'valves'", 'to': "orm['rainman.Controller']"}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'valves'", 'to': "orm['rainman.Feed']"}),
'flow': ('django.db.models.fields.FloatField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'max_level': ('django.db.models.fields.FloatField', [], {'default': '10'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'param_group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'valves'", 'to': "orm['rainman.ParamGroup']"}),
'priority': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'runoff': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'shade': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'start_level': ('django.db.models.fields.FloatField', [], {'default': '8'}),
'stop_level': ('django.db.models.fields.FloatField', [], {'default': '3'}),
'time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'verbose': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'rainman.valveoverride': {
'Meta': {'unique_together': "(('valve', 'name'), ('valve', 'start'))", 'object_name': 'ValveOverride'},
'db_duration': ('django.db.models.fields.PositiveIntegerField', [], {'db_column': "'duration'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'off_level': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'on_level': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'valve': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'overrides'", 'to': "orm['rainman.Valve']"})
},
'rainman.windmeter': {
'Meta': {'unique_together': "(('site', 'name'),)", 'object_name': 'WindMeter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'wind_meters'", 'to': "orm['rainman.Site']"}),
'var': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'weight': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'})
}
}
complete_apps = ['rainman'] | gpl-3.0 | 4,006,488,282,154,659,000 | 77.402299 | 182 | 0.537973 | false |
rwl/PyCIM | CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/DynamicsMetaBlockParameterReference.py | 1 | 4926 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.ENTSOE.Dynamics.IEC61970.Core.CoreIdentifiedObject import CoreIdentifiedObject
class DynamicsMetaBlockParameterReference(CoreIdentifiedObject):
def __init__(self, StandardControlBlock_MetaBlockConnectable=None, MetaBlockConnectable=None, MemberOf_MetaBlockReference=None, *args, **kw_args):
"""Initialises a new 'DynamicsMetaBlockParameterReference' instance.
@param StandardControlBlock_MetaBlockConnectable:
@param MetaBlockConnectable:
@param MemberOf_MetaBlockReference:
"""
self._StandardControlBlock_MetaBlockConnectable = None
self.StandardControlBlock_MetaBlockConnectable = StandardControlBlock_MetaBlockConnectable
self._MetaBlockConnectable = None
self.MetaBlockConnectable = MetaBlockConnectable
self._MemberOf_MetaBlockReference = None
self.MemberOf_MetaBlockReference = MemberOf_MetaBlockReference
super(DynamicsMetaBlockParameterReference, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["StandardControlBlock_MetaBlockConnectable", "MetaBlockConnectable", "MemberOf_MetaBlockReference"]
_many_refs = []
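    # The property setters below keep both ends of each association in sync:
    # assigning a new related object removes self from the old object's
    # back-reference list and appends self to the new object's list.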
def getStandardControlBlock_MetaBlockConnectable(self):
return self._StandardControlBlock_MetaBlockConnectable
def setStandardControlBlock_MetaBlockConnectable(self, value):
if self._StandardControlBlock_MetaBlockConnectable is not None:
filtered = [x for x in self.StandardControlBlock_MetaBlockConnectable.StandardControlBlock_MetaBlockParameterReference if x != self]
self._StandardControlBlock_MetaBlockConnectable._StandardControlBlock_MetaBlockParameterReference = filtered
self._StandardControlBlock_MetaBlockConnectable = value
if self._StandardControlBlock_MetaBlockConnectable is not None:
if self not in self._StandardControlBlock_MetaBlockConnectable._StandardControlBlock_MetaBlockParameterReference:
self._StandardControlBlock_MetaBlockConnectable._StandardControlBlock_MetaBlockParameterReference.append(self)
StandardControlBlock_MetaBlockConnectable = property(getStandardControlBlock_MetaBlockConnectable, setStandardControlBlock_MetaBlockConnectable)
def getMetaBlockConnectable(self):
return self._MetaBlockConnectable
def setMetaBlockConnectable(self, value):
if self._MetaBlockConnectable is not None:
filtered = [x for x in self.MetaBlockConnectable.MetaBlockParameterReference if x != self]
self._MetaBlockConnectable._MetaBlockParameterReference = filtered
self._MetaBlockConnectable = value
if self._MetaBlockConnectable is not None:
if self not in self._MetaBlockConnectable._MetaBlockParameterReference:
self._MetaBlockConnectable._MetaBlockParameterReference.append(self)
MetaBlockConnectable = property(getMetaBlockConnectable, setMetaBlockConnectable)
def getMemberOf_MetaBlockReference(self):
return self._MemberOf_MetaBlockReference
def setMemberOf_MetaBlockReference(self, value):
if self._MemberOf_MetaBlockReference is not None:
filtered = [x for x in self.MemberOf_MetaBlockReference.MetaBlockParameterReference if x != self]
self._MemberOf_MetaBlockReference._MetaBlockParameterReference = filtered
self._MemberOf_MetaBlockReference = value
if self._MemberOf_MetaBlockReference is not None:
if self not in self._MemberOf_MetaBlockReference._MetaBlockParameterReference:
self._MemberOf_MetaBlockReference._MetaBlockParameterReference.append(self)
MemberOf_MetaBlockReference = property(getMemberOf_MetaBlockReference, setMemberOf_MetaBlockReference)
| mit | -4,645,534,073,048,918,000 | 49.783505 | 150 | 0.75944 | false |
jvce92/web-tdd | lists/tests/testView.py | 1 | 4453 | from django.test import TestCase
from django.core.urlresolvers import resolve
from lists.views import homePage
from django.http import HttpRequest
from django.template.loader import render_to_string
from lists.models import Item, List
from django.utils.html import escape
# Create your tests here.
#
# class smokeTest(TestCase):
#
# def testBadMath(self):
# self.assertEqual(1+1,3)
class homePageTest(TestCase):
def testRootUrlResolvesToHomePage(self):
found = resolve('/')
self.assertEqual(found.func,homePage)
# def testHomePageReturnsHtml(self):
# request = HttpRequest()
# response = homePage(request)
# expectedHtml = render_to_string('home.html')
# self.assertEqual(expectedHtml,response.content.decode())
class ListViewTest(TestCase):
def testUsesListTemplate(self):
myList = List.objects.create()
response = self.client.get('/lists/%d/' % (myList.id,))
self.assertTemplateUsed(response, 'list.html')
def testDisplaysOnlyItemsForThatList(self):
correctList = List.objects.create()
Item.objects.create(text='item1', list = correctList)
Item.objects.create(text='item2', list = correctList)
wrongList = List.objects.create()
Item.objects.create(text='otherItem1', list = wrongList)
Item.objects.create(text='otherItem2', list = wrongList)
response = self.client.get('/lists/%d/' % (correctList.id, ))
self.assertContains(response, 'item1')
self.assertContains(response, 'item2')
self.assertNotContains(response, 'otherItem1')
self.assertNotContains(response, 'otherItem2')
def testDisplayAllItems(self):
myList = List.objects.create()
Item.objects.create(text='item1', list = myList)
Item.objects.create(text='item2', list = myList)
response = self.client.get('/lists/%d/' % (myList.id, ))
self.assertContains(response, 'item1')
self.assertContains(response, 'item2')
def testUseDifferentTemplates(self):
myList = List.objects.create()
response = self.client.get('/lists/%d/' % (myList.id, ))
self.assertTemplateUsed(response,'list.html')
class NewListTest(TestCase):
def testHandlePostRequest(self):
self.client.post(
'/lists/new', data = {'item_text':'New item', }
)
self.assertEqual(Item.objects.count(),1)
newItem = Item.objects.first()
self.assertEqual(newItem.text,'New item')
def testRedirectsAfterPost(self):
response = self.client.post(
'/lists/new', data = {'item_text': 'New item', }
)
newList = List.objects.first()
self.assertRedirects(response, '/lists/%d/' % (newList.id, ))
def testCanSavePostToAnExistingList(self):
wrongList = List.objects.create()
correctList = List.objects.create()
self.client.post(
'/lists/%d/' % (correctList.id,), data = {'item_text':'New item for existing list'}
)
self.assertEqual(Item.objects.count(),1)
newItem = Item.objects.first()
self.assertEqual(newItem.text,'New item for existing list')
self.assertEqual(newItem.list,correctList)
def testRedirectsToListView(self):
wrongList = List.objects.create()
correctList = List.objects.create()
response = self.client.post(
'/lists/%d/' % (correctList.id,), data = {'item_text':'New item for existing list'}
)
self.assertRedirects(response,'/lists/%d/' % (correctList.id, ))
def testPassesCorrectListToTemplate(self):
wrongList = List.objects.create()
correctList = List.objects.create()
response = self.client.get(
'/lists/%d/' % (correctList.id, ),
)
self.assertEqual(response.context['list'],correctList)
def testValidationErrorsAreSentToHomePageTemplate(self):
response = self.client.post('/lists/new', data={'item_text':''})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'home.html')
expectedError = escape("You can't have an empty list item")
self.assertContains(response, expectedError)
def testEmptyItemsAreNotSaved(self):
response = self.client.post('/lists/new', data={'item_text':''})
self.assertEqual(List.objects.count(),0)
self.assertEqual(Item.objects.count(),0)
| gpl-3.0 | 5,634,385,062,296,922,000 | 33.789063 | 91 | 0.650573 | false |
radical-cybertools/radical.repex | old/misc/experimental_async/async_with_pipeline_suspend.py | 1 | 9815 | #!/usr/bin/env python
import radical.utils as ru
#import radical.analytics as ra
import radical.entk as re
from radical.entk import Pipeline, Stage, Task, AppManager
import os
import tarfile
import writeInputs
import time
import git
#import replica
#os.environ['RADICAL_SAGA_VERBOSE'] = 'INFO'
os.environ['RADICAL_ENTK_VERBOSE'] = 'INFO'
os.environ['RP_ENABLE_OLD_DEFINES'] = 'True'
os.environ['SAGA_PTY_SSH_TIMEOUT'] = '2000'
#os.environ['RADICAL_VERBOSE'] = 'INFO'
"""
Every instance of the Replica object instantiates a pipeline for itself. Once the pipeline is created, an MD task is carried out.
At the end of this MD task/stage, every replica transitions into a wait state, all the while looking for other replicas that are also
waiting. The number of replicas waiting is written to a list that has a maximum size limit. As soon as this limit is reached the
replicas on the list begin to exchange and the list is emptied. The list can now be populated by new replicas finishing their MD
stages. Termination criterion: ALL replicas have performed at least N exchange attempts (i.e. "cycles" specified by the user).
There are 3 data structures maintained here:
1) List of replicas that have completed MD and are awaiting exchange.
2) Array containing the number of times each replica has exchanged.
3) Dictionary containing locations of all replica sandboxes.
"""
replicas = 4
replica_cores = 1
min_temp = 100
max_temp = 200
timesteps = 1000
basename = 'ace-ala'
cycle = 1
md_executable = '/home/scm177/mantel/AMBER/amber14/bin/sander'
SYNCHRONICITY = 0.5
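# Bookkeeping for the asynchronous exchange logic described in the docstring
# above: waiting_replicas collects replicas that finished MD and await an
# exchange partner, replica_cycles counts completed cycles per replica, and
# max_waiting_list is the size at which the waiting replicas exchange.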
wait_ratio = 0
max_waiting_list = 2
waiting_replicas = []
min_completed_cycles = 3
replica_cycles = [0]*replicas
wait_count = 0
def setup_replicas(replicas, min_temp, max_temp, timesteps, basename):
writeInputs.writeInputs(max_temp=max_temp, min_temp=min_temp, replicas=replicas, timesteps=timesteps, basename=basename)
tar = tarfile.open("input_files.tar", "w")
for name in [basename + ".prmtop", basename + ".inpcrd", basename + ".mdin"]:
tar.add(name)
for r in range(replicas):
tar.add('mdin-{replica}-{cycle}'.format(replica=r, cycle=0))
tar.close()
for r in range(replicas):
os.remove('mdin-{replica}-{cycle}'.format(replica=r, cycle=0))
setup_p = Pipeline()
setup_p.name = 'untarPipe'
repo = git.Repo('.', search_parent_directories=True)
aux_function_path = repo.working_tree_dir
untar_stg = Stage()
untar_stg.name = 'untarStg'
#Untar Task
untar_tsk = Task()
untar_tsk.name = 'untarTsk'
untar_tsk.executable = ['python']
untar_tsk.upload_input_data = ['untar_input_files.py', 'input_files.tar']
untar_tsk.arguments = ['untar_input_files.py', 'input_files.tar']
untar_tsk.cpu_reqs = 1
untar_tsk.post_exec = []
untar_stg.add_tasks(untar_tsk)
setup_p.add_stages(untar_stg)
global replica_sandbox
replica_sandbox='$Pipeline_%s_Stage_%s_Task_%s'%(setup_p.name, untar_stg.name, untar_tsk.name)
return setup_p
####_----------------------------------------------------------init replicas
class Replica(object):
def __init__(self):
self.state_history = []
def replica_pipeline(self, rid, cycle, replica_cores, md_executable, timesteps, replica_sandbox):
def add_ex_stg(rid, cycle):
#ex stg here
ex_tsk = Task()
ex_stg = Stage()
ex_tsk.name = 'extsk-{replica}-{cycle}'.format(replica=rid, cycle=cycle)
            for wid in range(len(waiting_replicas)):  # avoid shadowing the rid parameter
                ex_tsk.link_input_data += ['%s/mdinfo-{replica}-{cycle}'.format(replica=wid, cycle=cycle)%replica_sandbox]
ex_tsk.arguments = ['t_ex_gibbs_async.py', len(waiting_replicas)] #This needs to be fixed
ex_tsk.executable = ['python']
ex_tsk.cpu_reqs = {
'processes': 1,
'process_type': '',
'threads_per_process': 1,
'thread_type': None
}
ex_tsk.pre_exec = ['export dummy_variable=19']
ex_stg.add_tasks(ex_tsk)
return ex_stg
def add_md_stg(rid,cycle):
            # md stg here
md_tsk = Task()
md_stg = Stage()
md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=rid, cycle=cycle)
md_tsk.link_input_data += ['%s/inpcrd' %replica_sandbox,
'%s/prmtop' %replica_sandbox,
'%s/mdin-{replica}-{cycle}'.format(replica=rid, cycle=0) %replica_sandbox]
md_tsk.arguments = ['-O',
'-i', 'mdin-{replica}-{cycle}'.format(replica=rid, cycle=0),
'-p', 'prmtop',
'-c', 'inpcrd',
'-o', 'out',
'-r', '%s/restrt-{replica}-{cycle}'.format(replica=rid, cycle=cycle) %replica_sandbox,
'-x', 'mdcrd',
'-inf', '%s/mdinfo-{replica}-{cycle}'.format(replica=rid, cycle=cycle) %replica_sandbox]
md_tsk.executable = ['/home/scm177/mantel/AMBER/amber14/bin/sander']
md_tsk.cpu_reqs = {
'processes': replica_cores,
'process_type': '',
'threads_per_process': 1,
'thread_type': None
}
md_tsk.pre_exec = ['export dummy_variable=19', 'echo $SHARED']
md_stg.add_tasks(md_tsk)
            md_stg.post_exec = {
                # NOTE: the callbacks originally named here (md_post, suspend,
                # exchange_stg) are never defined in this file; the defined
                # functions below appear to be the intended wiring.
                'condition': synchronicity_function,
                'on_true': propagate_cycle,
                'on_false': end_func
            }
return md_stg
def synchronicity_function():
"""
synchronicity function should evaluate the following:
1) Has the replica in THIS pipeline completed enough cycles?
2) If yes, Is the replica threshold met? I.e. is the exchange list large enough?
3) If no, add to waiting list
4) Is the replica is THIS pipeline the LOWEST rid in the list?
If 1 and 2 return True, the Synchronicity Function returns a True.
If the first is true and second is false, the synchronicity function returns False
EXTREMELY IMPORTANT: Remember to clear replica related variables, replica lists etc., after the adaptivity
operations have completed! i.e. after the propagate_cycle() function. Not necessary to clear after end_func().
"""
global replica_cycles
global ex_pipeline
global max_waiting_list
global min_completed_cycles
print replica_cycles, rid
replica_cycles[rid] += 1
print replica_cycles
if min(replica_cycles) < min_completed_cycles:
waiting_replicas.append(rid)
if len(waiting_replicas) < max_waiting_list:
p_replica.suspend()
#p_replica.resume() # There seems to be an issue here. We potentially need the "resume" function to be triggered
# by a different pipeline.
ex_pipeline = min(waiting_replicas)
print "Synchronicity Function returns True"
return True
return False
def propagate_cycle():
"""
This function adds two stages to the pipeline: an exchange stage and an MD stage.
If the pipeline is not the "ex_pipeline", it stalls and adds only the MD stage until the EX pipeline has completed
the EX task.
"""
            global waiting_replicas  # so the reset below clears the shared list
            if rid == ex_pipeline: ### FIX THIS TO REFER TO THE CORRECT NAME OF THE EX PIPELINE
# This adds an Ex task.
ex_stg = add_ex_stg(rid, cycle)
p_replica.add_stages(ex_stg)
# And the next MD stage
md_stg = add_md_stg(rid, cycle)
p_replica.add_stages(md_stg)
else:
                while ex_stg.state != "COMPLETED": ### FIX THIS TO REFER TO THE CORRECT NAME OF THE EX STAGE
#time.sleep(1)
pass
md_stg = add_md_stg(rid, cycle)
p_replica.add_stages(md_stg)
waiting_replicas = [] # EMPTY REPLICA WAITING LIST
def end_func():
print "DONE"
p_replica = Pipeline()
p_replica.name = 'p_{rid}'.format(rid=rid)
md_stg = add_md_stg(rid, cycle)
p_replica.add_stages(md_stg)
return p_replica
system = setup_replicas(replicas, min_temp, max_temp, timesteps, basename)
replica=[]
replica_pipelines = []
for rid in range(replicas):
print rid
replica = Replica()
r_pipeline = replica.replica_pipeline(rid, cycle, replica_cores, md_executable, timesteps, replica_sandbox)
replica_pipelines.append(r_pipeline)
os.environ['RADICAL_PILOT_DBURL'] = "mongodb://smush:[email protected]:47361/db_repex_4"
res_dict ={
"resource" : 'local.localhost',
"walltime" : 30,
"cpus" : 4,
}
appman = AppManager(autoterminate=False, port=32769)
appman.resource_desc = res_dict
appman.workflow = set([system])
appman.run()
appman.workflow = set(replica_pipelines)
appman.run()
appman.resource_terminate()
| mit | -1,247,269,295,089,884,200 | 34.690909 | 130 | 0.563423 | false |
FrederichCheng/incubator-superset | superset/sql_lab.py | 1 | 8776 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from time import sleep
from datetime import datetime
import json
import logging
import uuid
import pandas as pd
import sqlalchemy
from sqlalchemy.pool import NullPool
from sqlalchemy.orm import sessionmaker
from celery.exceptions import SoftTimeLimitExceeded
from superset import (app, db, utils, dataframe, results_backend)
from superset.models.sql_lab import Query
from superset.sql_parse import SupersetQuery
from superset.db_engine_specs import LimitMethod
from superset.jinja_context import get_template_processor
from superset.utils import QueryStatus, get_celery_app
config = app.config
celery_app = get_celery_app(config)
stats_logger = app.config.get('STATS_LOGGER')
SQLLAB_TIMEOUT = config.get('SQLLAB_ASYNC_TIME_LIMIT_SEC', 600)
class SqlLabException(Exception):
pass
def dedup(l, suffix='__'):
"""De-duplicates a list of string by suffixing a counter
Always returns the same number of entries as provided, and always returns
unique values.
>>> print(','.join(dedup(['foo', 'bar', 'bar', 'bar'])))
foo,bar,bar__1,bar__2
"""
new_l = []
seen = {}
for s in l:
if s in seen:
seen[s] += 1
s += suffix + str(seen[s])
else:
seen[s] = 0
new_l.append(s)
return new_l
def get_query(query_id, session, retry_count=5):
"""attemps to get the query and retry if it cannot"""
query = None
attempt = 0
while not query and attempt < retry_count:
try:
query = session.query(Query).filter_by(id=query_id).one()
except Exception:
attempt += 1
logging.error(
"Query with id `{}` could not be retrieved".format(query_id))
stats_logger.incr('error_attempting_orm_query_' + str(attempt))
logging.error("Sleeping for a sec before retrying...")
sleep(1)
if not query:
stats_logger.incr('error_failed_at_getting_orm_query')
raise SqlLabException("Failed at getting query")
return query
def get_session(nullpool):
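    # Celery tasks run outside the web request lifecycle; when nullpool is
    # set, build a dedicated engine with NullPool so pooled connections are
    # not shared across worker processes.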
if nullpool:
engine = sqlalchemy.create_engine(
app.config.get('SQLALCHEMY_DATABASE_URI'), poolclass=NullPool)
session_class = sessionmaker()
session_class.configure(bind=engine)
return session_class()
session = db.session()
session.commit() # HACK
return session
@celery_app.task(bind=True, soft_time_limit=SQLLAB_TIMEOUT)
def get_sql_results(
ctask, query_id, return_results=True, store_results=False, user_name=None):
"""Executes the sql query returns the results."""
try:
return execute_sql(
ctask, query_id, return_results, store_results, user_name)
except Exception as e:
logging.exception(e)
stats_logger.incr('error_sqllab_unhandled')
sesh = get_session(not ctask.request.called_directly)
query = get_query(query_id, sesh)
query.error_message = str(e)
query.status = QueryStatus.FAILED
query.tmp_table_name = None
sesh.commit()
raise
def execute_sql(
ctask, query_id, return_results=True, store_results=False, user_name=None):
"""Executes the sql query returns the results."""
session = get_session(not ctask.request.called_directly)
query = get_query(query_id, session)
payload = dict(query_id=query_id)
database = query.database
db_engine_spec = database.db_engine_spec
db_engine_spec.patch()
def handle_error(msg):
"""Local method handling error while processing the SQL"""
query.error_message = msg
query.status = QueryStatus.FAILED
query.tmp_table_name = None
session.commit()
payload.update({
'status': query.status,
'error': msg,
})
return payload
if store_results and not results_backend:
return handle_error("Results backend isn't configured.")
# Limit enforced only for retrieving the data, not for the CTA queries.
superset_query = SupersetQuery(query.sql)
executed_sql = superset_query.stripped()
if not superset_query.is_select() and not database.allow_dml:
return handle_error(
"Only `SELECT` statements are allowed against this database")
if query.select_as_cta:
if not superset_query.is_select():
return handle_error(
"Only `SELECT` statements can be used with the CREATE TABLE "
"feature.")
if not query.tmp_table_name:
start_dttm = datetime.fromtimestamp(query.start_time)
query.tmp_table_name = 'tmp_{}_table_{}'.format(
query.user_id, start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
executed_sql = superset_query.as_create_table(query.tmp_table_name)
query.select_as_cta_used = True
elif (query.limit and superset_query.is_select()
and db_engine_spec.limit_method == LimitMethod.WRAP_SQL):
executed_sql = database.wrap_sql_limit(executed_sql, query.limit)
query.limit_used = True
try:
template_processor = get_template_processor(
database=database, query=query)
executed_sql = template_processor.process_template(executed_sql)
except Exception as e:
logging.exception(e)
msg = "Template rendering failed: " + utils.error_msg_from_exception(e)
return handle_error(msg)
query.executed_sql = executed_sql
query.status = QueryStatus.RUNNING
query.start_running_time = utils.now_as_float()
session.merge(query)
session.commit()
logging.info("Set query to 'running'")
engine = database.get_sqla_engine(
schema=query.schema, nullpool=not ctask.request.called_directly, user_name=user_name)
try:
conn = engine.raw_connection()
cursor = conn.cursor()
logging.info("Running query: \n{}".format(executed_sql))
logging.info(query.executed_sql)
cursor.execute(query.executed_sql,
**db_engine_spec.cursor_execute_kwargs)
logging.info("Handling cursor")
db_engine_spec.handle_cursor(cursor, query, session)
logging.info("Fetching data: {}".format(query.to_dict()))
data = db_engine_spec.fetch_data(cursor, query.limit)
except SoftTimeLimitExceeded as e:
logging.exception(e)
conn.close()
return handle_error(
"SQL Lab timeout. This environment's policy is to kill queries "
"after {} seconds.".format(SQLLAB_TIMEOUT))
except Exception as e:
logging.exception(e)
conn.close()
return handle_error(db_engine_spec.extract_error_message(e))
logging.info("Fetching cursor description")
cursor_description = cursor.description
conn.commit()
conn.close()
if query.status == utils.QueryStatus.STOPPED:
return json.dumps(
{
'query_id': query.id,
'status': query.status,
'query': query.to_dict(),
},
default=utils.json_iso_dttm_ser)
column_names = (
[col[0] for col in cursor_description] if cursor_description else [])
column_names = dedup(column_names)
cdf = dataframe.SupersetDataFrame(
pd.DataFrame(list(data), columns=column_names))
query.rows = cdf.size
query.progress = 100
query.status = QueryStatus.SUCCESS
if query.select_as_cta:
query.select_sql = '{}'.format(
database.select_star(
query.tmp_table_name,
limit=query.limit,
schema=database.force_ctas_schema,
show_cols=False,
latest_partition=False, ))
query.end_time = utils.now_as_float()
session.merge(query)
session.flush()
payload.update({
'status': query.status,
'data': cdf.data if cdf.data else [],
'columns': cdf.columns if cdf.columns else [],
'query': query.to_dict(),
})
if store_results:
key = '{}'.format(uuid.uuid4())
logging.info("Storing results in results backend, key: {}".format(key))
json_payload = json.dumps(payload, default=utils.json_iso_dttm_ser)
results_backend.set(key, utils.zlib_compress(json_payload))
query.results_key = key
query.end_result_backend_time = utils.now_as_float()
session.merge(query)
session.commit()
if return_results:
return payload
| apache-2.0 | -4,792,927,945,449,039,000 | 33.825397 | 97 | 0.634458 | false |
dextervip/rpv | GerenDisponibilidade/setup.py | 1 | 1613 | from distutils.core import setup
from distutils.command.install import INSTALL_SCHEMES
import os
VERSION = '0.3'
# Make the data files go to the right place.
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
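# With 'data' mapped to purelib, the template files declared below install
# into site-packages alongside the package instead of a separate data prefix.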
template_dir = "registration_defaults/templates/registration"
templates = [os.path.join(template_dir, f) for f in os.listdir(template_dir)]
setup(
name='django-registration-defaults',
version=VERSION,
description="Default templates and settings for James Bennett's"
"django-registration application.",
long_description="This module provides a full set of default templates"
" and settings for ``django-registration`` to ease the process of"
" creating Django apps that require user registration. It depends"
" on ``django-registration``.",
author="Charlie DeTar",
author_email="[email protected]",
url="http://github.com/yourcelf/django-registration-defaults",
license="MIT License",
platforms=["any"],
packages=['registration_defaults'],
package_data={'registration_defaults': templates},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Framework :: Django",
],
include_package_data=True,
)
| gpl-3.0 | -2,980,506,392,158,818,300 | 37.404762 | 104 | 0.688779 | false |
ella/mypage | mypage/pages/migrations/0008_dupe_sites.py | 1 | 3621 |
from south.db import db
from django.db import models
from django.db.models import F
from mypage.pages.models import *
import datetime
class Migration:
def forwards(self, orm):
'''
if not db.dry_run:
orm.UserPage.objects.update(site_copy=F('site'))
orm.SessionPage.objects.update(site_copy=F('site'))
'''
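        # The ORM-based update above is kept for reference but disabled; the
        # raw SQL below copies each page's site FK into the new site_copy
        # column by joining the subclass tables to pages_page.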
db.execute('''
update pages_userpage p JOIN pages_page ON p.page_ptr_id = pages_page.id set p.site_copy_id=site_id;
''')
db.execute('''
update pages_sessionpage p JOIN pages_page ON p.page_ptr_id = pages_page.id set p.site_copy_id=site_id;
''')
def backwards(self, orm):
pass
models = {
'sites.site': {
'Meta': {'ordering': "('domain',)", 'db_table': "'django_site'"},
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'pages.widgetinpage': {
'Meta': {'unique_together': "(('page','widget',),)"},
'config_json': ('models.TextField', [], {}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'page': ('models.ForeignKey', ["orm['pages.Page']"], {'verbose_name': "_('Page')"}),
'rendered_widget': ('models.ForeignKey', ["orm['widgets.RenderedWidget']"], {'null': 'False'}),
'state': ('models.SmallIntegerField', [], {'default': '2'}),
'widget': ('models.ForeignKey', ["orm['widgets.Widget']"], {'verbose_name': "_('Widget')"})
},
'auth.user': {
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'widgets.widget': {
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'pages.page': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'layout_json': ('models.TextField', [], {}),
'site': ('models.ForeignKey', ["orm['sites.Site']"], {'default': ' lambda :settings.SITE_ID'}),
'skin': ('models.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'template': ('models.CharField', [], {'default': "'page.html'", 'max_length': '100'}),
'widgets': ('models.ManyToManyField', ["orm['widgets.Widget']"], {'through': "'WidgetInPage'"})
},
'widgets.renderedwidget': {
'Meta': {'unique_together': "(('widget','state','site',),)"},
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'pages.userpage': {
'Meta': {'_bases': ['mypage.pages.models.Page']},
'page_ptr': ('models.OneToOneField', ["orm['pages.Page']"], {}),
'site_copy': ('models.ForeignKey', ["orm['sites.Site']"], {'default': ' lambda :settings.SITE_ID', 'null': 'True', 'blank': 'True'}),
'user': ('models.ForeignKey', ["orm['auth.User']"], {'db_index': 'True'})
},
'pages.sessionpage': {
'Meta': {'_bases': ['mypage.pages.models.Page']},
'page_ptr': ('models.OneToOneField', ["orm['pages.Page']"], {}),
'session_key': ('models.CharField', ["_('session key')"], {'max_length': '40', 'db_index': 'True'}),
'site_copy': ('models.ForeignKey', ["orm['sites.Site']"], {'default': ' lambda :settings.SITE_ID', 'null': 'True', 'blank': 'True'}),
'updated': ('models.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'False'})
}
}
complete_apps = ['pages']
| bsd-3-clause | -4,512,450,370,742,625,300 | 45.423077 | 145 | 0.495996 | false |
cupy/cupy | tests/cupyx_tests/scipy_tests/linalg_tests/test_decomp_lu.py | 1 | 4936 | import unittest
import warnings
import numpy
import cupy
from cupy import testing
import cupyx.scipy.linalg
if cupyx.scipy._scipy_available:
import scipy.linalg
@testing.gpu
@testing.parameterize(*testing.product({
'shape': [(1, 1), (2, 2), (3, 3), (5, 5), (1, 5), (5, 1), (2, 5), (5, 2)],
}))
@testing.fix_random()
@testing.with_requires('scipy')
class TestLUFactor(unittest.TestCase):
@testing.for_dtypes('fdFD')
def test_lu_factor(self, dtype):
if self.shape[0] != self.shape[1]:
# skip non-square tests since scipy.lu_factor requires square
            raise unittest.SkipTest()
a_cpu = testing.shaped_random(self.shape, numpy, dtype=dtype)
a_gpu = cupy.asarray(a_cpu)
result_cpu = scipy.linalg.lu_factor(a_cpu)
result_gpu = cupyx.scipy.linalg.lu_factor(a_gpu)
assert len(result_cpu) == len(result_gpu)
assert result_cpu[0].dtype == result_gpu[0].dtype
assert result_cpu[1].dtype == result_gpu[1].dtype
cupy.testing.assert_allclose(result_cpu[0], result_gpu[0], atol=1e-5)
cupy.testing.assert_array_equal(result_cpu[1], result_gpu[1])
def check_lu_factor_reconstruction(self, A):
m, n = self.shape
lu, piv = cupyx.scipy.linalg.lu_factor(A)
# extract ``L`` and ``U`` from ``lu``
L = cupy.tril(lu, k=-1)
cupy.fill_diagonal(L, 1.)
L = L[:, :m]
U = cupy.triu(lu)
U = U[:n, :]
# check output shapes
assert lu.shape == (m, n)
assert L.shape == (m, min(m, n))
assert U.shape == (min(m, n), n)
assert piv.shape == (min(m, n),)
# apply pivot (on CPU since slaswp is not available in cupy)
piv = cupy.asnumpy(piv)
rows = numpy.arange(m)
for i, row in enumerate(piv):
if i != row:
rows[i], rows[row] = rows[row], rows[i]
PA = A[rows]
# check that reconstruction is close to original
LU = L.dot(U)
cupy.testing.assert_allclose(LU, PA, atol=1e-5)
@testing.for_dtypes('fdFD')
def test_lu_factor_reconstruction(self, dtype):
A = testing.shaped_random(self.shape, cupy, dtype=dtype)
self.check_lu_factor_reconstruction(A)
@testing.for_dtypes('fdFD')
def test_lu_factor_reconstruction_singular(self, dtype):
        if self.shape[0] != self.shape[1]:
            # skip non-square tests since scipy.lu_factor requires square
            raise unittest.SkipTest()
A = testing.shaped_random(self.shape, cupy, dtype=dtype)
A -= A.mean(axis=0, keepdims=True)
A -= A.mean(axis=1, keepdims=True)
with warnings.catch_warnings():
warnings.simplefilter('ignore', RuntimeWarning)
self.check_lu_factor_reconstruction(A)
@testing.gpu
@testing.parameterize(*testing.product({
'shape': [(1, 1), (2, 2), (3, 3), (5, 5), (1, 5), (5, 1), (2, 5), (5, 2)],
'permute_l': [False, True],
}))
@testing.fix_random()
@testing.with_requires('scipy')
class TestLU(unittest.TestCase):
@testing.for_dtypes('fdFD')
def test_lu(self, dtype):
a_cpu = testing.shaped_random(self.shape, numpy, dtype=dtype)
a_gpu = cupy.asarray(a_cpu)
result_cpu = scipy.linalg.lu(a_cpu, permute_l=self.permute_l)
result_gpu = cupyx.scipy.linalg.lu(a_gpu, permute_l=self.permute_l)
assert len(result_cpu) == len(result_gpu)
if not self.permute_l:
# check permutation matrix
result_cpu = list(result_cpu)
result_gpu = list(result_gpu)
P_cpu = result_cpu.pop(0)
P_gpu = result_gpu.pop(0)
cupy.testing.assert_array_equal(P_gpu, P_cpu)
cupy.testing.assert_allclose(result_gpu[0], result_cpu[0], atol=1e-5)
cupy.testing.assert_allclose(result_gpu[1], result_cpu[1], atol=1e-5)
@testing.for_dtypes('fdFD')
def test_lu_reconstruction(self, dtype):
m, n = self.shape
A = testing.shaped_random(self.shape, cupy, dtype=dtype)
if self.permute_l:
PL, U = cupyx.scipy.linalg.lu(A, permute_l=self.permute_l)
PLU = PL @ U
else:
P, L, U = cupyx.scipy.linalg.lu(A, permute_l=self.permute_l)
PLU = P @ L @ U
# check that reconstruction is close to original
cupy.testing.assert_allclose(PLU, A, atol=1e-5)
@testing.gpu
@testing.parameterize(*testing.product({
'trans': [0, 1, 2],
'shapes': [((4, 4), (4,)), ((5, 5), (5, 2))],
}))
@testing.fix_random()
@testing.with_requires('scipy')
class TestLUSolve(unittest.TestCase):
@testing.for_dtypes('fdFD')
@testing.numpy_cupy_allclose(atol=1e-5, scipy_name='scp')
def test_lu_solve(self, xp, scp, dtype):
a_shape, b_shape = self.shapes
A = testing.shaped_random(a_shape, xp, dtype=dtype)
b = testing.shaped_random(b_shape, xp, dtype=dtype)
lu = scp.linalg.lu_factor(A)
return scp.linalg.lu_solve(lu, b, trans=self.trans)
| mit | 3,208,808,118,296,692,700 | 36.112782 | 78 | 0.595421 | false |
bailabs/bench-v7 | bench/commands/update.py | 1 | 7760 | import click
import sys, os
from bench.config.common_site_config import get_config
from bench.app import pull_all_apps, is_version_upgrade
from bench.utils import (update_bench, validate_upgrade, pre_upgrade, post_upgrade, before_update,
update_requirements, backup_all_sites, patch_sites, build_assets, restart_supervisor_processes)
from bench import patches
# TODO: Not DRY
@click.command('update')
@click.option('--pull', is_flag=True, help="Pull changes in all the apps in bench")
@click.option('--patch', is_flag=True, help="Run migrations for all sites in the bench")
@click.option('--build', is_flag=True, help="Build JS and CSS artifacts for the bench")
@click.option('--bench', is_flag=True, help="Update bench")
@click.option('--requirements', is_flag=True, help="Update requirements")
@click.option('--restart-supervisor', is_flag=True, help="restart supervisor processes after update")
@click.option('--auto', is_flag=True)
@click.option('--upgrade', is_flag=True, help="Required for major version updates")
@click.option('--no-backup', is_flag=True)
@click.option('--force', is_flag=True)
@click.option('--reset', is_flag=True,
help="Hard resets git branch's to their new states overriding any changes and overriding rebase on pull")
@click.option('--force_frappe', is_flag=True)
def update(pull=False, patch=False, build=False, bench=False, auto=False,
restart_supervisor=False, requirements=False,
no_backup=False, upgrade=False, force=False, reset=False,
force_frappe = False):
"Update bench"
print "force_frape {0}".format(force_frappe)
if not (pull or patch or build or bench or requirements):
pull, patch, build, bench, requirements = True, True, True, True, True
if auto:
sys.exit(1)
patches.run(bench_path='.')
conf = get_config(".")
# if bench and conf.get('update_bench_on_update'):
# update_bench()
# restart_update({
# 'pull': pull,
# 'patch': patch,
# 'build': build,
# 'requirements': requirements,
# 'no-backup': no_backup,
# 'restart-supervisor': restart_supervisor,
# 'upgrade': upgrade,
# 'reset': reset
# })
if conf.get('release_bench'):
print 'Release bench, cannot update'
sys.exit(1)
version_upgrade = is_version_upgrade()
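	# is_version_upgrade() returns (is_major_upgrade, current_version,
	# new_version); the flag gates the major-upgrade confirmation below.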
if version_upgrade[0] and not upgrade:
print
print
print "This update will cause a major version change in Frappe/ERPNext from {0} to {1}.".format(
*version_upgrade[1:])
print "This would take significant time to migrate and might break custom apps. Please run `bench update --upgrade` to confirm."
print
print "You can stay on the latest stable release by running `bench switch-to-master` or pin your bench to {0} by running `bench switch-to-v{0}`".format(
version_upgrade[1])
sys.exit(1)
_update(pull, patch, build, bench, auto, restart_supervisor, requirements, no_backup, upgrade, force=force,
reset=reset,force_frappe=force_frappe)
def _update(pull=False, patch=False, build=False, update_bench=False, auto=False, restart_supervisor=False,
requirements=False, no_backup=False, upgrade=False, bench_path='.', force=False, reset=False,
force_frappe=False):
conf = get_config(bench_path=bench_path)
version_upgrade = is_version_upgrade(bench_path=bench_path)
# if version_upgrade[0] and not upgrade:
# raise Exception("Major Version Upgrade")
#
# if upgrade and (version_upgrade[0] or (not version_upgrade[0] and force)):
# validate_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)
before_update(bench_path=bench_path, requirements=requirements)
if pull:
pull_all_apps(bench_path=bench_path, reset=reset,force_frappe=force_frappe)
# if requirements:
# update_requirements(bench_path=bench_path)
if upgrade and (version_upgrade[0] or (not version_upgrade[0] and force)):
pre_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)
import bench.utils, bench.app
reload(bench.utils)
reload(bench.app)
if patch:
if not no_backup:
backup_all_sites(bench_path=bench_path)
patch_sites(bench_path=bench_path)
if build:
build_assets(bench_path=bench_path)
if upgrade and (version_upgrade[0] or (not version_upgrade[0] and force)):
post_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)
if restart_supervisor or conf.get('restart_supervisor_on_update'):
restart_supervisor_processes(bench_path=bench_path)
print "_" * 80
print "Bench: Open source installer + admin for Frappe and ERPNext (https://erpnext.com)"
print
@click.command('retry-upgrade')
@click.option('--version', default=5)
def retry_upgrade(version):
pull_all_apps()
patch_sites()
build_assets()
post_upgrade(version - 1, version)
def restart_update(kwargs):
args = ['--' + k for k, v in kwargs.items() if v]
os.execv(sys.argv[0], sys.argv[:2] + args)
@click.command('switch-to-branch')
@click.argument('branch')
@click.argument('apps', nargs=-1)
@click.option('--upgrade', is_flag=True)
def switch_to_branch(branch, apps, upgrade=False, force_frappe=False):
"Switch all apps to specified branch, or specify apps separated by space"
from bench.app import switch_to_branch
switch_to_branch(branch=branch, apps=list(apps), upgrade=upgrade)
print 'Switched to ' + branch
print 'Please run `bench update --patch` to be safe from any differences in database schema'
@click.command('switch-to-master')
@click.option('--upgrade', is_flag=True)
@click.option('--force_frappe', is_flag=True)
def switch_to_master(upgrade=False, force_frappe=False):
"Switch frappe and erpnext to master branch"
from bench.app import switch_to_master
switch_to_master(upgrade=upgrade, apps=['frappe', 'erpnext'],force_frappe=force_frappe)
print
print 'Switched to master'
print 'Please run `bench update --patch` to be safe from any differences in database schema'
@click.command('switch-to-develop')
@click.option('--upgrade', is_flag=True)
def switch_to_develop(upgrade=False):
"Switch frappe and erpnext to develop branch"
from bench.app import switch_to_develop
switch_to_develop(upgrade=upgrade, apps=['frappe', 'erpnext'])
print
print 'Switched to develop'
print 'Please run `bench update --patch` to be safe from any differences in database schema'
@click.command('switch-to-v4')
@click.option('--upgrade', is_flag=True)
def switch_to_v4(upgrade=False):
"Switch frappe and erpnext to v4 branch"
from bench.app import switch_to_v4
switch_to_v4(upgrade=upgrade)
print
print 'Switched to v4'
print 'Please run `bench update --patch` to be safe from any differences in database schema'
@click.command('switch-to-v5')
@click.option('--upgrade', is_flag=True)
def switch_to_v5(upgrade=False):
"Switch frappe and erpnext to v5 branch"
from bench.app import switch_to_v5
switch_to_v5(upgrade=upgrade)
print
print 'Switched to v5'
print 'Please run `bench update --patch` to be safe from any differences in database schema'
@click.command('switch-to-v7')
@click.option('--upgrade', is_flag=True)
def switch_to_v7(upgrade=False):
"Switch frappe and erpnext to v7 branch"
from bench.app import switch_to_v7
switch_to_v7(upgrade=upgrade)
print
print 'Switched to v7'
print 'Please run `bench update --patch` to be safe from any differences in database schema'
| gpl-3.0 | 4,466,500,918,880,796,700 | 38.794872 | 160 | 0.677835 | false |
Maleus/Enumerator | enumerator/lib/generic_service.py | 1 | 2366 | #!/usr/bin/env python
"""GenericService encapsulates any
methods which are common across all
service modules.
@author: Steve Coward (steve<at>sugarstack.io)
@version 1.0
"""
import os
import re
class GenericService(object):
static_path = '%s/../static' % os.path.dirname(os.path.realpath(__file__))
compiled_service_definition = None
def __init__(self):
self.compiled_service_definition = self.compile_service_definition(
self.SERVICE_DEFINITION)
def compile_service_definition(self, definition):
"""Take a string of key:values and parse
the values into a python interpretable
conditional statement.
@param definition: String used to classify
a service.
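        Illustrative sketch (hypothetical definitions, worked out from
        the parsing rules below); the compiled rules look roughly like:
            'port:80,8080'        ->  "port in ['80', '8080']"
            'service:http,-https' ->  '"http" in service and "https" not in service'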
"""
rule_parser_pattern = re.compile('([^\s]+\s)?(\w+):([^\s]+)')
rule = []
for rule_set in rule_parser_pattern.findall(definition):
conditional, key, values = map(str.strip, rule_set)
            # Determine if values need to be split apart.
            # Rule: if no value starts with '-', membership can be tested
            # against the whole list at once.
            # Rule: if any value starts with '-', each value is handled
            # separately (negated vs. plain membership).
if len([val for val in values.split(',') if val[0] == '-']):
values_set = values.split(',')
for value in values_set:
if value[0] == '-':
rule.append('"%s" not in %s' % (value[1:], key))
else:
rule.append('"%s" in %s' % (value, key))
else:
values_set = values.split(',')
rule.append('%s %s in %s' % (conditional, key, values_set))
return ' and '.join(rule).replace('and or', 'or')
def is_valid_service(self, attributes):
"""Returns True or False if the attributes
of a service record match the definition of
a service.
@param attributes: Dict value of a scanned service
(service,port,state).
"""
service = attributes.get('service')
port = attributes.get('port')
state = attributes.get('state')
if state != 'open':
return False
# The keys in rule will map to service, port and status set above.
return eval(self.compiled_service_definition)
| mit | -7,374,674,132,465,793,000 | 34.313433 | 97 | 0.569315 | false |
buguen/pylayers | pylayers/util/cone.py | 1 | 20331 | #-*- coding:Utf-8 -*-
r"""
Class Cone
==========
The following conventions are adopted
+ A cone has an **apex** which is a point in the plane.
+ A cone has two vectors which define the cone aperture. The order of those two vectors
matters: (u) is the starting vector and (v) the ending vector.
The cone region is defined by the convex angular sector going from starting
vector :math:`\mathbf{u}` to the ending vector :math:`\mathbf{v}`
rotating in the plane in following the trigonometric rotation convention.
The modulus of the cross product between :math:`\mathbf{u}` and :math:`\mathbf{v}` is positive.
:math:`\mathbf{u} \times \mathbf{v} = \alpha \mathbf{z} \;\; \textrm{with} \;\;\alpha > 0`
.. autosummary::
:toctree:
"""
import numpy as np
import doctest
import shapely as shp
import matplotlib.pyplot as plt
import pylayers.util.geomutil as geu
import pylayers.util.plotutil as plu
from pylayers.util.project import *
from matplotlib.path import Path
import matplotlib.patches as patches
import pdb
import logging
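# Illustrative sketch (not part of the original API): the constructor
# reorders its two input vectors so that the starting vector u and the
# ending vector v always satisfy u x v > 0, as stated in the module
# docstring above. The helper below only exists as a worked example.
def _example_uv_ordering():
    """ check the (u, v) ordering convention on a small example
    >>> _example_uv_ordering()
    True
    """
    cn = Cone(a=np.array([0, 1]), b=np.array([1, 0]))
    # a and b were passed "backwards"; Cone swaps them internally so
    # that np.cross(cn.u, cn.v) stays positive
    return bool(np.cross(cn.u, cn.v) > 0)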
class Cone(PyLayers):
def __init__(self, a=np.array([1,0]), b = np.array([0,1]), apex=np.array([0, 0])):
"""
a : np.array (,2)
basis vector
b : np.array (,2)
apex : np.array (,2)
"""
self.apex = apex
# normalizing cone vectors
an = a/np.sqrt(np.dot(a,a))
bn = b/np.sqrt(np.dot(b,b))
if np.cross(an,bn) > 0:
self.u = an
self.v = bn
else:
self.u = bn
self.v = an
# -1 < gamma < 1
self.dot = np.dot(self.u,self.v)
self.cross = np.cross(self.u,self.v)
        if self.cross != 0:
self.degenerated = False
else:
self.degenerated = True
# update cone angle and probability
self.upd_angle()
def __repr__(self):
st = 'Cone object \n'
st = st+'----------------\n'
st = st + "Apex : " + str(self.apex)+'\n'
st = st + "u :" + str(self.u)+'\n'
st = st + "v :" + str(self.v)+'\n'
st = st + "cross : " + str(self.cross)+'\n'
st = st + "dot : " + str(self.dot)+'\n'
st = st + "angle : " + str(self.angle*180/np.pi)+'\n'
st = st + "pcone : " + str(self.pcone)+'\n'
if hasattr(self,'seg0'):
st = st + "from segments ( (xta,xhe) , (yta,yhe) )\n"
st = st + " seg0 : " + str(tuple(self.seg0))+'\n'
st = st + " seg1 : " + str(tuple(self.seg1))+'\n'
return(st)
def upd_angle(self):
"""update cone angle attribute
and associated probability of the Cone object
"""
self.angle = np.arccos(self.dot)
self.pcone = self.angle/(1.0*np.pi)
def belong_seg(self,pta,phe,prob=True,visu=False):
""" test if segment belong to cone
Parameters
----------
pta : np.array (2xNseg)
phe : np.array (2xNseg)
Returns
-------
typ : int
0 : no visibility
1 : full visibility
2 : he.v
3 : ta.v
4 : ta.u
5 : he.u
6 : inside
proba : float
geometric probability
Notes
-----
        A segment belongs to the cone if its termination points
        do not both lie on the same side outside the cone.
See Also
--------
outside_point
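        Examples
        --------
        A minimal sketch (assuming the default cone, i.e. u = [1, 0],
        v = [0, 1] with apex at the origin); the segment below runs
        from (2, -1) to (-1, 2), crossing the whole angular sector,
        hence full visibility:
        >>> pta = np.array([[2.], [-1.]])
        >>> phe = np.array([[-1.], [2.]])
        >>> typ, proba = Cone().belong_seg(pta, phe)
        >>> int(typ[0]), float(proba[0])
        (1, 1.0)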
"""
if visu:
f,a = self.show()
plu.displot(pta,phe,fig=f,ax=a)
plt.show()
vc = (self.u+self.v)/2
#vcn = vc/np.sqrt(np.dot(vc,vc))
w = vc/np.sqrt(np.dot(vc,vc))
w = w.reshape(2,1)
#w = np.array([vcn[1],-vcn[0]])
ptama = pta - self.apex[:,None]
phema = phe - self.apex[:,None]
dtaw = np.sum(ptama*w,axis=0)
dhew = np.sum(phema*w,axis=0)
blta = (dtaw>=0)|(np.isclose(dtaw,0.))
blhe = (dhew>=0)|(np.isclose(dhew,0.))
#if 'seg1' in self.__dict__:
# pa = self.seg1[:,0].reshape(2,1)
# pb = (self.seg1[:,0]+w).reshape(2,1)
#else:
# pa = self.apex.reshape(2,1)
# pb = pa+w.reshape(2,1)
#blta = geu.isleft(pa,pb,pta)
#blhe = geu.isleft(pa,pb,phe)
# segment candidate for being above segment 1 (,Nseg)
boup = blta & blhe
# type of segment
if prob:
proba = np.zeros(np.shape(pta)[1])
else :
proba =[]
typ = np.zeros(np.shape(pta)[1])
# is tail out ? bo1 | bo2
# btaol : boolean tail out left
# btaor : boolean tail out right
# bheol : boolean head out left
# bheor : boolean head out right #
# among upper segment check position wrt cone
#btaol,btaor = self.outside_point(pta)
#bheol,bheor = self.outside_point(phe)
btaor,btaol = self.outside_point(pta)
bheor,bheol = self.outside_point(phe)
# tail and head are they out cone on the same side ?
# if the two termination points are not on the same side of the cone
# --> segment is in.
# boin = (~((btaol&bheol)|(btaor&bheor)))&boup
# full interception (proba to reach = 1)
bfull = ((btaol&bheor)|(btaor&bheol))&boup
if prob :
proba[bfull] = 1
typ[bfull] = 1
#(he-apex).v
btalhein = (btaol & ~bheol & ~bheor)&boup
if (prob and not (btalhein==False).all()):
v2 = phe[:,btalhein]-self.apex.reshape(2,1)
vn2 = v2/np.sqrt(np.sum(v2*v2,axis=0))
vvn2 = np.dot(self.v,vn2)
# paranoid verification of scalar product \in [-1,1]
vvn2 = np.minimum(vvn2,np.ones(len(vvn2)))
vvn2 = np.maximum(vvn2,-np.ones(len(vvn2)))
pr2 = np.arccos(vvn2)/self.angle
proba[btalhein] = pr2
typ[btalhein] = 2
#(ta-apex).v
bheltain = (bheol & ~btaol & ~btaor)&boup
if (prob and not (bheltain==False).all()):
v3 = pta[:,bheltain]-self.apex.reshape(2,1)
vn3 = v3/np.sqrt(np.sum(v3*v3,axis=0))
vvn3 = np.dot(self.v,vn3)
vvn3 = np.minimum(vvn3,np.ones(len(vvn3)))
vvn3 = np.maximum(vvn3,-np.ones(len(vvn3)))
pr3 = np.arccos(vvn3)/self.angle
proba[bheltain] = pr3
typ[bheltain] = 3
#ta.u
bhertain = (bheor & ~btaol & ~btaor)&boup
if (prob and not(bhertain==False).all()):
v4 = pta[:,bhertain]-self.apex.reshape(2,1)
vn4 = v4/np.sqrt(np.sum(v4*v4,axis=0))
vvn4 = np.dot(self.u,vn4)
vvn4 = np.minimum(vvn4,np.ones(len(vvn4)))
vvn4 = np.maximum(vvn4,-np.ones(len(vvn4)))
pr4 = np.arccos(vvn4)/self.angle
proba[bhertain] = pr4
typ[bhertain] = 4
#he.u
btarhein = (btaor & ~bheol & ~bheor)&boup
if (prob and not(btarhein==False).all()):
v5 = phe[:,btarhein]-self.apex.reshape(2,1)
vn5 = v5/np.sqrt(np.sum(v5*v5,axis=0))
vvn5 = np.dot(self.u,vn5)
vvn5 = np.minimum(vvn5,np.ones(len(vvn5)))
vvn5 = np.maximum(vvn5,-np.ones(len(vvn5)))
pr5 = np.arccos(vvn5)/self.angle
proba[btarhein] = pr5
typ[btarhein] = 5
#ta.he
btainhein = (~btaol & ~btaor & ~bheol & ~bheor)&boup
if (prob and not (btainhein==0).all()):
va = pta[:,btainhein]-self.apex.reshape(2,1)
vb = phe[:,btainhein]-self.apex.reshape(2,1)
vna = va/np.sqrt(np.sum(va*va,axis=0))
vnb = vb/np.sqrt(np.sum(vb*vb,axis=0))
# dot product vna,vnb
vnab = np.sum(vna*vnb,axis=0)
vnab = np.minimum(vnab,np.ones(len(vnab)))
vnab = np.maximum(vnab,-np.ones(len(vnab)))
pr6 = np.arccos(vnab)/self.angle
proba[btainhein] = pr6
typ[btainhein] = 6
return(typ,proba)
    def above_seg(self):
        """ store two points (pa, pb) defining the line through the
        first point of seg1, orthogonal to the cone bisector
        """
        vc = (self.u+self.v)/2
        vcn = vc/np.sqrt(np.dot(vc,vc))
        w = np.array([vcn[1],-vcn[0]])
        self.pa = self.seg1[:,0].reshape(2,1)
        self.pb = (self.seg1[:,0]+w).reshape(2,1)
def outside_point(self,p):
""" check if p is outside the cone
Parameters
----------
p : np.array (2xNp)
Returns
-------
~b1 & ~b2 : boolean (outside on the left) (,Np)
b1 & b2 : boolean (outside on the right) (,Np)
Examples
--------
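        A minimal sketch (default cone: u = [1, 0], v = [0, 1], apex at
        the origin); points are given column-wise:
        >>> p = np.array([[1., -1., 1.], [1., 1., -1.]])
        >>> o1, o2 = Cone().outside_point(p)
        >>> [bool(b) for b in o1]
        [False, False, True]
        >>> [bool(b) for b in o2]
        [False, True, False]
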
Notes
-----
        If one of the two output booleans is True, the point is outside.
There are 2 output bits but only 3 states due to (uv) convention.
v u
p \ / lv & lu
\/
\p /
\/ ~lv & lu
\ / p
\/ ~lu & ~lv
"""
a = self.apex[:,None]
# b = a + self.u.reshape(2,1)
# c = a + self.v.reshape(2,1)
b = a + self.u[:,None]
c = a + self.v[:,None]
p0a0 = p[0,:]-a[0,:]
p1a1 = p[1,:]-a[1,:]
lu = ((b[0,:]-a[0,:])* p1a1 - ((b[1,:]-a[1,:])* p0a0 ))>0
lv = ((c[0,:]-a[0,:])* p1a1 - ((c[1,:]-a[1,:])* p0a0 ))>0
return(~lu & ~lv , lu & lv)
def belong_point2(self,p):
"""
Parameters
----------
p : np.array (Ndim x Npoints)
"""
a = self.apex[:,np.newaxis]
b = a + self.u.reshape(2,1)
c = a + self.v.reshape(2,1)
p1a1 = p[1,:]-a[1,:]
p0a0 = p[0,:]-a[0,:]
b1 = ((b[0,:]-a[0,:])* p1a1 - ((b[1,:]-a[1,:])* p0a0 ))>0
b2 = ((c[0,:]-a[0,:])* p1a1 - ((c[1,:]-a[1,:])* p0a0 ))>0
return(b1^b2)
def belong_point(self, p):
""" test if p belongs to Cone
Parameters
----------
p : np.array (Ndim x Npoints)
Returns
-------
b : np.array boolean (1xNpoints)
"""
# Ndim x Npoints
if not self.degenerated:
pt = p - self.apex[:,np.newaxis]
#puv = np.sum(self.bv[:,:,np.newaxis]*pt[:,np.newaxis,:],axis=0)
#alpha = puv[0,:]-self.gamma*puv[1,:]
#beta = puv[1,:]-self.gamma*puv[0,:]
pu = np.sum(self.u[:,np.newaxis]*pt,axis=0)
pv = np.sum(self.v[:,np.newaxis]*pt,axis=0)
alpha = pu-self.dot*pv
beta = pv-self.dot*pu
b = (beta>0)&(alpha>0)
else:
a0 = self.seg0[:,0]
b0 = self.seg0[:,1]
            if self.u[0] != 0:
slope = self.u[1]/self.u[0]
y0 = a0[1]-slope*a0[0]
y1 = b0[1]-slope*b0[0]
b = (p[1,:] > slope*p[0,:] + min(y0,y1) ) & (p[1,:]<slope*p[0,:]+max(y0,y1) )
else:
b = (p[0,:] > min(a0[0],b0[0]) ) & (p[0,:]< max(a0[0],b0[0]) )
return(b)
    def above(self, p):
        """ check if points are above the cone's upper boundary
        Parameters
        ----------
        p : np.array (2xNp)
        """
        bo1 = self.belong_point(p)
        pb = p[:,bo1]
        if self.v[0] != 0:
            slope1 = self.v[1]/self.v[0]
            # intercept anchored at seg1 (the original expression
            # self.v[1] - slope1*self.v[0] always reduced to zero)
            b1 = self.seg1[1,0] - slope1*self.seg1[0,0]
            bo2 = pb[1,:] > slope1*pb[0,:]+b1
        else:
            bo2 = pb[1,:] > self.seg1[1,0]
        return(bo1,bo2)
def fromptseg(self,pt,seg):
""" creates a Cone from one point and one segment
Parameters
----------
pt : nd.array (,2)
seg : nd.array (2,2)
"""
self.apex = pt
a = seg[:,0]
b = seg[:,1]
v0 = b - pt
v1 = a - pt
v0n = v0/np.sqrt(np.dot(v0,v0))
v1n = v1/np.sqrt(np.dot(v1,v1))
if np.cross(v0n,v1n) > 0:
self.u = v0n
self.v = v1n
self.seg1 = seg
else:
self.u = v1n
self.v = v0n
self.seg1 = seg[:,::-1]
self.dot = np.dot(self.u,self.v)
self.cross = np.cross(self.u,self.v)
if self.cross < 1e-15:
self.degenerated=True
self.upd_angle()
def from2segs(self,seg0,seg1):
""" creates a Cone from 2 segments
Parameters
----------
seg0 : 2 x 2 (Ndim x Npoints)
seg1 : 2 x 2
Notes
-----
The only way for the cone to be degenerated is when the two segments are on the same line.
See Also
--------
pylayers.gis.layout.Layout.buildGi
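        Examples
        --------
        A small sketch: two horizontal unit segments one unit apart
        (illustrative values, not from the original code base):
        >>> cn = Cone()
        >>> seg0 = np.array([[0., 1.], [0., 0.]])
        >>> seg1 = np.array([[0., 1.], [1., 1.]])
        >>> cn.from2segs(seg0, seg1)
        >>> cn.degenerated
        False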
"""
# bv : (4,1)
self.seg0 = seg0
self.seg1 = seg1
a0 = seg0[:,0]
b0 = seg0[:,1]
a1 = seg1[:,0]
b1 = seg1[:,1]
# check for connected segments (This could be determined earlier)
# a0 = a1 | b1
# b0 = a1 | b1
# check segment orientation (crossing)
if not (geu.ccw(a0,b0,b1) ^
geu.ccw(b0,b1,a1) ):
v0 = (b1 - a0)
v1 = (a1 - b0)
twisted = True
else:
v0 = (a1 - a0)
v1 = (b1 - b0)
twisted = False
v0n = v0/np.sqrt(np.dot(v0,v0))
v1n = v1/np.sqrt(np.dot(v1,v1))
if np.cross(v0n,v1n) > 0:
self.u = v0n
self.v = v1n
inversion = False
else:
self.u = v1n
self.v = v0n
inversion = True
if (not twisted) & (not inversion) :
#reverse seg1
#print "reverse seg1"
self.seg1 = self.seg1[:,::-1]
if (inversion) & (not twisted):
#reverse seg0
#print "reverse seg0"
self.seg0 = self.seg0[:,::-1]
if twisted & inversion:
#reverse seg0 and seg1
#print "reverse seg0"
#print "reverse seg1"
self.seg0 = self.seg0[:,::-1]
self.seg1 = self.seg1[:,::-1]
self.dot = np.dot(self.u,self.v)
self.cross = np.cross(self.u,self.v)
if self.cross < 1e-15:
self.degenerated=True
else:
a0u = np.dot(self.seg0[:,0],self.u)
a0v = np.dot(self.seg0[:,0],self.v)
b0u = np.dot(self.seg0[:,1],self.u)
b0v = np.dot(self.seg0[:,1],self.v)
kb = ((b0v-a0v)-self.dot*(b0u-a0u))/(self.dot*self.dot-1)
self.apex = self.seg0[:,1] + kb*self.v
self.upd_angle()
def from2csegs(self,seg0,seg1):
""" creates a Cone from 2 connected segments
Parameters
----------
seg0 : 2 x 2 (Ndim x Npoints)
seg1 : 2 x 2
Notes
-----
The only way for the cone to be degenerated is when the two segments are on the same line.
Examples
--------
>>> from pylayers.util.cone import *
>>> import matplotlib.pyplot as plt
>>> cn = Cone()
>>> f,a = cn.show()
>>> plt.show()
"""
# bv : (4,1)
self.seg0 = seg0
self.seg1 = seg1
a0 = seg0[:,0]
b0 = seg0[:,1]
a1 = seg1[:,0]
b1 = seg1[:,1]
# determine common point
if (np.dot(a0-a1,a0-a1)<1e-8):
p = a0
u = b1-p
v = p-b0
elif (np.dot(a0-b1,a0-b1)<1e-8):
p = a0
u = a1-p
v = p-b0
self.seg1 = self.seg1[:,::-1]
elif (np.dot(b0-a1,b0-a1)<1e-8):
p = b0
self.seg0 = self.seg0[:,::-1]
u = b1-p
v = p-a0
elif (np.dot(b0-b1,b0-b1)<1e-8):
self.seg0 = self.seg0[:,::-1]
self.seg1 = self.seg1[:,::-1]
p = b0
u = a1-p
v = p-a0
else:
logging.critical('segment are not connected')
pdb.set_trace()
self.apex = p
self.v = v/np.sqrt(np.dot(v,v))
self.u = u/np.sqrt(np.dot(u,u))
self.dot = np.dot(self.u,self.v)
self.cross = np.cross(self.u,self.v)
if self.cross<0:
self.u , self.v = self.v , self.u
self.dot = np.dot(self.u,self.v)
self.cross = np.cross(self.u,self.v)
if self.cross < 1e-15:
self.degenerated=True
self.upd_angle()
def show(self, **kwargs):
""" show cone
Parameters
----------
length : float
"""
defaults = {'length': 15.}
for k in defaults:
if k not in kwargs:
kwargs[k] = defaults[k]
if 'seg1' not in self.__dict__:
verts = [tuple(self.apex),
tuple(self.apex + kwargs['length'] * self.u),
tuple(self.apex + kwargs['length'] * self.v),
tuple(self.apex)
]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY,
]
else:
a1 = self.seg1[:,0]
b1 = self.seg1[:,1]
if 'seg0' not in self.__dict__:
a0 = self.apex
b0 = self.apex
else:
a0 = self.seg0[:,0]
b0 = self.seg0[:,1]
if not(self.degenerated):
#verts = [tuple(self.apex),
# tuple(a1),
# tuple(b1),
# tuple(self.apex)
# ]
verts = [tuple(self.apex),
tuple(self.apex + kwargs['length'] * self.u),
tuple(self.apex + kwargs['length'] * self.v),
tuple(self.apex)
]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY,
]
else:
if (geu.ccw(a0,b0,b1) ^
geu.ccw(b0,b1,a1) ):
verts = [tuple(b0),
tuple(a1),
tuple(b1),
tuple(a0),
tuple(b0)
]
else:
verts = [tuple(b0),
tuple(b1),
tuple(a1),
tuple(a0),
tuple(b0)
]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY,
]
path = Path(verts, codes)
if 'fig' not in kwargs:
fig = plt.figure(figsize=(10,10))
else:
fig = kwargs['fig']
if 'ax' not in kwargs:
ax = fig.add_subplot(111)
else:
ax = kwargs['ax']
ax.plot([self.apex[0],self.apex[0]+kwargs['length']*self.u[0]],
[self.apex[1],self.apex[1]+kwargs['length']*self.u[1]],lw=1,color='b')
ax.plot([self.apex[0],self.apex[0]+kwargs['length']*self.v[0]],
[self.apex[1],self.apex[1]+kwargs['length']*self.v[1]],lw=1,color='r')
theta1 = np.arctan2(self.u[1],self.u[0])*180/np.pi
#print theta1
theta2 = np.arctan2(self.v[1],self.v[0])*180/np.pi
#print theta2
angle = self.angle*180/np.pi
#print angle
arc = patches.Arc((self.apex[0],self.apex[1]),kwargs['length'],kwargs['length'],theta1=theta1,theta2=theta2,linewidth=2)
ax.add_patch(arc)
if 'seg0' in self.__dict__:
ax.plot([a0[0],b0[0]],[a0[1],b0[1]],lw=2,color='b')
if 'seg1' in self.__dict__:
ax.plot([a1[0],b1[0]],[a1[1],b1[1]],lw=2,color='r')
patch = patches.PathPatch(path, facecolor='orange', lw=2, alpha=0.3)
ax.add_patch(patch)
ax.axis('equal')
# ax.set_xlim(-2,2)
# ax.set_ylim(-2,2)
return(fig, ax)
if __name__ == '__main__':
plt.ion()
doctest.testmod()
| lgpl-3.0 | 8,532,794,341,175,884,000 | 27.395251 | 128 | 0.4471 | false |
ulif/dropafile | tests/test_dropafile.py | 1 | 14193 | # tests for dropafile module.
import base64
import math
import os
import pytest
import re
import shutil
import subprocess
import tempfile
from contextlib import contextmanager
from io import BytesIO
from werkzeug.datastructures import Headers
from werkzeug.test import Client, create_environ, EnvironBuilder
from werkzeug.wrappers import BaseResponse, Request
from dropafile import (
DropAFileApplication, execute_cmd, create_ssl_cert, get_random_password,
ALLOWED_PWD_CHARS, handle_options, run_server, get_store_path
)
#: find a certificate path in output.
RE_CERTPATH = re.compile(
'^.*Certificate in:[\s]+([^\s][^\n]+)\n.*$', re.M + re.S)
@contextmanager
def popen(*args, **kw):
# a Python 2.6/2.7 compatible Popen contextmanager
p = subprocess.Popen(*args, **kw)
try:
yield p
finally:
for stream in (p.stdout, p.stderr, p.stdin):
if stream:
stream.close()
p.wait()
def encode_creds(username='somename', password=''):
# turn credentials given into base64 encoded string
auth_string = '%s:%s' % (username, password)
encoded = base64.b64encode(auth_string.encode('utf-8'))
return 'Basic %s' % encoded.decode('utf-8')
def get_basic_auth_headers(username='somename', password=''):
# get a set of request headers with authorization set to creds.
headers = Headers()
headers.add(
'Authorization', encode_creds(username=username, password=password))
return headers
class TestHelpers(object):
@pytest.mark.skipif(
not os.path.exists('/bin/echo'), reason="needs /bin/echo")
def test_excute_cmd(self):
# we can execute commands (w/o shell)
cmd = ["/bin/echo", "Hello $PATH"]
out, err = execute_cmd(cmd)
assert out == b'Hello $PATH\n'
assert err == b''
def test_create_cert(self, tmpdir):
# we can create x509 certs
path = tmpdir.dirname
cert_path, key_path = create_ssl_cert(path)
assert os.path.isfile(cert_path)
assert os.path.isfile(key_path)
shutil.rmtree(os.path.dirname(cert_path))
def test_create_cert_no_path(self):
# w/o a given path, one will be created for us
cert_path, key_path = create_ssl_cert()
assert os.path.isfile(cert_path)
shutil.rmtree(os.path.dirname(cert_path))
def test_get_random_password(self):
# we can get a random password
allowed_chars = '[A-HJ-NP-Z2-9a-hjkmnp-z]'
RE_PWD = re.compile('^%s+$' % allowed_chars)
password = get_random_password()
assert RE_PWD.match(password)
def test_get_random_password_entropy(self):
# the entropy delivered by default >= 128 bits
unique_chars = ''.join(list(set(ALLOWED_PWD_CHARS)))
entropy_per_char = math.log(len(unique_chars)) / math.log(2)
password = get_random_password()
assert len(password) * entropy_per_char >= 128
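        # worked example (assuming the ~55 distinct characters matched by
        # the regex in the previous test): log2(55) is about 5.8 bits per
        # character, so at least ceil(128 / 5.8) = 23 characters are needed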
def test_get_store_path(self):
# we can get a safe storage path
store_dir = tempfile.mkdtemp()
path = get_store_path(store_dir, 'test.txt')
assert path == os.path.join(store_dir, 'test.txt')
def test_get_store_path_one_file_in(self):
# with one file in we get a modified filename
store_dir = tempfile.mkdtemp()
open(os.path.join(store_dir, 'test.txt'), 'w').write('foo')
path = get_store_path(store_dir, 'test.txt')
assert path.endswith('/test.txt-1')
open(path, 'w').write('foo')
path = get_store_path(store_dir, 'test.txt')
assert path.endswith('/test.txt-2')
open(path, 'w').write('foo')
path = get_store_path(store_dir, 'test.txt')
assert path.endswith('/test.txt-3')
def test_get_store_path_two_files_in(self):
# with two files in we also get a modified filename
store_dir = tempfile.mkdtemp()
open(os.path.join(store_dir, 'test.txt'), 'w').write('foo')
open(os.path.join(store_dir, 'test.txt-2'), 'w').write('foo')
path = get_store_path(store_dir, 'test.txt')
assert path.endswith('/test.txt-1')
class TestApp(object):
# no browser tests here
def test_app_has_password(self):
# DropAFileApplications have a password
app = DropAFileApplication()
assert hasattr(app, 'password')
assert len(app.password) > 5
def test_app_accepts_passwod(self):
# DropAFileApps accept passwords passed in
app = DropAFileApplication(password='verysecret')
assert app.password == 'verysecret'
def test_check_auth_requires_auth(self):
# we require at least some creds to authenticate
app = DropAFileApplication()
app.password = 'sosecret'
env = create_environ()
request = Request(env)
assert app.check_auth(request) is False
def test_check_auth_wrong_passwd(self):
# of course check_auth requires the correct password
app = DropAFileApplication()
app.password = 'sosecret'
env = create_environ()
env.update(HTTP_AUTHORIZATION=encode_creds(
username='somename', password='wrong-password'))
request = Request(env)
assert app.check_auth(request) is False
def test_check_auth_correct_passwd(self):
        # the correct password is accepted.
app = DropAFileApplication()
app.password = 'sosecret'
env = create_environ()
env.update(HTTP_AUTHORIZATION=encode_creds(
username='somename', password='sosecret'))
request = Request(env)
assert app.check_auth(request) is True
def test_handle_uploaded_files(self):
# we can send files (that are stored)
app = DropAFileApplication()
builder = EnvironBuilder(
method='POST',
data={'file': (BytesIO(b'foo'), 'test.txt')}
)
req = Request(builder.get_environ())
app.handle_uploaded_files(req)
expected_path = os.path.join(app.upload_dir, 'test.txt')
assert os.path.isfile(expected_path)
assert open(expected_path, 'r').read() == 'foo'
def test_handle_uploaded_files_wrong_formfield_name(self):
# only files with form-name 'file' are considered
app = DropAFileApplication()
builder = EnvironBuilder(
method='POST',
data={'not_file': (BytesIO(b'foo'), 'test.txt')}
)
req = Request(builder.get_environ())
app.handle_uploaded_files(req)
assert os.listdir(app.upload_dir) == []
def test_handle_uploaded_files_multiple_at_once(self):
# we only take one file, even if multiple are offered
app = DropAFileApplication()
builder = EnvironBuilder(
method='POST',
data={'file': (BytesIO(b'foo'), 'test.txt'),
'file2': (BytesIO(b'bar'), 'test2.txt')}
)
req = Request(builder.get_environ())
app.handle_uploaded_files(req)
assert os.listdir(app.upload_dir) == ['test.txt']
def test_handle_uploaded_files_output(self, capsys):
# sent files are listed on commandline
app = DropAFileApplication()
builder = EnvironBuilder(
method='POST',
data={'file': (BytesIO(b'foo'), 'test.txt')}
)
req = Request(builder.get_environ())
app.handle_uploaded_files(req)
out, err = capsys.readouterr()
assert 'RECEIVED:' in out
assert 'test.txt' in out
def test_handle_uploaded_files_no_files(self, capsys):
        # we notice if no file was sent (and do nothing)
app = DropAFileApplication()
req = Request(create_environ())
app.handle_uploaded_files(req)
out, err = capsys.readouterr()
assert os.listdir(app.upload_dir) == []
assert 'RECEIVED' not in out
def test_handle_uploaded_files_not_overwritten(self):
# we do not overwrite uploaded files
app = DropAFileApplication()
builder = EnvironBuilder(
method='POST',
data={'file': (BytesIO(b'uploaded'), 'test.txt')}
)
req = Request(builder.get_environ())
upload_path = os.path.join(app.upload_dir, 'test.txt')
with open(upload_path, 'w') as fd:
fd.write('original')
app.handle_uploaded_files(req)
assert sorted(
os.listdir(app.upload_dir)) == ['test.txt', 'test.txt-1']
with open(os.path.join(app.upload_dir, 'test.txt'), 'r') as fd:
content1 = fd.read()
with open(os.path.join(app.upload_dir, 'test.txt-1'), 'r') as fd:
content2 = fd.read()
assert content1 == 'original'
assert content2 == 'uploaded'
class TestArgParser(object):
def test_help(self, capsys):
# we support --help
with pytest.raises(SystemExit) as exc_info:
handle_options(['foo', '--help'])
out, err = capsys.readouterr()
assert exc_info.value.code == 0
def test_help_lists_all_options(self, capsys):
# all options are listed in --help
with pytest.raises(SystemExit):
handle_options(['foo', '--help'])
out, err = capsys.readouterr()
assert '--host' in out
assert '--port' in out
assert '--secret' in out
def test_defaults(self):
# we can get options with defaults set
result = handle_options([])
assert result is not None
assert result.host == 'localhost'
assert result.port == 8443
assert result.secret is None
def test_host(self):
result = handle_options(['--host', 'foo'])
assert result.host == 'foo'
def test_port(self):
result = handle_options(['--port', '1234'])
assert result.port == 1234
def test_secret(self):
result = handle_options(['--secret', 'sosecret'])
assert result.secret == 'sosecret'
class Test_run_server(object):
def test_no_options(self, proc_runner):
# we can start a server (no options given)
proc_runner.argv = ['dropafile', ]
out, err = proc_runner.run(run_server, args=None)
assert 'Certificate in:' in out
assert 'Running' in err
def test_help(self, proc_runner):
# we can get help from run_server()
out, err = proc_runner.run(run_server, args=["dropafile", "--help"])
assert 'show this help message and exit' in out
assert proc_runner.exitcode == 0
def test_secret(self, proc_runner):
# a passed-in password is respected
out, err = proc_runner.run(
run_server, args=["dropafile", "-s", "sosecret"])
assert 'Password is: sosecret' in out
def test_host_and_port(self, proc_runner):
# we can set a custom host and port we want to bind to
out, err = proc_runner.run(
run_server, args=["dropafile",
"--host", "0.0.0.0",
"--port", "12345"]
)
assert "https://0.0.0.0:12345/" in err
class TestFunctional(object):
# Functional browser tests
def test_page_response(self):
# we can get some HTML page for any path
application = DropAFileApplication()
client = Client(application, BaseResponse)
headers = get_basic_auth_headers(
username='somename', password=application.password)
resp = client.get('/', headers=headers)
assert resp.status == '200 OK'
mimetype = resp.headers.get('Content-Type')
assert mimetype == 'text/html; charset=utf-8'
def test_get_js(self):
# we can get the dropzonejs JavaScript
application = DropAFileApplication()
client = Client(application, BaseResponse)
headers = get_basic_auth_headers(
username='somename', password=application.password)
resp = client.get('dropzone.js', headers=headers)
assert resp.status == '200 OK'
mimetype = resp.headers.get('Content-Type')
assert mimetype == 'text/javascript; charset=utf-8'
def test_get_css(self):
# we can get the dropzonejs CSS
application = DropAFileApplication()
client = Client(application, BaseResponse)
headers = get_basic_auth_headers(
username='somename', password=application.password)
resp = client.get('dropzone.css', headers=headers)
assert resp.status == '200 OK'
mimetype = resp.headers.get('Content-Type')
assert mimetype == 'text/css; charset=utf-8'
def test_send_file(self):
# we can send files
application = DropAFileApplication()
client = Client(application, BaseResponse)
headers = get_basic_auth_headers(
username='somename', password=application.password)
resp = client.post(
'/index.html',
headers=headers,
data={
'file': (BytesIO(b'Some Content'), 'sample.txt'),
},
)
assert resp.status == '200 OK'
uploaded_path = os.path.join(application.upload_dir, 'sample.txt')
assert os.path.isfile(uploaded_path)
def test_unauthorized_by_default(self):
# By default we get an Unauthorized message
app = DropAFileApplication()
client = Client(app, BaseResponse)
resp = client.get('/')
assert resp.status == '401 UNAUTHORIZED'
def test_basic_auth_req_by_default(self):
# By default we require basic auth from client
app = DropAFileApplication()
client = Client(app, BaseResponse)
resp = client.get('/')
header = resp.headers.get('WWW-Authenticate', None)
assert header is not None
def test_page_set_password(self):
# we can get some HTML page for any path
application = DropAFileApplication(password="sosecret")
client = Client(application, BaseResponse)
headers = get_basic_auth_headers(
username='somename', password="sosecret")
resp = client.get('/', headers=headers)
assert resp.status == '200 OK'
| gpl-3.0 | -2,058,787,176,074,779,000 | 35.579897 | 76 | 0.60551 | false |
danylaksono/inasafe | safe/messaging/item/row.py | 1 | 2919 | """
InaSAFE Disaster risk assessment tool developed by AusAid - **Row**
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '[email protected]'
__revision__ = '$Format:%H$'
__date__ = '04/06/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
from message_element import MessageElement
from exceptions import InvalidMessageItemError
from cell import Cell
#FIXME (MB) remove when all to_* methods are implemented
#pylint: disable=W0223
class Row(MessageElement):
"""A class to model table rows in the messaging system """
def __init__(self, *args, **kwargs):
"""Creates a row object
Args:
args can be list or Cell
Returns:
None
Raises:
Errors are propagated
We pass the kwargs on to the base class so an exception is raised
if invalid keywords were passed. See:
http://stackoverflow.com/questions/13124961/
how-to-pass-arguments-efficiently-kwargs-in-python
"""
super(Row, self).__init__(**kwargs)
self.cells = []
for arg in args:
self.add(arg)
def add(self, item):
"""add a Cell to the row
        lists can be passed and are automatically converted to Cells
Args:
            item: an element to add to the cells; can be a string, list or Cell object
Returns:
None
Raises:
Errors are propagated
"""
if isinstance(item, basestring) or self._is_qstring(item):
self.cells.append(Cell(item))
elif isinstance(item, Cell):
self.cells.append(item)
elif isinstance(item, list):
for i in item:
self.cells.append(Cell(i))
else:
raise InvalidMessageItemError(item, item.__class__)
def to_html(self):
"""Render a Text MessageElement as html
Args:
None
Returns:
Str the html representation of the Text MessageElement
Raises:
Errors are propagated
"""
row = '<tr%s>\n' % self.html_attributes()
for cell in self.cells:
row += cell.to_html()
row += '</tr>\n'
return row
def to_text(self):
"""Render a Text MessageElement as plain text
Args:
None
Returns:
Str the plain text representation of the Text MessageElement
Raises:
Errors are propagated
"""
row = '---\n'
for cell in self.cells:
            row += cell.to_text()
row += '---'
return row
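# Illustrative sketch (not part of the original module): how a Row is
# typically assembled and rendered; per Row.add above, plain strings and
# lists are converted to Cell objects automatically.
def _example_row_usage():
    """Build a two-cell row and render it as HTML (hypothetical helper)."""
    row = Row(['first', 'second'])
    # the list argument above became one Cell per element
    return row.to_html()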
| gpl-3.0 | 1,648,468,602,593,973,200 | 24.605263 | 78 | 0.578623 | false |
robin900/sqlalchemy | test/orm/test_lockmode.py | 2 | 8088 | from sqlalchemy.engine import default
from sqlalchemy.databases import *
from sqlalchemy.orm import mapper
from sqlalchemy.orm import Session
from sqlalchemy.testing import AssertsCompiledSQL, eq_
from sqlalchemy.testing import assert_raises_message
from sqlalchemy import exc
from test.orm import _fixtures
class LegacyLockModeTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
User, users = cls.classes.User, cls.tables.users
mapper(User, users)
def _assert_legacy(self, arg, read=False, nowait=False):
User = self.classes.User
s = Session()
q = s.query(User).with_lockmode(arg)
sel = q._compile_context().statement
if arg is None:
assert q._for_update_arg is None
assert sel._for_update_arg is None
return
assert q._for_update_arg.read is read
assert q._for_update_arg.nowait is nowait
assert sel._for_update_arg.read is read
assert sel._for_update_arg.nowait is nowait
def test_false_legacy(self):
self._assert_legacy(None)
def test_plain_legacy(self):
self._assert_legacy("update")
def test_nowait_legacy(self):
self._assert_legacy("update_nowait", nowait=True)
def test_read_legacy(self):
self._assert_legacy("read", read=True)
def test_unknown_legacy_lock_mode(self):
User = self.classes.User
sess = Session()
assert_raises_message(
exc.ArgumentError, "Unknown with_lockmode argument: 'unknown_mode'",
sess.query(User.id).with_lockmode, 'unknown_mode'
)
class ForUpdateTest(_fixtures.FixtureTest):
@classmethod
def setup_mappers(cls):
User, users = cls.classes.User, cls.tables.users
mapper(User, users)
def _assert(self, read=False, nowait=False, of=None, key_share=None,
assert_q_of=None, assert_sel_of=None):
User = self.classes.User
s = Session()
q = s.query(User).with_for_update(read=read, nowait=nowait, of=of, key_share=key_share)
sel = q._compile_context().statement
assert q._for_update_arg.read is read
assert sel._for_update_arg.read is read
assert q._for_update_arg.nowait is nowait
assert sel._for_update_arg.nowait is nowait
assert q._for_update_arg.key_share is key_share
assert sel._for_update_arg.key_share is key_share
eq_(q._for_update_arg.of, assert_q_of)
eq_(sel._for_update_arg.of, assert_sel_of)
def test_key_share(self):
self._assert(key_share=True)
def test_read(self):
self._assert(read=True)
def test_plain(self):
self._assert()
def test_nowait(self):
self._assert(nowait=True)
def test_of_single_col(self):
User, users = self.classes.User, self.tables.users
self._assert(
of=User.id,
assert_q_of=[users.c.id],
assert_sel_of=[users.c.id]
)
class CompileTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""run some compile tests, even though these are redundant."""
run_inserts = None
@classmethod
def setup_mappers(cls):
User, users = cls.classes.User, cls.tables.users
Address, addresses = cls.classes.Address, cls.tables.addresses
mapper(User, users)
mapper(Address, addresses)
def test_default_update(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect=default.DefaultDialect()
)
def test_not_supported_by_dialect_should_just_use_update(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(read=True),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect=default.DefaultDialect()
)
def test_postgres_read(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(read=True),
"SELECT users.id AS users_id FROM users FOR SHARE",
dialect="postgresql"
)
def test_postgres_read_nowait(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).
with_for_update(read=True, nowait=True),
"SELECT users.id AS users_id FROM users FOR SHARE NOWAIT",
dialect="postgresql"
)
def test_postgres_update(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect="postgresql"
)
def test_postgres_update_of(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(of=User.id),
"SELECT users.id AS users_id FROM users FOR UPDATE OF users",
dialect="postgresql"
)
def test_postgres_update_of_entity(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(of=User),
"SELECT users.id AS users_id FROM users FOR UPDATE OF users",
dialect="postgresql"
)
def test_postgres_update_of_entity_list(self):
User = self.classes.User
Address = self.classes.Address
sess = Session()
self.assert_compile(sess.query(User.id, Address.id).
with_for_update(of=[User, Address]),
"SELECT users.id AS users_id, addresses.id AS addresses_id "
"FROM users, addresses FOR UPDATE OF users, addresses",
dialect="postgresql"
)
def test_postgres_for_no_key_update(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(key_share=True),
"SELECT users.id AS users_id FROM users FOR NO KEY UPDATE",
dialect="postgresql"
)
def test_postgres_for_no_key_nowait_update(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(key_share=True, nowait=True),
"SELECT users.id AS users_id FROM users FOR NO KEY UPDATE NOWAIT",
dialect="postgresql"
)
def test_postgres_update_of_list(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).
with_for_update(of=[User.id, User.id, User.id]),
"SELECT users.id AS users_id FROM users FOR UPDATE OF users",
dialect="postgresql"
)
def test_postgres_update_skip_locked(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).
with_for_update(skip_locked=True),
"SELECT users.id AS users_id FROM users FOR UPDATE SKIP LOCKED",
dialect="postgresql"
)
def test_oracle_update(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect="oracle"
)
def test_oracle_update_skip_locked(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id)
.with_for_update(skip_locked=True),
"SELECT users.id AS users_id FROM users FOR UPDATE SKIP LOCKED",
dialect="oracle"
)
def test_mysql_read(self):
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(read=True),
"SELECT users.id AS users_id FROM users LOCK IN SHARE MODE",
dialect="mysql"
)
| mit | -4,054,647,636,213,751,300 | 32.7 | 95 | 0.61004 | false |
rahulunair/nova | nova/tests/unit/api/openstack/compute/test_serversV21.py | 1 | 378354 | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import datetime
import ddt
import functools
import fixtures
import iso8601
import mock
from oslo_policy import policy as oslo_policy
from oslo_serialization import base64
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from oslo_utils import fixture as utils_fixture
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from six.moves import range
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack import compute
from nova.api.openstack.compute import ips
from nova.api.openstack.compute.schemas import servers as servers_schema
from nova.api.openstack.compute import servers
from nova.api.openstack.compute import views
from nova.api.openstack import wsgi as os_wsgi
from nova import availability_zones
from nova import block_device
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import context
from nova.db import api as db
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import models
from nova import exception
from nova.image import glance
from nova import objects
from nova.objects import instance as instance_obj
from nova.objects.instance_group import InstanceGroup
from nova.objects import tag
from nova.policies import servers as server_policies
from nova import policy
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit.image import fake
from nova.tests.unit import matchers
from nova import utils as nova_utils
CONF = nova.conf.CONF
FAKE_UUID = fakes.FAKE_UUID
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
GET_ONLY_FIELDS = ['OS-EXT-AZ:availability_zone', 'config_drive',
'OS-EXT-SRV-ATTR:host',
'OS-EXT-SRV-ATTR:hypervisor_hostname',
'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:hostname',
'OS-EXT-SRV-ATTR:kernel_id',
'OS-EXT-SRV-ATTR:launch_index',
'OS-EXT-SRV-ATTR:ramdisk_id',
'OS-EXT-SRV-ATTR:reservation_id',
'OS-EXT-SRV-ATTR:root_device_name',
'OS-EXT-SRV-ATTR:user_data', 'host_status',
'key_name', 'OS-SRV-USG:launched_at',
'OS-SRV-USG:terminated_at',
'OS-EXT-STS:task_state', 'OS-EXT-STS:vm_state',
'OS-EXT-STS:power_state', 'security_groups',
'os-extended-volumes:volumes_attached']
def instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=None,
):
inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
name=values.get('display_name'))
inst = dict(inst, **values)
return (inst, inst)
def instance_update(context, instance_uuid, values):
inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
name=values.get('display_name'))
inst = dict(inst, **values)
return inst
def fake_compute_api(cls, req, id):
return True
def fake_start_stop_not_ready(self, context, instance):
raise exception.InstanceNotReady(instance_id=instance["uuid"])
def fake_start_stop_invalid_state(self, context, instance):
raise exception.InstanceInvalidState(
instance_uuid=instance['uuid'], attr='fake_attr',
method='fake_method', state='fake_state')
def fake_instance_get_by_uuid_not_found(context, uuid,
columns_to_join, use_slave=False):
raise exception.InstanceNotFound(instance_id=uuid)
def fake_instance_get_all_with_locked(context, list_locked, **kwargs):
obj_list = []
s_id = 0
for locked in list_locked:
uuid = fakes.get_fake_uuid(locked)
s_id = s_id + 1
kwargs['locked_by'] = None if locked == 'not_locked' else locked
server = fakes.stub_instance_obj(context, id=s_id, uuid=uuid, **kwargs)
obj_list.append(server)
return objects.InstanceList(objects=obj_list)
def fake_instance_get_all_with_description(context, list_desc, **kwargs):
obj_list = []
s_id = 0
for desc in list_desc:
uuid = fakes.get_fake_uuid(desc)
s_id = s_id + 1
kwargs['display_description'] = desc
server = fakes.stub_instance_obj(context, id=s_id, uuid=uuid, **kwargs)
obj_list.append(server)
return objects.InstanceList(objects=obj_list)
def fake_compute_get_empty_az(*args, **kwargs):
inst = fakes.stub_instance(vm_state=vm_states.ACTIVE,
availability_zone='')
return fake_instance.fake_instance_obj(args[1], **inst)
def fake_bdms_get_all_by_instance_uuids(*args, **kwargs):
return [
fake_block_device.FakeDbBlockDeviceDict({
'id': 1,
'volume_id': 'some_volume_1',
'instance_uuid': FAKE_UUID,
'source_type': 'volume',
'destination_type': 'volume',
'delete_on_termination': True,
}),
fake_block_device.FakeDbBlockDeviceDict({
'id': 2,
'volume_id': 'some_volume_2',
'instance_uuid': FAKE_UUID,
'source_type': 'volume',
'destination_type': 'volume',
'delete_on_termination': False,
}),
]
def fake_get_inst_mappings_by_instance_uuids_from_db(*args, **kwargs):
return [{
'id': 1,
'instance_uuid': UUID1,
'cell_mapping': {
'id': 1, 'uuid': uuids.cell1, 'name': 'fake',
'transport_url': 'fake://nowhere/', 'updated_at': None,
'database_connection': uuids.cell1, 'created_at': None,
'disabled': False},
'project_id': 'fake-project'
}]
class MockSetAdminPassword(object):
def __init__(self):
self.instance_id = None
self.password = None
def __call__(self, context, instance_id, password):
self.instance_id = instance_id
self.password = password
class ControllerTest(test.TestCase):
project_id = fakes.FAKE_PROJECT_ID
path = '/%s/servers' % project_id
path_v2 = '/v2' + path
path_with_id = path + '/%s'
path_with_id_v2 = path_v2 + '/%s'
path_with_query = path + '?%s'
path_detail = path + '/detail'
path_detail_v2 = path_v2 + '/detail'
path_detail_with_query = path_detail + '?%s'
path_action = path + '/%s/action'
def setUp(self):
super(ControllerTest, self).setUp()
fakes.stub_out_nw_api(self)
fakes.stub_out_key_pair_funcs(self)
fake.stub_out_image_service(self)
fakes.stub_out_secgroup_api(
self, security_groups=[{'name': 'default'}])
return_server = fakes.fake_compute_get(id=2, availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
return_servers = fakes.fake_compute_get_all()
# Server sort keys extension is enabled in v21 so sort data is passed
# to the instance API and the sorted DB API is invoked
self.mock_get_all = self.useFixture(fixtures.MockPatchObject(
compute_api.API, 'get_all', side_effect=return_servers)).mock
self.mock_get = self.useFixture(fixtures.MockPatchObject(
compute_api.API, 'get', side_effect=return_server)).mock
self.stub_out('nova.db.api.instance_update_and_get_original',
instance_update_and_get_original)
self.stub_out('nova.db.api.'
'block_device_mapping_get_all_by_instance_uuids',
fake_bdms_get_all_by_instance_uuids)
self.stub_out('nova.objects.InstanceMappingList.'
'_get_by_instance_uuids_from_db',
fake_get_inst_mappings_by_instance_uuids_from_db)
self.flags(group='glance', api_servers=['http://localhost:9292'])
self.controller = servers.ServersController()
self.ips_controller = ips.IPsController()
policy.reset()
policy.init()
self.addCleanup(policy.reset)
# Assume that anything that hits the compute API and looks for a
# RequestSpec doesn't care about it, since testing logic that deep
# should be done in nova.tests.unit.compute.test_compute_api.
mock_reqspec = mock.patch('nova.objects.RequestSpec')
mock_reqspec.start()
self.addCleanup(mock_reqspec.stop)
# Similarly we shouldn't care about anything hitting conductor from
# these tests.
mock_conductor = mock.patch.object(
self.controller.compute_api, 'compute_task_api')
mock_conductor.start()
self.addCleanup(mock_conductor.stop)
class ServersControllerTest(ControllerTest):
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
@mock.patch('nova.objects.Instance.get_by_uuid')
@mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
def test_instance_lookup_targets(self, mock_get_im, mock_get_inst):
ctxt = context.RequestContext('fake', self.project_id)
mock_get_im.return_value.cell_mapping.database_connection = uuids.cell1
self.controller._get_instance(ctxt, 'foo')
mock_get_im.assert_called_once_with(ctxt, 'foo')
self.assertIsNotNone(ctxt.db_connection)
def test_requested_networks_prefix(self):
"""Tests that we no longer support the legacy br-<uuid> format for
a network id.
"""
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
self.assertIn('Bad networks format: network uuid is not in proper '
'format', six.text_type(ex))
def test_requested_networks_enabled_with_port(self):
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_enabled_with_network(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(network, None, None, None)], res.as_tuples())
def test_requested_networks_enabled_with_network_and_port(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_with_and_duplicate_networks(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}, {'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(network, None, None, None),
(network, None, None, None)], res.as_tuples())
def test_requested_networks_enabled_conflict_on_fixed_ip(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
addr = '10.0.0.1'
requested_networks = [{'uuid': network,
'fixed_ip': addr,
'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_api_enabled_with_v2_subclass(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_get_server_by_uuid(self):
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
def test_get_server_joins(self):
def fake_get(*args, **kwargs):
expected_attrs = kwargs['expected_attrs']
self.assertEqual(['flavor', 'info_cache', 'metadata',
'numa_topology'], expected_attrs)
ctxt = context.RequestContext('fake', self.project_id)
return fake_instance.fake_instance_obj(
ctxt, expected_attrs=expected_attrs)
self.mock_get.side_effect = fake_get
req = self.req(self.path_with_id % FAKE_UUID)
self.controller.show(req, FAKE_UUID)
def test_unique_host_id(self):
"""Create two servers with the same host and different
project_ids and check that the host_id's are unique.
"""
def return_instance_with_host(context, *args, **kwargs):
project_id = uuidutils.generate_uuid()
return fakes.stub_instance_obj(context, id=1, uuid=FAKE_UUID,
project_id=project_id,
host='fake_host')
req = self.req(self.path_with_id % FAKE_UUID)
self.mock_get.side_effect = return_instance_with_host
server1 = self.controller.show(req, FAKE_UUID)
server2 = self.controller.show(req, FAKE_UUID)
self.assertNotEqual(server1['server']['hostId'],
server2['server']['hostId'])
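        # note (assumption, not asserted here): the API derives hostId
        # server-side from a hash over (project_id, host), so different
        # project ids on the same host yield different values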
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100):
return {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": progress,
"name": "server2",
"status": status,
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "2",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {
"seq": "2",
},
"links": [
{
"rel": "self",
"href": "http://localhost%s/%s" % (self.path_v2, uuid),
},
{
"rel": "bookmark",
"href": "http://localhost%s/%s" % (self.path, uuid),
},
],
"OS-DCF:diskConfig": "MANUAL",
"accessIPv4": '',
"accessIPv6": '',
"OS-EXT-AZ:availability_zone": "nova",
"config_drive": None,
"OS-EXT-SRV-ATTR:host": None,
"OS-EXT-SRV-ATTR:hypervisor_hostname": None,
"OS-EXT-SRV-ATTR:instance_name": "instance-00000002",
"key_name": '',
"OS-SRV-USG:launched_at": None,
"OS-SRV-USG:terminated_at": None,
"security_groups": [{'name': 'default'}],
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": vm_states.ACTIVE,
"OS-EXT-STS:power_state": 1,
"os-extended-volumes:volumes_attached": [
{'id': 'some_volume_1'},
{'id': 'some_volume_2'},
]
}
}
def test_get_server_by_id(self):
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
uuid = FAKE_UUID
        req = self.req(self.path_with_id_v2 % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = self._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_empty_az(self):
self.mock_get.side_effect = fakes.fake_compute_get(
availability_zone='')
uuid = FAKE_UUID
req = self.req(self.path_with_id_v2 % uuid)
res_dict = self.controller.show(req, uuid)
self.assertEqual(res_dict['server']['OS-EXT-AZ:availability_zone'], '')
def test_get_server_with_active_status_by_id(self):
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
self.mock_get.assert_called_once_with(
req.environ['nova.context'], FAKE_UUID,
expected_attrs=['flavor', 'info_cache', 'metadata',
'numa_topology'], cell_down_support=False)
def test_get_server_with_id_image_ref_by_id(self):
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
self.mock_get.assert_called_once_with(
req.environ['nova.context'], FAKE_UUID,
expected_attrs=['flavor', 'info_cache', 'metadata',
'numa_topology'], cell_down_support=False)
def _generate_nw_cache_info(self):
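        """Build a fake network info cache: a 'public' network with IPv4
        and IPv6 fixed IPs, and a 'private' network with IPv4 fixed IPs.
        """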
pub0 = ('172.19.0.1', '172.19.0.2',)
pub1 = ('1.2.3.4',)
pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
priv0 = ('192.168.0.3', '192.168.0.4',)
def _ip(ip):
return {'address': ip, 'type': 'fixed'}
nw_cache = [
{'address': 'aa:aa:aa:aa:aa:aa',
'id': 1,
'network': {'bridge': 'br0',
'id': 1,
'label': 'public',
'subnets': [{'cidr': '172.19.0.0/24',
'ips': [_ip(ip) for ip in pub0]},
{'cidr': '1.2.3.0/16',
'ips': [_ip(ip) for ip in pub1]},
{'cidr': 'b33f::/64',
'ips': [_ip(ip) for ip in pub2]}]}},
{'address': 'bb:bb:bb:bb:bb:bb',
'id': 2,
'network': {'bridge': 'br1',
'id': 2,
'label': 'private',
'subnets': [{'cidr': '192.168.0.0/24',
'ips': [_ip(ip) for ip in priv0]}]}}]
return nw_cache
def test_get_server_addresses_from_cache(self):
nw_cache = self._generate_nw_cache_info()
self.mock_get.side_effect = fakes.fake_compute_get(nw_cache=nw_cache,
availability_zone='nova')
req = self.req((self.path_with_id % FAKE_UUID) + '/ips')
res_dict = self.ips_controller.index(req, FAKE_UUID)
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3'},
{'version': 4, 'addr': '192.168.0.4'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1'},
{'version': 4, 'addr': '172.19.0.2'},
{'version': 4, 'addr': '1.2.3.4'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'},
],
},
}
self.assertThat(res_dict, matchers.DictMatches(expected))
self.mock_get.assert_called_once_with(
req.environ['nova.context'], FAKE_UUID,
expected_attrs=None, cell_down_support=False)
# Make sure we kept the addresses in order
self.assertIsInstance(res_dict['addresses'], collections.OrderedDict)
labels = [vif['network']['label'] for vif in nw_cache]
for index, label in enumerate(res_dict['addresses'].keys()):
self.assertEqual(label, labels[index])
def test_get_server_addresses_nonexistent_network(self):
url = ((self.path_with_id_v2 % FAKE_UUID) + '/ips/network_0')
req = self.req(url)
self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
req, FAKE_UUID, 'network_0')
def test_get_server_addresses_nonexistent_server(self):
self.mock_get.side_effect = exception.InstanceNotFound(
instance_id='fake')
req = self.req((self.path_with_id % uuids.fake) + '/ips')
self.assertRaises(webob.exc.HTTPNotFound,
self.ips_controller.index, req, uuids.fake)
self.mock_get.assert_called_once_with(
req.environ['nova.context'], uuids.fake, expected_attrs=None,
cell_down_support=False)
def test_show_server_hide_addresses_in_building(self):
uuid = FAKE_UUID
self.mock_get.side_effect = fakes.fake_compute_get(
uuid=uuid, vm_state=vm_states.BUILDING)
req = self.req(self.path_with_id_v2 % uuid)
res_dict = self.controller.show(req, uuid)
self.assertEqual({}, res_dict['server']['addresses'])
def test_show_server_addresses_in_non_building(self):
uuid = FAKE_UUID
nw_cache = self._generate_nw_cache_info()
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
{'version': 4, 'addr': '192.168.0.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '172.19.0.2',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '1.2.3.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
],
},
}
self.mock_get.side_effect = fakes.fake_compute_get(
nw_cache=nw_cache, uuid=uuid, vm_state=vm_states.ACTIVE)
req = self.req(self.path_with_id_v2 % uuid)
res_dict = self.controller.show(req, uuid)
self.assertThat(res_dict['server']['addresses'],
matchers.DictMatches(expected['addresses']))
def test_detail_server_hide_addresses(self):
nw_cache = self._generate_nw_cache_info()
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
{'version': 4, 'addr': '192.168.0.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '172.19.0.2',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '1.2.3.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
],
},
}
def fake_get_all(context, **kwargs):
return objects.InstanceList(
objects=[fakes.stub_instance_obj(1,
vm_state=vm_states.BUILDING,
uuid=uuids.fake,
nw_cache=nw_cache),
fakes.stub_instance_obj(2,
vm_state=vm_states.ACTIVE,
uuid=uuids.fake2,
nw_cache=nw_cache)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'deleted=true',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
for server in servers:
if server['OS-EXT-STS:vm_state'] == 'building':
self.assertEqual({}, server['addresses'])
else:
self.assertThat(server['addresses'],
matchers.DictMatches(expected['addresses']))
def test_get_server_list_empty(self):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
req = self.req(self.path)
res_dict = self.controller.index(req)
self.assertEqual(0, len(res_dict['servers']))
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'], expected_attrs=[], limit=1000,
marker=None, search_opts={'deleted': False,
'project_id': self.project_id},
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_server_list_with_reservation_id(self):
req = self.req(self.path_with_query % 'reservation_id=foo')
res_dict = self.controller.index(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
def test_get_server_list_with_reservation_id_empty(self):
req = self.req(self.path_detail_with_query % 'reservation_id=foo')
res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
def test_get_server_list_with_reservation_id_details(self):
req = self.req(self.path_detail_with_query % 'reservation_id=foo')
res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
def test_get_server_list(self):
req = self.req(self.path)
res_dict = self.controller.index(req)
self.assertEqual(len(res_dict['servers']), 5)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertIsNone(s.get('image', None))
expected_links = [
{
"rel": "self",
"href": "http://localhost" + (
self.path_with_id_v2 % s['id']),
},
{
"rel": "bookmark",
"href": "http://localhost" + (
self.path_with_id % s['id']),
},
]
self.assertEqual(s['links'], expected_links)
def test_get_servers_with_limit(self):
req = self.req(self.path_with_query % 'limit=3')
res_dict = self.controller.index(req)
servers = res_dict['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in range(len(servers))])
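        # Paging: the response should carry a 'next' link whose marker is
        # the id of the last server returned.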
servers_links = res_dict['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2' + self.path,
href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected_params = {'limit': ['3'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected_params))
def test_get_servers_with_limit_bad_value(self):
req = self.req(self.path_with_query % 'limit=aaa')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_server_details_empty(self):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
req = self.req(self.path_detail)
expected_attrs = ['flavor', 'info_cache', 'metadata']
if api_version_request.is_supported(req, '2.16'):
expected_attrs.append('services')
res_dict = self.controller.detail(req)
self.assertEqual(0, len(res_dict['servers']))
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'],
expected_attrs=sorted(expected_attrs),
limit=1000, marker=None,
search_opts={'deleted': False, 'project_id': self.project_id},
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_server_details_with_bad_name(self):
req = self.req(self.path_detail_with_query % 'name=%2Binstance')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_server_details_with_limit(self):
req = self.req(self.path_detail_with_query % 'limit=3')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in range(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual(self.path_detail_v2, href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_server_details_with_limit_bad_value(self):
req = self.req(self.path_detail_with_query % 'limit=aaa')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_get_server_details_with_limit_and_other_params(self):
req = self.req(self.path_detail_with_query %
'limit=3&blah=2:t&sort_key=uuid&sort_dir=asc')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in range(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual(self.path_detail_v2, href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'],
'sort_key': ['uuid'], 'sort_dir': ['asc'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_servers_with_too_big_limit(self):
req = self.req(self.path_with_query % 'limit=30')
res_dict = self.controller.index(req)
self.assertNotIn('servers_links', res_dict)
def test_get_servers_with_bad_limit(self):
req = self.req(self.path_with_query % 'limit=asdf')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_with_marker(self):
url = '%s?marker=%s' % (self.path_v2, fakes.get_fake_uuid(2))
req = self.req(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
def test_get_servers_with_limit_and_marker(self):
url = '%s?limit=2&marker=%s' % (self.path_v2,
fakes.get_fake_uuid(1))
req = self.req(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
def test_get_servers_with_bad_marker(self):
req = self.req(self.path_with_query % 'limit=2&marker=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_invalid_filter_param(self):
req = self.req(self.path_with_query % 'info_cache=asdf',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
req = self.req(self.path_with_query % '__foo__=asdf',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_invalid_regex_filter_param(self):
req = self.req(self.path_with_query % 'flavor=[[[',
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_with_empty_regex_filter_param(self):
req = self.req(self.path_with_query % 'flavor=',
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_detail_with_empty_regex_filter_param(self):
req = self.req(self.path_detail_with_query % 'flavor=',
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_get_servers_invalid_sort_key(self):
# "hidden" is a real field for instances but not exposed in the API.
req = self.req(self.path_with_query %
'sort_key=hidden&sort_dir=desc')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_ignore_sort_key(self):
req = self.req(self.path_with_query %
'sort_key=vcpus&sort_dir=asc')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_locked_sort_key(self):
# Prior to microversion 2.73 locked sort key is ignored.
req = self.req(self.path_with_query %
'sort_key=locked&sort_dir=asc')
self.controller.detail(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_sort_key_only_one_dir(self):
req = self.req(self.path_with_query %
'sort_key=user_id&sort_key=vcpus&sort_dir=asc')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=['user_id'],
sort_dirs=['asc'], cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_sort_key_with_no_sort_dir(self):
req = self.req(self.path_with_query %
'sort_key=vcpus&sort_key=user_id')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=['user_id'], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_sort_key_with_bad_sort_dir(self):
req = self.req(self.path_with_query %
'sort_key=vcpus&sort_dir=bad_dir')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_non_admin_with_admin_only_sort_key(self):
req = self.req(self.path_with_query %
'sort_key=host&sort_dir=desc')
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.index, req)
def test_get_servers_admin_with_admin_only_sort_key(self):
req = self.req(self.path_with_query %
'sort_key=node&sort_dir=desc',
use_admin_context=True)
self.controller.detail(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=['node'], sort_dirs=['desc'],
cell_down_support=False, all_tenants=False)
def test_get_servers_with_bad_option(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'unknownoption=whee')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'], expected_attrs=[],
limit=1000, marker=None,
search_opts={'deleted': False, 'project_id': self.project_id},
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_servers_with_locked_filter(self):
        # Prior to microversion 2.73 the locked filter parameter is ignored.
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'locked=true')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'], expected_attrs=[],
limit=1000, marker=None,
search_opts={'deleted': False, 'project_id': self.project_id},
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_servers_allows_image(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('image', search_opts)
self.assertEqual(search_opts['image'], '12345')
db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'image=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_tenant_id_filter_no_admin_context(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertNotIn('tenant_id', search_opts)
self.assertEqual(self.project_id, search_opts['project_id'])
return [fakes.stub_instance_obj(100)]
req = self.req(self.path_with_query % 'tenant_id=newfake')
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_tenant_id_filter_admin_context(self):
""""Test tenant_id search opt is dropped if all_tenants is not set."""
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertNotIn('tenant_id', search_opts)
self.assertEqual(self.project_id, search_opts['project_id'])
return [fakes.stub_instance_obj(100)]
req = self.req(self.path_with_query % 'tenant_id=newfake',
use_admin_context=True)
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_all_tenants_param_normal(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('project_id', search_opts)
return [fakes.stub_instance_obj(100)]
req = self.req(self.path_with_query % 'all_tenants',
use_admin_context=True)
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_all_tenants_param_one(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('project_id', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=1',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_param_zero(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=0',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_param_false(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=false',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_param_invalid(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=xxx',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_admin_restricted_tenant(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertEqual(search_opts['project_id'], self.project_id)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path, use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_pass_policy(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertNotIn('project_id', search_opts)
self.assertTrue(context.is_admin)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
rules = {
"os_compute_api:servers:index": "project_id:%s" % self.project_id,
"os_compute_api:servers:index:get_all_tenants":
"project_id:%s" % self.project_id
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
req = self.req(self.path_with_query % 'all_tenants=1')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_fail_policy(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
return [fakes.stub_instance_obj(100)]
rules = {
"os_compute_api:servers:index:get_all_tenants":
"project_id:non_fake",
"os_compute_api:servers:get_all":
"project_id:%s" % self.project_id,
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=1')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_get_servers_allows_flavor(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('flavor', search_opts)
            # the flavor filter is the flavor's ID, passed through as a
            # string
self.assertEqual(search_opts['flavor'], '12345')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'flavor=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_with_bad_flavor(self):
req = self.req(self.path_with_query % 'flavor=abcde')
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_server_details_with_bad_flavor(self):
req = self.req(self.path_with_query % 'flavor=abcde')
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
servers = self.controller.detail(req)['servers']
self.assertThat(servers, testtools.matchers.HasLength(0))
def test_get_servers_allows_status(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('vm_state', search_opts)
self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'status=active')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_task_status(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('task_state', search_opts)
self.assertEqual([task_states.REBOOT_PENDING,
task_states.REBOOT_STARTED,
task_states.REBOOTING],
search_opts['task_state'])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(
100, uuid=uuids.fake, task_state=task_states.REBOOTING)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'status=reboot')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_resize_status(self):
        # Test that the 'resize' status filter maps to a list of vm states.
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIn('vm_state', search_opts)
self.assertEqual(search_opts['vm_state'],
[vm_states.ACTIVE, vm_states.STOPPED])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'status=resize')
servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
self.assertEqual(servers[0]['id'], uuids.fake)
def test_get_servers_invalid_status(self):
# Test getting servers by invalid status.
req = self.req(self.path_with_query % 'status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = self.req(self.path_with_query % 'status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.detail, req)
def test_get_servers_deleted_status_as_admin(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIn('vm_state', search_opts)
self.assertEqual(search_opts['vm_state'], ['deleted'])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'status=deleted',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_deleted_filter_str_to_bool(self):
db_list = objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake,
vm_state='deleted')])
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = db_list
req = self.req(self.path_with_query % 'deleted=true',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
        # Assert that the 'deleted' filter value is converted to a boolean
        # before get_all() is called.
expected_search_opts = {'deleted': True, 'project_id': self.project_id}
self.assertEqual(expected_search_opts,
self.mock_get_all.call_args[1]['search_opts'])
def test_get_servers_deleted_filter_invalid_str(self):
db_list = objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = db_list
req = fakes.HTTPRequest.blank(self.path_with_query % 'deleted=abc',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
        # Assert that an invalid 'deleted' filter value is converted to
        # boolean False before get_all() is called.
expected_search_opts = {'deleted': False,
'project_id': self.project_id}
self.assertEqual(expected_search_opts,
self.mock_get_all.call_args[1]['search_opts'])
def test_get_servers_allows_name(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('name', search_opts)
self.assertEqual(search_opts['name'], 'whee.*')
self.assertEqual([], expected_attrs)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'name=whee.*')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_flavor_not_found(self):
self.mock_get_all.side_effect = exception.FlavorNotFound(flavor_id=1)
req = fakes.HTTPRequest.blank(
self.path_with_query % 'status=active&flavor=abc')
servers = self.controller.index(req)['servers']
self.assertEqual(0, len(servers))
def test_get_servers_allows_changes_since(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('changes-since', search_opts)
changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
tzinfo=iso8601.iso8601.UTC)
self.assertEqual(search_opts['changes-since'], changes_since)
self.assertNotIn('deleted', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
params = 'changes-since=2011-01-24T17:08:01Z'
req = self.req(self.path_with_query % params)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = self.req(self.path_with_query % params)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_get_servers_allows_changes_since_bad_value_on_compat_mode(self):
params = 'changes-since=asdf'
req = self.req(self.path_with_query % params)
req.set_legacy_v2()
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index,
req)
def test_get_servers_admin_filters_as_user(self):
"""Test getting servers by admin-only or unknown options when
context is not admin. Make sure the admin and unknown options
are stripped before they get to compute_api.get_all()
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
# Allowed by user
self.assertIn('name', search_opts)
self.assertIn('ip', search_opts)
# OSAPI converts status to vm_state
self.assertIn('vm_state', search_opts)
# Allowed only by admins with admin API on
self.assertNotIn('unknown_option', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank(self.path_with_query % query_str)
res = self.controller.index(req)
servers = res['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_admin_options_as_admin(self):
"""Test getting servers by admin-only or unknown options when
context is admin. All options should be passed
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
# Allowed by user
self.assertIn('name', search_opts)
self.assertIn('terminated_at', search_opts)
# OSAPI converts status to vm_state
self.assertIn('vm_state', search_opts)
# Allowed only by admins with admin API on
self.assertIn('ip', search_opts)
self.assertNotIn('unknown_option', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
query_str = ("name=foo&ip=10.*&status=active&unknown_option=meow&"
"terminated_at=^2016-02-01.*")
req = self.req(self.path_with_query % query_str,
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_admin_filters_as_user_with_policy_override(self):
"""Test getting servers by admin-only or unknown options when
context is not admin but policy allows.
"""
server_uuid = uuids.fake
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
# Allowed by user
self.assertIn('name', search_opts)
self.assertIn('terminated_at', search_opts)
# OSAPI converts status to vm_state
self.assertIn('vm_state', search_opts)
# Allowed only by admins with admin API on
self.assertIn('ip', search_opts)
self.assertNotIn('unknown_option', search_opts)
# "hidden" is ignored as a filter parameter since it is only used
# internally
self.assertNotIn('hidden', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
rules = {
"os_compute_api:servers:index": "project_id:%s" % self.project_id,
"os_compute_api:servers:allow_all_filters":
"project_id:%s" % self.project_id,
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.mock_get_all.side_effect = fake_get_all
query_str = ("name=foo&ip=10.*&status=active&unknown_option=meow&"
"terminated_at=^2016-02-01.*&hidden=true")
req = self.req(self.path_with_query % query_str)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_ip(self):
"""Test getting servers by ip."""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('ip', search_opts)
self.assertEqual(search_opts['ip'], r'10\..*')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % r'ip=10\..*')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_admin_allows_ip6(self):
"""Test getting servers by ip6 with admin_api enabled and
admin context
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('ip6', search_opts)
self.assertEqual(search_opts['ip6'], 'ffff.*')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'ip6=ffff.*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_ip6_with_new_version(self):
"""Test getting servers by ip6 with new version requested
and no admin context
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('ip6', search_opts)
self.assertEqual(search_opts['ip6'], 'ffff.*')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'ip6=ffff.*')
req.api_version_request = api_version_request.APIVersionRequest('2.5')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_admin_allows_access_ip_v4(self):
"""Test getting servers by access_ip_v4 with admin_api enabled and
admin context
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('access_ip_v4', search_opts)
self.assertEqual(search_opts['access_ip_v4'], 'ffff.*')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'access_ip_v4=ffff.*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_admin_allows_access_ip_v6(self):
"""Test getting servers by access_ip_v6 with admin_api enabled and
admin context
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('access_ip_v6', search_opts)
self.assertEqual(search_opts['access_ip_v6'], 'ffff.*')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'access_ip_v6=ffff.*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def _assertServerUsage(self, server, launched_at, terminated_at):
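        """Assert that the server's OS-SRV-USG launched_at/terminated_at
        fields parse back to the expected datetimes.
        """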
resp_launched_at = timeutils.parse_isotime(
server.get('OS-SRV-USG:launched_at'))
self.assertEqual(timeutils.normalize_time(resp_launched_at),
launched_at)
resp_terminated_at = timeutils.parse_isotime(
server.get('OS-SRV-USG:terminated_at'))
self.assertEqual(timeutils.normalize_time(resp_terminated_at),
terminated_at)
def test_show_server_usage(self):
DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
self.mock_get.side_effect = fakes.fake_compute_get(
id=1, uuid=FAKE_UUID, launched_at=DATE1, terminated_at=DATE2)
req = self.req(self.path_with_id % FAKE_UUID)
req.accept = 'application/json'
req.method = 'GET'
res = req.get_response(compute.APIRouterV21())
self.assertEqual(res.status_int, 200)
self.useFixture(utils_fixture.TimeFixture())
self._assertServerUsage(jsonutils.loads(res.body).get('server'),
launched_at=DATE1,
terminated_at=DATE2)
def test_detail_server_usage(self):
DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
DATE3 = datetime.datetime(year=2013, month=4, day=5, hour=14)
def fake_compute_get_all(*args, **kwargs):
db_list = [
fakes.stub_instance_obj(context, id=2, uuid=FAKE_UUID,
launched_at=DATE2,
terminated_at=DATE3),
fakes.stub_instance_obj(context, id=3, uuid=FAKE_UUID,
launched_at=DATE1,
terminated_at=DATE3),
]
return objects.InstanceList(objects=db_list)
self.mock_get_all.side_effect = fake_compute_get_all
req = self.req(self.path_detail)
req.accept = 'application/json'
servers = req.get_response(compute.APIRouterV21())
self.assertEqual(servers.status_int, 200)
self._assertServerUsage(jsonutils.loads(
servers.body).get('servers')[0],
launched_at=DATE2,
terminated_at=DATE3)
self._assertServerUsage(jsonutils.loads(
servers.body).get('servers')[1],
launched_at=DATE1,
terminated_at=DATE3)
def test_get_all_server_details(self):
expected_flavor = {
"id": "2",
"links": [
{
"rel": "bookmark",
"href": ('http://localhost/%s/flavors/2' %
self.project_id),
},
],
}
expected_image = {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": ('http://localhost/%s/images/10' %
self.project_id),
},
],
}
req = self.req(self.path_detail)
res_dict = self.controller.detail(req)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['hostId'], '')
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertEqual(s['image'], expected_image)
self.assertEqual(s['flavor'], expected_flavor)
self.assertEqual(s['status'], 'ACTIVE')
self.assertEqual(s['metadata']['seq'], str(i + 1))
def test_get_all_server_details_with_host(self):
"""We want to make sure that if two instances are on the same host,
then they return the same hostId. If two instances are on different
hosts, they should return different hostIds. In this test,
there are 5 instances - 2 on one host and 3 on another.
"""
def return_servers_with_host(*args, **kwargs):
return objects.InstanceList(
objects=[fakes.stub_instance_obj(None,
id=i + 1,
user_id='fake',
project_id='fake',
host=i % 2,
uuid=fakes.get_fake_uuid(i))
for i in range(5)])
self.mock_get_all.side_effect = return_servers_with_host
req = self.req(self.path_detail)
res_dict = self.controller.detail(req)
server_list = res_dict['servers']
host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
self.assertTrue(host_ids[0] and host_ids[1])
self.assertNotEqual(host_ids[0], host_ids[1])
for i, s in enumerate(server_list):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['hostId'], host_ids[i % 2])
self.assertEqual(s['name'], 'server%d' % (i + 1))
def test_get_servers_joins_services(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
cur = api_version_request.APIVersionRequest(self.wsgi_api_version)
v216 = api_version_request.APIVersionRequest('2.16')
if cur >= v216:
self.assertIn('services', expected_attrs)
else:
self.assertNotIn('services', expected_attrs)
return objects.InstanceList()
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_detail, use_admin_context=True)
self.assertIn('servers', self.controller.detail(req))
req = fakes.HTTPRequest.blank(self.path_detail,
use_admin_context=True,
version=self.wsgi_api_version)
self.assertIn('servers', self.controller.detail(req))
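# The subclasses below re-run the base controller tests at later
# microversions: each setUp swaps in a fake instance carrying the extra
# attributes that version exposes, and each _get_server_data_dict extends
# the expected body with the fields that microversion adds.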
class ServersControllerTestV23(ServersControllerTest):
wsgi_api_version = '2.3'
def setUp(self):
super(ServersControllerTestV23, self).setUp()
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100):
server_dict = super(ServersControllerTestV23,
self)._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
status,
progress)
server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
server_dict['server'][
"OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
server_dict['server']["OS-EXT-STS:task_state"] = None
server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
server_dict['server']["OS-EXT-STS:power_state"] = 1
server_dict['server']["os-extended-volumes:volumes_attached"] = [
{'id': 'some_volume_1', 'delete_on_termination': True},
{'id': 'some_volume_2', 'delete_on_termination': False}]
return server_dict
def test_show(self):
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_detail(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None):
obj_list = []
for i in range(2):
server = fakes.stub_instance_obj(context,
id=2, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
obj_list.append(server)
return objects.InstanceList(objects=obj_list)
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = fake_get_all(context)
req = self.req(self.path_detail)
servers_list = self.controller.detail(req)
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertIn(expected_server['server'], servers_list['servers'])
class ServersControllerTestV29(ServersControllerTest):
wsgi_api_version = '2.9'
def setUp(self):
super(ServersControllerTestV29, self).setUp()
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100):
server_dict = super(ServersControllerTestV29,
self)._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
status,
progress)
server_dict['server']['locked'] = False
server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
server_dict['server'][
"OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
server_dict['server']["OS-EXT-STS:task_state"] = None
server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
server_dict['server']["OS-EXT-STS:power_state"] = 1
server_dict['server']["os-extended-volumes:volumes_attached"] = [
{'id': 'some_volume_1', 'delete_on_termination': True},
{'id': 'some_volume_2', 'delete_on_termination': False}]
return server_dict
def _test_get_server_with_lock(self, locked_by):
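        """Show a server locked by the given actor ('admin', 'owner' or
        None) and check that the 2.9 'locked' field reflects it.
        """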
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, locked_by=locked_by, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
        expected_server['server']['locked'] = bool(locked_by)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
return res_dict
def test_get_server_with_locked_by_admin(self):
res_dict = self._test_get_server_with_lock('admin')
self.assertTrue(res_dict['server']['locked'])
def test_get_server_with_locked_by_owner(self):
res_dict = self._test_get_server_with_lock('owner')
self.assertTrue(res_dict['server']['locked'])
def test_get_server_not_locked(self):
res_dict = self._test_get_server_with_lock(None)
self.assertFalse(res_dict['server']['locked'])
def _test_list_server_detail_with_lock(self,
s1_locked,
s2_locked):
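        """List servers with the given lock states; 'not_locked' should
        surface as locked=False, any other value as locked=True.
        """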
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = fake_instance_get_all_with_locked(
context, [s1_locked, s2_locked],
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
req = self.req(self.path_detail)
servers_list = self.controller.detail(req)
        # Check that each returned server carries the 'locked' value it
        # was created with, matching servers by 'id'.
for locked in [s1_locked, s2_locked]:
server = next(server for server in servers_list['servers']
if (server['id'] == fakes.get_fake_uuid(locked)))
            expected = locked != 'not_locked'
self.assertEqual(expected, server['locked'])
def test_list_server_detail_with_locked_s1_admin_s2_owner(self):
self._test_list_server_detail_with_lock('admin', 'owner')
def test_list_server_detail_with_locked_s1_owner_s2_admin(self):
self._test_list_server_detail_with_lock('owner', 'admin')
def test_list_server_detail_with_locked_s1_admin_s2_admin(self):
self._test_list_server_detail_with_lock('admin', 'admin')
def test_list_server_detail_with_locked_s1_admin_s2_not_locked(self):
self._test_list_server_detail_with_lock('admin', 'not_locked')
def test_list_server_detail_with_locked_s1_s2_not_locked(self):
self._test_list_server_detail_with_lock('not_locked',
'not_locked')
def test_get_servers_remove_non_search_options(self):
self.mock_get_all.side_effect = None
req = fakes.HTTPRequestV21.blank('/servers'
'?sort_key=uuid&sort_dir=asc'
'&sort_key=user_id&sort_dir=desc'
'&limit=1&marker=123',
use_admin_context=True)
self.controller.index(req)
kwargs = self.mock_get_all.call_args[1]
search_opts = kwargs['search_opts']
for key in ('sort_key', 'sort_dir', 'limit', 'marker'):
self.assertNotIn(key, search_opts)
class ServersControllerTestV216(ServersControllerTest):
wsgi_api_version = '2.16'
def setUp(self):
super(ServersControllerTestV216, self).setUp()
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, uuid=FAKE_UUID,
host="node-fake",
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
self.mock_get_instance_host_status = self.useFixture(
fixtures.MockPatchObject(
compute_api.API, 'get_instance_host_status',
return_value='UP')).mock
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100):
server_dict = super(ServersControllerTestV216,
self)._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
status,
progress)
server_dict['server']['locked'] = False
server_dict['server']["host_status"] = "UP"
server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
server_dict['server']['hostId'] = nova_utils.generate_hostid(
'node-fake', server_dict['server']['tenant_id'])
server_dict['server']["OS-EXT-SRV-ATTR:host"] = "node-fake"
server_dict['server'][
"OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
server_dict['server']["OS-EXT-STS:task_state"] = None
server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
server_dict['server']["OS-EXT-STS:power_state"] = 1
server_dict['server']["os-extended-volumes:volumes_attached"] = [
{'id': 'some_volume_1', 'delete_on_termination': True},
{'id': 'some_volume_2', 'delete_on_termination': False}]
return server_dict
@mock.patch('nova.compute.api.API.get_instance_host_status')
def _verify_host_status_policy_behavior(self, func, mock_get_host_status):
# Set policy to disallow both host_status cases and verify we don't
# call the get_instance_host_status compute RPC API.
rules = {
'os_compute_api:servers:show:host_status': '!',
'os_compute_api:servers:show:host_status:unknown-only': '!',
}
orig_rules = policy.get_rules()
policy.set_rules(oslo_policy.Rules.from_dict(rules), overwrite=False)
func()
mock_get_host_status.assert_not_called()
# Restore the original rules.
policy.set_rules(orig_rules)
def test_show(self):
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
func = functools.partial(self.controller.show, req, FAKE_UUID)
self._verify_host_status_policy_behavior(func)
def test_detail(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None):
obj_list = []
for i in range(2):
server = fakes.stub_instance_obj(context,
id=2, uuid=FAKE_UUID,
host="node-fake",
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
obj_list.append(server)
return objects.InstanceList(objects=obj_list)
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = fake_get_all(context)
req = self.req(self.path_detail)
servers_list = self.controller.detail(req)
self.assertEqual(2, len(servers_list['servers']))
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertIn(expected_server['server'], servers_list['servers'])
# We should have only gotten the host status once per host (and the
# 2 servers in the response are using the same host).
self.mock_get_instance_host_status.assert_called_once()
func = functools.partial(self.controller.detail, req)
self._verify_host_status_policy_behavior(func)
class ServersControllerTestV219(ServersControllerTest):
wsgi_api_version = '2.19'
def setUp(self):
super(ServersControllerTestV219, self).setUp()
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
        self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get_instance_host_status',
            return_value='UP'))
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100, description=None):
server_dict = super(ServersControllerTestV219,
self)._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
status,
progress)
server_dict['server']['locked'] = False
server_dict['server']['description'] = description
server_dict['server']["host_status"] = "UP"
server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
server_dict['server'][
"OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
server_dict['server']["OS-EXT-STS:task_state"] = None
server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
server_dict['server']["OS-EXT-STS:power_state"] = 1
server_dict['server']["os-extended-volumes:volumes_attached"] = [
{'id': 'some_volume_1', 'delete_on_termination': True},
{'id': 'some_volume_2', 'delete_on_termination': False}]
return server_dict
def _test_get_server_with_description(self, description):
image_bookmark = "http://localhost/%s/images/10" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, display_description=description, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
req = self.req(self.path_with_id % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0,
description=description)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
return res_dict
def _test_list_server_detail_with_descriptions(self,
s1_desc,
s2_desc):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = (
fake_instance_get_all_with_description(context,
[s1_desc, s2_desc],
launched_at=None,
terminated_at=None))
req = self.req(self.path_detail)
servers_list = self.controller.detail(req)
        # Check that each returned server has the same 'description' value
        # and 'id' as it was created with.
for desc in [s1_desc, s2_desc]:
server = next(server for server in servers_list['servers']
if (server['id'] == fakes.get_fake_uuid(desc)))
expected = desc
self.assertEqual(expected, server['description'])
def test_get_server_with_description(self):
self._test_get_server_with_description('test desc')
def test_list_server_detail_with_descriptions(self):
self._test_list_server_detail_with_descriptions('desc1', 'desc2')
class ServersControllerTestV226(ControllerTest):
wsgi_api_version = '2.26'
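    # Microversion 2.26 adds server tags: a 'tags' list in the server
    # response body and the tags/tags-any/not-tags/not-tags-any list
    # filters, each taking comma-separated values. Illustrative queries
    # (tag names are examples only):
    #
    #   GET /servers?tags=t1,t2      # servers tagged with both t1 and t2
    #   GET /servers?tags-any=t1,t2  # servers tagged with t1 or t2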
def test_get_server_with_tags_by_id(self):
req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID,
version=self.wsgi_api_version)
ctxt = req.environ['nova.context']
tags = ['tag1', 'tag2']
def fake_get(*args, **kwargs):
self.assertIn('tags', kwargs['expected_attrs'])
fake_server = fakes.stub_instance_obj(
ctxt, id=2, vm_state=vm_states.ACTIVE, progress=100)
tag_list = objects.TagList(objects=[
objects.Tag(resource_id=FAKE_UUID, tag=tag)
for tag in tags])
fake_server.tags = tag_list
return fake_server
self.mock_get.side_effect = fake_get
res_dict = self.controller.show(req, FAKE_UUID)
self.assertIn('tags', res_dict['server'])
self.assertEqual(tags, res_dict['server']['tags'])
def _test_get_servers_allows_tag_filters(self, filter_name):
query_string = '%s=t1,t2' % filter_name
req = fakes.HTTPRequest.blank(self.path_with_query % query_string,
version=self.wsgi_api_version)
def fake_get_all(*a, **kw):
self.assertIsNotNone(kw['search_opts'])
self.assertIn(filter_name, kw['search_opts'])
self.assertEqual(kw['search_opts'][filter_name], ['t1', 't2'])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(req.environ['nova.context'],
uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_tags_filter(self):
self._test_get_servers_allows_tag_filters('tags')
def test_get_servers_allows_tags_any_filter(self):
self._test_get_servers_allows_tag_filters('tags-any')
def test_get_servers_allows_not_tags_filter(self):
self._test_get_servers_allows_tag_filters('not-tags')
def test_get_servers_allows_not_tags_any_filter(self):
self._test_get_servers_allows_tag_filters('not-tags-any')
class ServerControllerTestV238(ControllerTest):
wsgi_api_version = '2.38'
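    # Microversion 2.38 turns an invalid status filter value into a 400
    # for admin and non-admin callers alike; earlier versions handled it
    # inconsistently (non-admins simply got an empty list back).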
def _test_invalid_status(self, is_admin):
req = fakes.HTTPRequest.blank(
self.path_detail_with_query % 'status=invalid',
version=self.wsgi_api_version, use_admin_context=is_admin)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
def test_list_servers_detail_invalid_status_for_admin(self):
self._test_invalid_status(True)
def test_list_servers_detail_invalid_status_for_non_admin(self):
self._test_invalid_status(False)
class ServerControllerTestV247(ControllerTest):
"""Server controller test for microversion 2.47
The intent here is simply to verify that when showing server details
after microversion 2.47 that the flavor is shown as a dict of flavor
information rather than as dict of id/links. The existence of the
'extra_specs' key is controlled by policy.
"""
wsgi_api_version = '2.47'
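    # Before 2.47 the embedded flavor was only a reference, roughly
    # {"flavor": {"id": "2", "links": [...]}}; from 2.47 onward it is the
    # inlined flavor-details dict asserted below. The pre-2.47 form shown
    # here is an illustrative sketch, not an exact response.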
@mock.patch.object(objects.TagList, 'get_by_resource_id')
def test_get_all_server_details(self, mock_get_by_resource_id):
# Fake out tags on the instances
mock_get_by_resource_id.return_value = objects.TagList()
expected_flavor = {
'disk': 20,
'ephemeral': 0,
'extra_specs': {},
'original_name': u'm1.small',
'ram': 2048,
'swap': 0,
'vcpus': 1}
req = fakes.HTTPRequest.blank(self.path_detail,
version=self.wsgi_api_version)
hits = []
real_auth = policy.authorize
# Wrapper for authorize to count the number of times
# we authorize for extra-specs
def fake_auth(context, action, target):
if 'extra-specs' in action:
hits.append(1)
return real_auth(context, action, target)
with mock.patch('nova.policy.authorize') as mock_auth:
mock_auth.side_effect = fake_auth
res_dict = self.controller.detail(req)
        # We should have found more than one server, but only hit the
        # policy check once.
self.assertGreater(len(res_dict['servers']), 1)
self.assertEqual(1, len(hits))
        for s in res_dict['servers']:
            self.assertEqual(s['flavor'], expected_flavor)
@mock.patch.object(objects.TagList, 'get_by_resource_id')
def test_get_all_server_details_no_extra_spec(self,
mock_get_by_resource_id):
# Fake out tags on the instances
mock_get_by_resource_id.return_value = objects.TagList()
# Set the policy so we don't have permission to index
# flavor extra-specs but are able to get server details.
servers_rule = 'os_compute_api:servers:detail'
extraspec_rule = 'os_compute_api:os-flavor-extra-specs:index'
self.policy.set_rules({
extraspec_rule: 'rule:admin_api',
servers_rule: '@'})
expected_flavor = {
'disk': 20,
'ephemeral': 0,
'original_name': u'm1.small',
'ram': 2048,
'swap': 0,
'vcpus': 1}
req = fakes.HTTPRequest.blank(self.path_detail,
version=self.wsgi_api_version)
res_dict = self.controller.detail(req)
        for s in res_dict['servers']:
            self.assertEqual(s['flavor'], expected_flavor)
class ServerControllerTestV266(ControllerTest):
"""Server controller test for microversion 2.66
Add changes-before parameter to get servers or servers details of
2.66 microversion.
Filters the response by a date and time stamp when the server last
changed. Those changed before the specified date and time stamp are
returned.
"""
wsgi_api_version = '2.66'
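    # Combined with the pre-existing changes-since filter, changes-before
    # bounds a time window on the servers' last-changed time. An
    # illustrative query (timestamps are examples only):
    #
    #   GET /servers?changes-since=2011-01-23T17:08:01Z
    #       &changes-before=2011-01-24T17:08:01Z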
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
def test_get_servers_allows_changes_before(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('changes-before', search_opts)
changes_before = datetime.datetime(2011, 1, 24, 17, 8, 1,
tzinfo=iso8601.iso8601.UTC)
self.assertEqual(search_opts['changes-before'], changes_before)
self.assertNotIn('deleted', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
params = 'changes-before=2011-01-24T17:08:01Z'
req = self.req(self.path_with_query % params)
req.api_version_request = api_version_request.APIVersionRequest('2.66')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_changes_before_bad_value(self):
params = 'changes-before=asdf'
req = self.req(self.path_with_query % params)
req.api_version_request = api_version_request.APIVersionRequest('2.66')
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_get_servers_allows_changes_before_bad_value_on_compat_mode(self):
params = 'changes-before=asdf'
req = self.req(self.path_with_query % params)
req.api_version_request = api_version_request.APIVersionRequest('2.66')
req.set_legacy_v2()
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
def test_get_servers_allows_changes_since_and_changes_before(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('changes-since', search_opts)
changes_since = datetime.datetime(2011, 1, 23, 17, 8, 1,
tzinfo=iso8601.iso8601.UTC)
self.assertIn('changes-before', search_opts)
changes_before = datetime.datetime(2011, 1, 24, 17, 8, 1,
tzinfo=iso8601.iso8601.UTC)
self.assertEqual(search_opts['changes-since'], changes_since)
self.assertEqual(search_opts['changes-before'], changes_before)
self.assertNotIn('deleted', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
params = 'changes-since=2011-01-23T17:08:01Z&' \
'changes-before=2011-01-24T17:08:01Z'
req = self.req(self.path_with_query % params)
req.api_version_request = api_version_request.APIVersionRequest('2.66')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_filters_with_distinct_changes_time_bad_request(self):
changes_since = '2018-09-04T05:45:27Z'
changes_before = '2018-09-03T05:45:27Z'
query_string = ('changes-since=%s&changes-before=%s' %
(changes_since, changes_before))
req = self.req(self.path_with_query % query_string)
req.api_version_request = api_version_request.APIVersionRequest('2.66')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
class ServersControllerTestV271(ControllerTest):
wsgi_api_version = '2.71'
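    # Microversion 2.71 adds a 'server_groups' list (the UUIDs of any
    # server group the instance belongs to) to show, update and rebuild
    # responses; for an instance in no group the list is empty, as
    # asserted below.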
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
def test_show_server_group_not_exist(self):
req = self.req(self.path_with_id % FAKE_UUID)
servers = self.controller.show(req, FAKE_UUID)
expect_sg = []
self.assertEqual(expect_sg, servers['server']['server_groups'])
class ServersControllerTestV273(ControllerTest):
"""Server Controller test for microversion 2.73
The intent here is simply to verify that when showing server details
after microversion 2.73 the response will also have the locked_reason
key for the servers.
"""
wsgi_api_version = '2.73'
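    # From 2.73 the lock action may record a reason, which show and list
    # detail then expose. A rough sketch of the relevant response fields
    # (the reason text is illustrative only):
    #
    #   {"server": {"locked": true, "locked_reason": "maintenance", ...}}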
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
def test_get_servers_with_locked_filter(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
db_list = [fakes.stub_instance(
100, uuid=uuids.fake, locked_by='fake')]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'locked=true')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
search = {'deleted': False, 'project_id': self.project_id,
'locked': True}
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'], expected_attrs=[],
limit=1000, marker=None,
search_opts=search,
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_servers_with_locked_filter_invalid_value(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
db_list = [fakes.stub_instance(
100, uuid=uuids.fake, locked_by='fake')]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'locked=price')
exp = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
self.assertIn("Unrecognized value 'price'", six.text_type(exp))
def test_get_servers_with_locked_filter_empty_value(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
db_list = [fakes.stub_instance(
100, uuid=uuids.fake, locked_by='fake')]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'locked=')
exp = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
self.assertIn("Unrecognized value ''", six.text_type(exp))
def test_get_servers_with_locked_sort_key(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
db_list = [fakes.stub_instance(
100, uuid=uuids.fake, locked_by='fake')]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query %
'sort_dir=desc&sort_key=locked')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'], expected_attrs=[],
limit=1000, marker=None,
search_opts={'deleted': False, 'project_id': self.project_id},
sort_dirs=['desc'], sort_keys=['locked'],
cell_down_support=False, all_tenants=False)
class ServersControllerTestV275(ControllerTest):
wsgi_api_version = '2.75'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
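    # Microversion 2.75 rejects unknown query parameters and previously
    # ignored sort keys with a 400 ValidationError, and makes the PUT and
    # rebuild responses carry the same attributes as GET. The tests below
    # exercise both sides of that boundary with an illustrative
    # 'unknown=1' query string.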
@mock.patch('nova.compute.api.API.get_all')
def test_get_servers_additional_query_param_old_version(self, mock_get):
req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
use_admin_context=True,
version='2.74')
self.controller.index(req)
@mock.patch('nova.compute.api.API.get_all')
def test_get_servers_ignore_sort_key_old_version(self, mock_get):
req = fakes.HTTPRequest.blank(
self.path_with_query % 'sort_key=deleted',
use_admin_context=True, version='2.74')
self.controller.index(req)
def test_get_servers_additional_query_param(self):
req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
use_admin_context=True,
version=self.wsgi_api_version)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_get_servers_previously_ignored_sort_key(self):
for s_ignore in servers_schema.SERVER_LIST_IGNORE_SORT_KEY_V273:
req = fakes.HTTPRequest.blank(
self.path_with_query % 'sort_key=%s' % s_ignore,
use_admin_context=True,
version=self.wsgi_api_version)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_get_servers_additional_sort_key(self):
req = fakes.HTTPRequest.blank(
self.path_with_query % 'sort_key=unknown',
use_admin_context=True, version=self.wsgi_api_version)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_update_response_no_show_server_only_attributes_old_version(self):
        # There are some old server attributes which were added only for
        # the GET server APIs, not for PUT. GET server and PUT server share
        # the same view builder method show() to build the response, so make
        # sure attributes which are not supposed to be included in the PUT
        # response are not present.
body = {'server': {'name': 'server_test'}}
req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
use_admin_context=True,
version='2.74')
res_dict = self.controller.update(req, FAKE_UUID, body=body)
for field in GET_ONLY_FIELDS:
self.assertNotIn(field, res_dict['server'])
for items in res_dict['server']['addresses'].values():
for item in items:
self.assertNotIn('OS-EXT-IPS:type', item)
self.assertNotIn('OS-EXT-IPS-MAC:mac_addr', item)
def test_update_response_has_show_server_all_attributes(self):
body = {'server': {'name': 'server_test'}}
req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
use_admin_context=True,
version=self.wsgi_api_version)
res_dict = self.controller.update(req, FAKE_UUID, body=body)
for field in GET_ONLY_FIELDS:
self.assertIn(field, res_dict['server'])
for items in res_dict['server']['addresses'].values():
for item in items:
self.assertIn('OS-EXT-IPS:type', item)
self.assertIn('OS-EXT-IPS-MAC:mac_addr', item)
def test_rebuild_response_no_show_server_only_attributes_old_version(self):
        # There are some old server attributes which were added only for
        # the GET server APIs, not for rebuild. GET server and rebuild
        # server share the same view builder method show() to build the
        # response, so make sure the attributes which are not supposed to
        # be included in the rebuild response are not present.
body = {'rebuild': {"imageRef": self.image_uuid}}
req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
use_admin_context=True,
version='2.74')
fake_get = fakes.fake_compute_get(
vm_state=vm_states.ACTIVE,
project_id=req.environ['nova.context'].project_id,
user_id=req.environ['nova.context'].user_id)
self.mock_get.side_effect = fake_get
res_dict = self.controller._action_rebuild(req, FAKE_UUID,
body=body).obj
        get_only_fields_rebuild = copy.deepcopy(GET_ONLY_FIELDS)
        get_only_fields_rebuild.remove('key_name')
        for field in get_only_fields_rebuild:
self.assertNotIn(field, res_dict['server'])
for items in res_dict['server']['addresses'].values():
for item in items:
self.assertNotIn('OS-EXT-IPS:type', item)
self.assertNotIn('OS-EXT-IPS-MAC:mac_addr', item)
def test_rebuild_response_has_show_server_all_attributes(self):
body = {'rebuild': {"imageRef": self.image_uuid}}
req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
use_admin_context=True,
version=self.wsgi_api_version)
fake_get = fakes.fake_compute_get(
vm_state=vm_states.ACTIVE,
project_id=req.environ['nova.context'].project_id,
user_id=req.environ['nova.context'].user_id)
self.mock_get.side_effect = fake_get
res_dict = self.controller._action_rebuild(req, FAKE_UUID,
body=body).obj
for field in GET_ONLY_FIELDS:
if field == 'OS-EXT-SRV-ATTR:user_data':
self.assertNotIn(field, res_dict['server'])
field = 'user_data'
self.assertIn(field, res_dict['server'])
for items in res_dict['server']['addresses'].values():
for item in items:
self.assertIn('OS-EXT-IPS:type', item)
self.assertIn('OS-EXT-IPS-MAC:mac_addr', item)
class ServersControllerDeleteTest(ControllerTest):
def setUp(self):
super(ServersControllerDeleteTest, self).setUp()
self.server_delete_called = False
def fake_delete(api, context, instance):
if instance.uuid == uuids.non_existent_uuid:
raise exception.InstanceNotFound(instance_id=instance.uuid)
self.server_delete_called = True
self.stub_out('nova.compute.api.API.delete', fake_delete)
def _create_delete_request(self, uuid):
fakes.stub_out_instance_quota(self, 0, 10)
req = fakes.HTTPRequestV21.blank(self.path_with_id % uuid)
req.method = 'DELETE'
fake_get = fakes.fake_compute_get(
uuid=uuid,
vm_state=vm_states.ACTIVE,
project_id=req.environ['nova.context'].project_id,
user_id=req.environ['nova.context'].user_id)
self.mock_get.side_effect = fake_get
return req
def _delete_server_instance(self, uuid=FAKE_UUID):
req = self._create_delete_request(uuid)
self.controller.delete(req, uuid)
def test_delete_server_instance(self):
self._delete_server_instance()
self.assertTrue(self.server_delete_called)
def test_delete_server_instance_not_found(self):
self.assertRaises(webob.exc.HTTPNotFound,
self._delete_server_instance,
uuid=uuids.non_existent_uuid)
def test_delete_server_instance_while_building(self):
req = self._create_delete_request(FAKE_UUID)
self.controller.delete(req, FAKE_UUID)
self.assertTrue(self.server_delete_called)
@mock.patch.object(compute_api.API, 'delete',
side_effect=exception.InstanceIsLocked(
instance_uuid=FAKE_UUID))
def test_delete_locked_server(self, mock_delete):
req = self._create_delete_request(FAKE_UUID)
self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
req, FAKE_UUID)
mock_delete.assert_called_once_with(
req.environ['nova.context'], test.MatchType(objects.Instance))
def test_delete_server_instance_while_resize(self):
req = self._create_delete_request(FAKE_UUID)
fake_get = fakes.fake_compute_get(
vm_state=vm_states.ACTIVE,
task_state=task_states.RESIZE_PREP,
project_id=req.environ['nova.context'].project_id,
user_id=req.environ['nova.context'].user_id)
self.mock_get.side_effect = fake_get
self.controller.delete(req, FAKE_UUID)
def test_delete_server_instance_if_not_launched(self):
self.flags(reclaim_instance_interval=3600)
req = fakes.HTTPRequestV21.blank(self.path_with_id % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
fake_get = fakes.fake_compute_get(
launched_at=None,
project_id=req.environ['nova.context'].project_id,
user_id=req.environ['nova.context'].user_id)
self.mock_get.side_effect = fake_get
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
deleted_at = timeutils.utcnow()
return fake_instance.fake_db_instance(deleted_at=deleted_at)
self.stub_out('nova.db.api.instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
        # delete() should be called for an instance which has never been
        # active, even if reclaim_instance_interval has been set.
self.assertTrue(self.server_delete_called)
class ServersControllerRebuildInstanceTest(ControllerTest):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
expected_key_name = False
def setUp(self):
super(ServersControllerRebuildInstanceTest, self).setUp()
self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID)
self.req.method = 'POST'
self.req.headers["content-type"] = "application/json"
self.req_user_id = self.req.environ['nova.context'].user_id
self.req_project_id = self.req.environ['nova.context'].project_id
self.useFixture(nova_fixtures.SingleCellSimple())
def fake_get(ctrl, ctxt, uuid):
if uuid == 'test_inst':
raise webob.exc.HTTPNotFound(explanation='fakeout')
return fakes.stub_instance_obj(None,
vm_state=vm_states.ACTIVE,
project_id=self.req_project_id,
user_id=self.req_user_id)
self.useFixture(
fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
'ServersController._get_instance',
fake_get))
fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
project_id=self.req_project_id,
user_id=self.req_user_id)
self.mock_get.side_effect = fake_get
self.body = {
'rebuild': {
'name': 'new_name',
'imageRef': self.image_uuid,
'metadata': {
'open': 'stack',
},
},
}
def test_rebuild_server_with_image_not_uuid(self):
self.body['rebuild']['imageRef'] = 'not-uuid'
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID,
body=self.body)
def test_rebuild_server_with_image_as_full_url(self):
image_href = (
'http://localhost/v2/%s/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
self.body['rebuild']['imageRef'] = image_href
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID,
body=self.body)
def test_rebuild_server_with_image_as_empty_string(self):
self.body['rebuild']['imageRef'] = ''
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID,
body=self.body)
def test_rebuild_instance_name_with_spaces_in_the_middle(self):
self.body['rebuild']['name'] = 'abc def'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller._action_rebuild(self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_name_with_leading_trailing_spaces(self):
self.body['rebuild']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_name_with_leading_trailing_spaces_compat_mode(
self):
self.body['rebuild']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.set_legacy_v2()
def fake_rebuild(*args, **kwargs):
self.assertEqual('abc def', kwargs['display_name'])
with mock.patch.object(compute_api.API, 'rebuild') as mock_rebuild:
mock_rebuild.side_effect = fake_rebuild
self.controller._action_rebuild(self.req, FAKE_UUID,
body=self.body)
def test_rebuild_instance_with_blank_metadata_key(self):
self.body['rebuild']['metadata'][''] = 'world'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_with_metadata_key_too_long(self):
self.body['rebuild']['metadata'][('a' * 260)] = 'world'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_with_metadata_value_too_long(self):
self.body['rebuild']['metadata']['key1'] = ('a' * 260)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=self.body)
def test_rebuild_instance_with_metadata_value_not_string(self):
self.body['rebuild']['metadata']['key1'] = 1
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=self.body)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="4096", min_disk="10"))
def test_rebuild_instance_fails_when_min_ram_too_small(self, mock_show):
# make min_ram larger than our instance ram size
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="128", min_disk="100000"))
def test_rebuild_instance_fails_when_min_disk_too_small(self, mock_show):
# make min_disk larger than our instance disk size
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', size=str(1000 * (1024 ** 3))))
def test_rebuild_instance_image_too_large(self, mock_show):
# make image size larger than our instance disk size
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
def test_rebuild_instance_name_all_blank(self):
self.body['rebuild']['name'] = ' '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='DELETED'))
def test_rebuild_instance_with_deleted_image(self, mock_show):
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
def test_rebuild_instance_onset_file_limit_over_quota(self):
def fake_get_image(self, context, image_href, **kwargs):
return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True, status='active')
with test.nested(
mock.patch.object(fake._FakeImageService, 'show',
side_effect=fake_get_image),
mock.patch.object(self.controller.compute_api, 'rebuild',
side_effect=exception.OnsetFileLimitExceeded)
) as (
show_mock, rebuild_mock
):
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_bad_personality(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
body = {
"rebuild": {
"imageRef": self.image_uuid,
"personality": [{
"path": "/path/to/file",
"contents": "INVALID b64",
}]
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_personality(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
body = {
"rebuild": {
"imageRef": self.image_uuid,
"personality": [{
"path": "/path/to/file",
"contents": base64.encode_as_text("Test String"),
}]
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertNotIn('personality', body['server'])
def test_rebuild_response_has_no_show_server_only_attributes(self):
        # There are some old server attributes which were added only for
        # the GET server APIs, not for rebuild. GET server and rebuild share
        # the same view builder method show() to build the response, so make
        # sure attributes which are not supposed to be included in the
        # rebuild response are not present.
body = {
"rebuild": {
"imageRef": self.image_uuid,
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
get_only_fields = copy.deepcopy(GET_ONLY_FIELDS)
if self.expected_key_name:
get_only_fields.remove('key_name')
for field in get_only_fields:
self.assertNotIn(field, body['server'])
@mock.patch.object(compute_api.API, 'start')
def test_start(self, mock_start):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(start="")
self.controller._start_server(req, FAKE_UUID, body)
mock_start.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(compute_api.API, 'start', fake_start_stop_not_ready)
def test_start_not_ready(self):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, FAKE_UUID, body)
@mock.patch.object(
compute_api.API, 'start', fakes.fake_actions_to_locked_server)
def test_start_locked_server(self):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, FAKE_UUID, body)
@mock.patch.object(compute_api.API, 'start', fake_start_stop_invalid_state)
def test_start_invalid(self):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, FAKE_UUID, body)
@mock.patch.object(compute_api.API, 'stop')
def test_stop(self, mock_stop):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(stop="")
self.controller._stop_server(req, FAKE_UUID, body)
mock_stop.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(compute_api.API, 'stop', fake_start_stop_not_ready)
def test_stop_not_ready(self):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(stop="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, FAKE_UUID, body)
@mock.patch.object(
compute_api.API, 'stop', fakes.fake_actions_to_locked_server)
def test_stop_locked_server(self):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(stop="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, FAKE_UUID, body)
@mock.patch.object(compute_api.API, 'stop', fake_start_stop_invalid_state)
def test_stop_invalid_state(self):
req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, FAKE_UUID, body)
@mock.patch(
'nova.db.api.instance_get_by_uuid',
fake_instance_get_by_uuid_not_found)
def test_start_with_bogus_id(self):
req = fakes.HTTPRequestV21.blank(self.path_action % 'test_inst')
body = dict(start="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._start_server, req, 'test_inst', body)
@mock.patch(
'nova.db.api.instance_get_by_uuid',
fake_instance_get_by_uuid_not_found)
def test_stop_with_bogus_id(self):
req = fakes.HTTPRequestV21.blank(self.path_action % 'test_inst')
body = dict(stop="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._stop_server, req, 'test_inst', body)
class ServersControllerRebuildTestV254(ServersControllerRebuildInstanceTest):
expected_key_name = True
def setUp(self):
super(ServersControllerRebuildTestV254, self).setUp()
fakes.stub_out_key_pair_funcs(self)
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.54')
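    # Microversion 2.54 allows key_name in the rebuild action body so a
    # server's keypair can be changed on rebuild. A minimal sketch of the
    # request these tests exercise (the key name is illustrative only):
    #
    #   POST /servers/{server_id}/action
    #   {"rebuild": {"imageRef": "<image uuid>", "key_name": "key"}}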
def _test_set_key_name_rebuild(self, set_key_name=True):
key_name = "key"
fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
key_name=key_name,
project_id=self.req_project_id,
user_id=self.req_user_id)
self.mock_get.side_effect = fake_get
if set_key_name:
self.body['rebuild']['key_name'] = key_name
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller._action_rebuild(
self.req, FAKE_UUID,
body=self.body).obj['server']
self.assertEqual(server['id'], FAKE_UUID)
self.assertEqual(server['key_name'], key_name)
def test_rebuild_accepted_with_keypair_name(self):
self._test_set_key_name_rebuild()
def test_rebuild_key_not_changed(self):
self._test_set_key_name_rebuild(set_key_name=False)
def test_rebuild_invalid_microversion_253(self):
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.53')
body = {
"rebuild": {
"imageRef": self.image_uuid,
"key_name": "key"
},
}
excpt = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
self.assertIn('key_name', six.text_type(excpt))
def test_rebuild_with_not_existed_keypair_name(self):
body = {
"rebuild": {
"imageRef": self.image_uuid,
"key_name": "nonexistentkey"
},
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_user_has_no_key_pair(self):
def no_key_pair(context, user_id, name):
raise exception.KeypairNotFound(user_id=user_id, name=name)
self.stub_out('nova.db.api.key_pair_get', no_key_pair)
fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
key_name=None,
project_id=self.req_project_id,
user_id=self.req_user_id)
self.mock_get.side_effect = fake_get
self.body['rebuild']['key_name'] = "a-key-name"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_with_non_string_keypair_name(self):
body = {
"rebuild": {
"imageRef": self.image_uuid,
"key_name": 12345
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_invalid_keypair_name(self):
body = {
"rebuild": {
"imageRef": self.image_uuid,
"key_name": "123\0d456"
},
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_empty_keypair_name(self):
body = {
"rebuild": {
"imageRef": self.image_uuid,
"key_name": ''
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_none_keypair_name(self):
key_name = None
fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
key_name=key_name,
project_id=self.req_project_id,
user_id=self.req_user_id)
self.mock_get.side_effect = fake_get
with mock.patch.object(objects.KeyPair, 'get_by_name') as key_get:
self.body['rebuild']['key_name'] = key_name
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller._action_rebuild(
self.req, FAKE_UUID,
body=self.body)
            # NOTE: Because the API will call _get_server twice, the server
            # response will always be the same one, so we just use
            # objects.KeyPair.get_by_name to verify that the keypair lookup
            # was skipped.
            key_get.assert_not_called()
def test_rebuild_with_too_large_keypair_name(self):
body = {
"rebuild": {
"imageRef": self.image_uuid,
"key_name": 256 * "k"
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
class ServersControllerRebuildTestV257(ServersControllerRebuildTestV254):
"""Tests server rebuild at microversion 2.57 where user_data can be
provided and personality files are no longer accepted.
"""
def setUp(self):
super(ServersControllerRebuildTestV257, self).setUp()
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.57')
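    # Microversion 2.57 accepts base64-encoded user_data (or null to reset
    # it) in the rebuild body and rejects 'personality'. A minimal sketch
    # of a valid request (the payload is illustrative only and decodes to
    # 'echo "hello world"'):
    #
    #   {"rebuild": {"imageRef": "<image uuid>",
    #                "user_data": "ZWNobyAiaGVsbG8gd29ybGQi"}}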
def test_rebuild_personality(self):
"""Tests that trying to rebuild with personality files fails."""
body = {
"rebuild": {
"imageRef": self.image_uuid,
"personality": [{
"path": "/path/to/file",
"contents": base64.encode_as_text("Test String"),
}]
}
}
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
self.assertIn('personality', six.text_type(ex))
def test_rebuild_user_data_old_version(self):
"""Tests that trying to rebuild with user_data before 2.57 fails."""
body = {
"rebuild": {
"imageRef": self.image_uuid,
"user_data": "ZWNobyAiaGVsbG8gd29ybGQi"
}
}
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.55')
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
self.assertIn('user_data', six.text_type(ex))
def test_rebuild_user_data_malformed(self):
"""Tests that trying to rebuild with malformed user_data fails."""
body = {
"rebuild": {
"imageRef": self.image_uuid,
"user_data": b'invalid'
}
}
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
self.assertIn('user_data', six.text_type(ex))
def test_rebuild_user_data_too_large(self):
"""Tests that passing user_data to rebuild that is too large fails."""
body = {
"rebuild": {
"imageRef": self.image_uuid,
"user_data": ('MQ==' * 16384)
}
}
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
self.assertIn('user_data', six.text_type(ex))
@mock.patch.object(context.RequestContext, 'can')
@mock.patch('nova.db.api.instance_update_and_get_original')
def test_rebuild_reset_user_data(self, mock_update, mock_policy):
"""Tests that passing user_data=None resets the user_data on the
instance.
"""
body = {
"rebuild": {
"imageRef": self.image_uuid,
"user_data": None
}
}
self.mock_get.side_effect = None
self.mock_get.return_value = fakes.stub_instance_obj(
context.RequestContext(self.req_user_id, self.req_project_id),
user_data='ZWNobyAiaGVsbG8gd29ybGQi')
def fake_instance_update_and_get_original(
ctxt, instance_uuid, values, **kwargs):
# save() is called twice and the second one has system_metadata
# in the updates, so we can ignore that one.
if 'system_metadata' not in values:
self.assertIn('user_data', values)
self.assertIsNone(values['user_data'])
return instance_update_and_get_original(
ctxt, instance_uuid, values, **kwargs)
mock_update.side_effect = fake_instance_update_and_get_original
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
self.assertEqual(2, mock_update.call_count)
class ServersControllerRebuildTestV219(ServersControllerRebuildInstanceTest):
def setUp(self):
super(ServersControllerRebuildTestV219, self).setUp()
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.19')
def _rebuild_server(self, set_desc, desc):
fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
display_description=desc,
project_id=self.req_project_id,
user_id=self.req_user_id)
self.mock_get.side_effect = fake_get
if set_desc:
self.body['rebuild']['description'] = desc
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller._action_rebuild(self.req, FAKE_UUID,
body=self.body).obj['server']
self.assertEqual(server['id'], FAKE_UUID)
self.assertEqual(server['description'], desc)
def test_rebuild_server_with_description(self):
self._rebuild_server(True, 'server desc')
def test_rebuild_server_empty_description(self):
self._rebuild_server(True, '')
def test_rebuild_server_without_description(self):
self._rebuild_server(False, '')
def test_rebuild_server_remove_description(self):
self._rebuild_server(True, None)
def test_rebuild_server_description_too_long(self):
self.body['rebuild']['description'] = 'x' * 256
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_server_description_invalid(self):
# Invalid non-printable control char in the desc.
self.body['rebuild']['description'] = "123\0d456"
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
# NOTE(jaypipes): Not based on ServersControllerRebuildInstanceTest because
# that test case's setUp is completely b0rked
class ServersControllerRebuildTestV263(ControllerTest):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def setUp(self):
super(ServersControllerRebuildTestV263, self).setUp()
self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID)
self.req.method = 'POST'
self.req.headers["content-type"] = "application/json"
self.req_user_id = self.req.environ['nova.context'].user_id
self.req_project_id = self.req.environ['nova.context'].project_id
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.63')
self.body = {
'rebuild': {
'name': 'new_name',
'imageRef': self.image_uuid,
'metadata': {
'open': 'stack',
},
},
}
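    # Microversion 2.63 accepts a trusted_image_certificates list of
    # certificate IDs (1..50 unique strings) in the rebuild body. A minimal
    # sketch (the IDs are illustrative only):
    #
    #   {"rebuild": {"imageRef": "<image uuid>",
    #                "trusted_image_certificates": ["cert-id-1"]}}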
@mock.patch('nova.compute.api.API.get')
def _rebuild_server(self, mock_get, certs=None,
conf_enabled=True, conf_certs=None):
fakes.stub_out_trusted_certs(self, certs=certs)
ctx = self.req.environ['nova.context']
mock_get.return_value = fakes.stub_instance_obj(ctx,
vm_state=vm_states.ACTIVE, trusted_certs=certs,
project_id=self.req_project_id, user_id=self.req_user_id)
self.flags(default_trusted_certificate_ids=conf_certs, group='glance')
if conf_enabled:
self.flags(verify_glance_signatures=True, group='glance')
self.flags(enable_certificate_validation=True, group='glance')
self.body['rebuild']['trusted_image_certificates'] = certs
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller._action_rebuild(
self.req, FAKE_UUID, body=self.body).obj['server']
if certs:
self.assertEqual(certs, server['trusted_image_certificates'])
else:
if conf_enabled:
# configuration file default is used
self.assertEqual(
conf_certs, server['trusted_image_certificates'])
else:
# either not set or empty
self.assertIsNone(server['trusted_image_certificates'])
def test_rebuild_server_with_trusted_certs(self):
"""Test rebuild with valid trusted_image_certificates argument"""
self._rebuild_server(
certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
'674736e3-f25c-405c-8362-bbf991e0ce0a'])
def test_rebuild_server_without_trusted_certs(self):
"""Test rebuild without trusted image certificates"""
self._rebuild_server()
def test_rebuild_server_conf_options_turned_off_set(self):
"""Test rebuild with feature disabled and certs specified"""
self._rebuild_server(
certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'], conf_enabled=False)
def test_rebuild_server_conf_options_turned_off_empty(self):
"""Test rebuild with feature disabled"""
self._rebuild_server(conf_enabled=False)
def test_rebuild_server_default_trusted_certificates_empty(self):
"""Test rebuild with feature enabled and no certs specified"""
self._rebuild_server(conf_enabled=True)
def test_rebuild_server_default_trusted_certificates(self):
"""Test rebuild with certificate specified in configurations"""
self._rebuild_server(conf_enabled=True, conf_certs=['conf-id'])
def test_rebuild_server_with_empty_trusted_cert_id(self):
"""Make sure that we can't rebuild with an empty certificate ID"""
self.body['rebuild']['trusted_image_certificates'] = ['']
self.req.body = jsonutils.dump_as_bytes(self.body)
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('is too short', six.text_type(ex))
def test_rebuild_server_with_empty_trusted_certs(self):
"""Make sure that we can't rebuild with an empty array of IDs"""
self.body['rebuild']['trusted_image_certificates'] = []
self.req.body = jsonutils.dump_as_bytes(self.body)
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('is too short', six.text_type(ex))
def test_rebuild_server_with_too_many_trusted_certs(self):
"""Make sure that we can't rebuild with an array of >50 unique IDs"""
self.body['rebuild']['trusted_image_certificates'] = [
'cert{}'.format(i) for i in range(51)]
self.req.body = jsonutils.dump_as_bytes(self.body)
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('is too long', six.text_type(ex))
def test_rebuild_server_with_nonunique_trusted_certs(self):
"""Make sure that we can't rebuild with a non-unique array of IDs"""
self.body['rebuild']['trusted_image_certificates'] = ['cert', 'cert']
self.req.body = jsonutils.dump_as_bytes(self.body)
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('has non-unique elements', six.text_type(ex))
def test_rebuild_server_with_invalid_trusted_cert_id(self):
"""Make sure that we can't rebuild with non-string certificate IDs"""
self.body['rebuild']['trusted_image_certificates'] = [1, 2]
self.req.body = jsonutils.dump_as_bytes(self.body)
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('is not of type', six.text_type(ex))
def test_rebuild_server_with_invalid_trusted_certs(self):
"""Make sure that we can't rebuild with certificates in a non-array"""
self.body['rebuild']['trusted_image_certificates'] = "not-an-array"
self.req.body = jsonutils.dump_as_bytes(self.body)
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('is not of type', six.text_type(ex))
def test_rebuild_server_with_trusted_certs_pre_2_63_fails(self):
"""Make sure we can't use trusted_certs before 2.63"""
self._rebuild_server(certs=['trusted-cert-id'])
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.62')
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('Additional properties are not allowed',
six.text_type(ex))
def test_rebuild_server_with_trusted_certs_policy_failed(self):
rule_name = "os_compute_api:servers:rebuild:trusted_certs"
rules = {"os_compute_api:servers:rebuild": "@",
rule_name: "project:%s" % fakes.FAKE_PROJECT_ID}
self.policy.set_rules(rules)
exc = self.assertRaises(exception.PolicyNotAuthorized,
self._rebuild_server,
certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'])
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
@mock.patch.object(compute_api.API, 'rebuild')
def test_rebuild_server_with_cert_validation_error(
self, mock_rebuild):
mock_rebuild.side_effect = exception.CertificateValidationFailed(
cert_uuid="cert id", reason="test cert validation error")
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._rebuild_server,
certs=['trusted-cert-id'])
self.assertIn('test cert validation error',
six.text_type(ex))
class ServersControllerRebuildTestV271(ControllerTest):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def setUp(self):
super(ServersControllerRebuildTestV271, self).setUp()
self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID,
use_admin_context=True)
self.req.method = 'POST'
self.req.headers["content-type"] = "application/json"
self.req_user_id = self.req.environ['nova.context'].user_id
self.req_project_id = self.req.environ['nova.context'].project_id
self.req.api_version_request = (api_version_request.
APIVersionRequest('2.71'))
self.body = {
"rebuild": {
"imageRef": self.image_uuid,
"user_data": None
}
}
@mock.patch('nova.compute.api.API.get')
def _rebuild_server(self, mock_get):
ctx = self.req.environ['nova.context']
mock_get.return_value = fakes.stub_instance_obj(ctx,
vm_state=vm_states.ACTIVE, project_id=self.req_project_id,
user_id=self.req_user_id)
server = self.controller._action_rebuild(
self.req, FAKE_UUID, body=self.body).obj['server']
return server
@mock.patch.object(InstanceGroup, 'get_by_instance_uuid',
side_effect=exception.InstanceGroupNotFound(group_uuid=FAKE_UUID))
def test_rebuild_with_server_group_not_exist(self, mock_sg_get):
server = self._rebuild_server()
self.assertEqual([], server['server_groups'])
class ServersControllerUpdateTest(ControllerTest):
def _get_request(self, body=None):
req = fakes.HTTPRequestV21.blank(self.path_with_id % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = jsonutils.dump_as_bytes(body)
fake_get = fakes.fake_compute_get(
project_id=req.environ['nova.context'].project_id,
user_id=req.environ['nova.context'].user_id)
self.mock_get.side_effect = fake_get
return req
def test_update_server_all_attributes(self):
body = {'server': {
'name': 'server_test',
}}
req = self._get_request(body)
res_dict = self.controller.update(req, FAKE_UUID, body=body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_name(self):
body = {'server': {'name': 'server_test'}}
req = self._get_request(body)
res_dict = self.controller.update(req, FAKE_UUID, body=body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_response_has_no_show_server_only_attributes(self):
        # There are some old server attributes which were added only for
        # the GET server APIs, not for PUT. GET server and PUT server share
        # the same view builder method show() to build the response, so make
        # sure attributes which are not supposed to be included in the PUT
        # response are not present.
body = {'server': {'name': 'server_test'}}
req = self._get_request(body)
res_dict = self.controller.update(req, FAKE_UUID, body=body)
for field in GET_ONLY_FIELDS:
self.assertNotIn(field, res_dict['server'])
def test_update_server_name_too_long(self):
body = {'server': {'name': 'x' * 256}}
req = self._get_request(body)
self.assertRaises(exception.ValidationError, self.controller.update,
req, FAKE_UUID, body=body)
def test_update_server_name_all_blank_spaces(self):
self.stub_out('nova.db.api.instance_get',
fakes.fake_instance_get(name='server_test'))
req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': ' ' * 64}}
req.body = jsonutils.dump_as_bytes(body)
self.assertRaises(exception.ValidationError, self.controller.update,
req, FAKE_UUID, body=body)
def test_update_server_name_with_spaces_in_the_middle(self):
body = {'server': {'name': 'abc def'}}
req = self._get_request(body)
self.controller.update(req, FAKE_UUID, body=body)
def test_update_server_name_with_leading_trailing_spaces(self):
self.stub_out('nova.db.api.instance_get',
fakes.fake_instance_get(name='server_test'))
req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': ' abc def '}}
req.body = jsonutils.dump_as_bytes(body)
self.assertRaises(exception.ValidationError,
self.controller.update, req, FAKE_UUID, body=body)
def test_update_server_name_with_leading_trailing_spaces_compat_mode(self):
body = {'server': {'name': ' abc def '}}
req = self._get_request(body)
req.set_legacy_v2()
self.controller.update(req, FAKE_UUID, body=body)
def test_update_server_admin_password_extra_arg(self):
inst_dict = dict(name='server_test', admin_password='bacon')
body = dict(server=inst_dict)
req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
req.method = 'PUT'
req.content_type = "application/json"
req.body = jsonutils.dump_as_bytes(body)
self.assertRaises(exception.ValidationError, self.controller.update,
req, FAKE_UUID, body=body)
def test_update_server_host_id(self):
inst_dict = dict(host_id='123')
body = dict(server=inst_dict)
req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
req.method = 'PUT'
req.content_type = "application/json"
req.body = jsonutils.dump_as_bytes(body)
self.assertRaises(exception.ValidationError, self.controller.update,
req, FAKE_UUID, body=body)
def test_update_server_not_found(self):
self.mock_get.side_effect = exception.InstanceNotFound(
instance_id='fake')
body = {'server': {'name': 'server_test'}}
req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
req.method = 'PUT'
req.content_type = "application/json"
req.body = jsonutils.dump_as_bytes(body)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
req, FAKE_UUID, body=body)
@mock.patch.object(compute_api.API, 'update_instance')
def test_update_server_not_found_on_update(self, mock_update_instance):
def fake_update(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
mock_update_instance.side_effect = fake_update
body = {'server': {'name': 'server_test'}}
req = self._get_request(body)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
req, FAKE_UUID, body=body)
def test_update_server_policy_fail(self):
rule = {'compute:update': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rule))
body = {'server': {'name': 'server_test'}}
req = self._get_request(body)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.update, req, FAKE_UUID, body=body)
class ServersControllerTriggerCrashDumpTest(ControllerTest):
def setUp(self):
super(ServersControllerTriggerCrashDumpTest, self).setUp()
self.instance = fakes.stub_instance_obj(None,
vm_state=vm_states.ACTIVE,
project_id=self.project_id)
def fake_get(ctrl, ctxt, uuid):
if uuid != FAKE_UUID:
raise webob.exc.HTTPNotFound(explanation='fakeout')
return self.instance
self.useFixture(
fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
'ServersController._get_instance',
fake_get))
self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.17')
self.body = dict(trigger_crash_dump=None)
@mock.patch.object(compute_api.API, 'trigger_crash_dump')
def test_trigger_crash_dump(self, mock_trigger_crash_dump):
ctxt = self.req.environ['nova.context']
self.controller._action_trigger_crash_dump(self.req, FAKE_UUID,
body=self.body)
mock_trigger_crash_dump.assert_called_with(ctxt, self.instance)
def test_trigger_crash_dump_policy_failed(self):
rule_name = "os_compute_api:servers:trigger_crash_dump"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body=self.body)
self.assertIn("os_compute_api:servers:trigger_crash_dump",
exc.format_message())
@mock.patch.object(compute_api.API, 'trigger_crash_dump',
fake_start_stop_not_ready)
def test_trigger_crash_dump_not_ready(self):
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body=self.body)
@mock.patch.object(compute_api.API, 'trigger_crash_dump',
fakes.fake_actions_to_locked_server)
def test_trigger_crash_dump_locked_server(self):
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body=self.body)
@mock.patch.object(compute_api.API, 'trigger_crash_dump',
fake_start_stop_invalid_state)
def test_trigger_crash_dump_invalid_state(self):
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body=self.body)
def test_trigger_crash_dump_with_bogus_id(self):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_trigger_crash_dump,
self.req, 'test_inst', body=self.body)
def test_trigger_crash_dump_schema_invalid_type(self):
self.body['trigger_crash_dump'] = 'not null'
self.assertRaises(exception.ValidationError,
self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body=self.body)
def test_trigger_crash_dump_schema_extra_property(self):
self.body['extra_property'] = 'extra'
self.assertRaises(exception.ValidationError,
self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body=self.body)
class ServersControllerUpdateTestV219(ServersControllerUpdateTest):
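    """Server update tests for microversion 2.19, which adds support for
    a server description.
    """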
def _get_request(self, body=None):
req = super(ServersControllerUpdateTestV219, self)._get_request(
body=body)
req.api_version_request = api_version_request.APIVersionRequest('2.19')
return req
def _update_server_desc(self, set_desc, desc=None):
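        """Issue an update, optionally including a description, and
        return the response dict.
        """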
body = {'server': {}}
if set_desc:
body['server']['description'] = desc
req = self._get_request()
res_dict = self.controller.update(req, FAKE_UUID, body=body)
return res_dict
def test_update_server_description(self):
res_dict = self._update_server_desc(True, 'server_desc')
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['description'], 'server_desc')
def test_update_server_empty_description(self):
res_dict = self._update_server_desc(True, '')
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['description'], '')
def test_update_server_without_description(self):
res_dict = self._update_server_desc(False)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertIsNone(res_dict['server']['description'])
def test_update_server_remove_description(self):
res_dict = self._update_server_desc(True)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertIsNone(res_dict['server']['description'])
def test_update_server_all_attributes(self):
body = {'server': {
'name': 'server_test',
'description': 'server_desc'
}}
req = self._get_request(body)
res_dict = self.controller.update(req, FAKE_UUID, body=body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
self.assertEqual(res_dict['server']['description'], 'server_desc')
def test_update_server_description_too_long(self):
body = {'server': {'description': 'x' * 256}}
req = self._get_request(body)
self.assertRaises(exception.ValidationError, self.controller.update,
req, FAKE_UUID, body=body)
def test_update_server_description_invalid(self):
# Invalid non-printable control char in the desc.
body = {'server': {'description': "123\0d456"}}
req = self._get_request(body)
self.assertRaises(exception.ValidationError, self.controller.update,
req, FAKE_UUID, body=body)
class ServersControllerUpdateTestV271(ServersControllerUpdateTest):
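    """Server update tests for microversion 2.71, which adds the
    server_groups field to the response.
    """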
body = {'server': {'name': 'server_test'}}
def _get_request(self, body=None):
req = super(ServersControllerUpdateTestV271, self)._get_request(
body=body)
req.api_version_request = api_version_request.APIVersionRequest('2.71')
return req
@mock.patch.object(InstanceGroup, 'get_by_instance_uuid',
side_effect=exception.InstanceGroupNotFound(group_uuid=FAKE_UUID))
def test_update_with_server_group_not_exist(self, mock_sg_get):
req = self._get_request(self.body)
res_dict = self.controller.update(req, FAKE_UUID, body=self.body)
self.assertEqual([], res_dict['server']['server_groups'])
class ServerStatusTest(test.TestCase):
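    """Verify the mapping of vm_state/task_state pairs to the API
    'status' field returned for a server.
    """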
project_id = fakes.FAKE_PROJECT_ID
path = '/%s/servers' % project_id
path_with_id = path + '/%s'
path_action = path + '/%s/action'
def setUp(self):
super(ServerStatusTest, self).setUp()
fakes.stub_out_nw_api(self)
fakes.stub_out_secgroup_api(
self, security_groups=[{'name': 'default'}])
self.controller = servers.ServersController()
def _get_with_state(self, vm_state, task_state=None):
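        """Stub the compute API to return an instance in the given
        states and return the controller's show() response.
        """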
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(vm_state=vm_state,
task_state=task_state))
request = fakes.HTTPRequestV21.blank(self.path_with_id % FAKE_UUID)
return self.controller.show(request, FAKE_UUID)
def test_active(self):
response = self._get_with_state(vm_states.ACTIVE)
self.assertEqual(response['server']['status'], 'ACTIVE')
def test_reboot(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBOOTING)
self.assertEqual(response['server']['status'], 'REBOOT')
def test_reboot_hard(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBOOTING_HARD)
self.assertEqual(response['server']['status'], 'HARD_REBOOT')
def test_reboot_resize_policy_fail(self):
rule = {'compute:reboot': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rule))
req = fakes.HTTPRequestV21.blank(self.path_action % '1234')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller._action_reboot, req, '1234',
body={'reboot': {'type': 'HARD'}})
def test_rebuild(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBUILDING)
self.assertEqual(response['server']['status'], 'REBUILD')
def test_rebuild_error(self):
response = self._get_with_state(vm_states.ERROR)
self.assertEqual(response['server']['status'], 'ERROR')
def test_resize(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.RESIZE_PREP)
self.assertEqual(response['server']['status'], 'RESIZE')
def test_confirm_resize_policy_fail(self):
rule = {'compute:confirm_resize': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rule))
req = fakes.HTTPRequestV21.blank(self.path_action % '1234')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller._action_confirm_resize, req, '1234', {})
def test_verify_resize(self):
response = self._get_with_state(vm_states.RESIZED, None)
self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')
def test_revert_resize(self):
response = self._get_with_state(vm_states.RESIZED,
task_states.RESIZE_REVERTING)
self.assertEqual(response['server']['status'], 'REVERT_RESIZE')
def test_revert_resize_policy_fail(self):
rule = {'compute:revert_resize': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rule))
req = fakes.HTTPRequestV21.blank(self.path_action % '1234')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller._action_revert_resize, req, '1234', {})
def test_password_update(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.UPDATING_PASSWORD)
self.assertEqual(response['server']['status'], 'PASSWORD')
def test_stopped(self):
response = self._get_with_state(vm_states.STOPPED)
self.assertEqual(response['server']['status'], 'SHUTOFF')
class ServersControllerCreateTest(test.TestCase):
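    """Tests for POST /servers (server create)."""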
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
project_id = fakes.FAKE_PROJECT_ID
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(enable_instance_password=True, group='api')
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
fakes.stub_out_nw_api(self)
self.controller = servers.ServersController()
def instance_create(context, inst):
inst_type = flavors.get_flavor_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/%s/images/%s' % (self.project_id,
image_uuid)
self.instance_cache_num += 1
instance = fake_instance.fake_db_instance(**{
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'display_description': inst['display_description'] or '',
'uuid': FAKE_UUID,
'instance_type': inst_type,
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': fakes.FAKE_PROJECT_ID,
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
"config_drive": None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
"root_device_name": inst.get('root_device_name', 'vda'),
})
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def server_update_and_get_original(
context, instance_uuid, params, columns_to_join=None):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
fakes.stub_out_key_pair_funcs(self)
fake.stub_out_image_service(self)
self.stub_out('nova.db.api.project_get_networks',
lambda c, u: dict(id='1', host='localhost'))
self.stub_out('nova.db.api.instance_create', instance_create)
self.stub_out('nova.db.api.instance_system_metadata_update',
lambda *a, **kw: None)
self.stub_out('nova.db.api.instance_get', instance_get)
self.stub_out('nova.db.api.instance_update', instance_update)
self.stub_out('nova.db.api.instance_update_and_get_original',
server_update_and_get_original)
self.body = {
'server': {
'name': 'server_test',
'imageRef': self.image_uuid,
'flavorRef': self.flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'networks': [{
'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
}],
},
}
self.bdm_v2 = [{
'no_device': None,
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake',
'device_name': 'vdb',
'delete_on_termination': False,
}]
self.bdm = [{
'no_device': None,
'virtual_name': 'root',
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda',
'delete_on_termination': False
}]
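        # Note the two BDM formats: the legacy format above keys on
        # volume_id/virtual_name, while the v2 format (self.bdm_v2) uses
        # source_type/destination_type/uuid. Tests below exercise both.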
self.req = fakes.HTTPRequest.blank('/%s/servers' % self.project_id)
self.req.method = 'POST'
self.req.headers["content-type"] = "application/json"
server = dict(name='server_test', imageRef=FAKE_UUID, flavorRef=2)
body = {'server': server}
self.req.body = encodeutils.safe_encode(jsonutils.dumps(body))
def _check_admin_password_len(self, server_dict):
"""utility function - check server_dict for admin_password length."""
self.assertEqual(CONF.password_length,
len(server_dict["adminPass"]))
def _check_admin_password_missing(self, server_dict):
"""utility function - check server_dict for admin_password absence."""
self.assertNotIn("adminPass", server_dict)
def _test_create_instance(self, flavor=2):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
self.body['server']['imageRef'] = image_uuid
self.body['server']['flavorRef'] = flavor
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller.create(self.req, body=self.body).obj['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_none_value_port(self):
self.body['server'] = {'networks': [{'port': None, 'uuid': FAKE_UUID}]}
self.body['server']['name'] = 'test'
self._test_create_instance()
def test_create_instance_private_flavor(self):
values = {
'name': 'fake_name',
'memory': 512,
'vcpus': 1,
'root_gb': 10,
'ephemeral_gb': 10,
'flavorid': '1324',
'swap': 0,
'rxtx_factor': 0.5,
'is_public': False,
}
flavors.create(**values)
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_instance,
flavor=1324)
self.assertEqual('Flavor 1324 could not be found.', six.text_type(ex))
def test_create_server_bad_image_uuid(self):
self.body['server']['min_count'] = 1
        self.body['server']['imageRef'] = 1  # an int is not a valid image UUID
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
def test_create_server_with_deleted_image(self):
# Get the fake image service so we can set the status to deleted
(image_service, image_id) = glance.get_remote_image_service(
context, '')
image_service.update(context, self.image_uuid, {'status': 'DELETED'})
self.addCleanup(image_service.update, context, self.image_uuid,
{'status': 'active'})
self.body['server']['flavorRef'] = 2
self.req.body = jsonutils.dump_as_bytes(self.body)
with testtools.ExpectedException(
webob.exc.HTTPBadRequest,
'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
self.controller.create(self.req, body=self.body)
def test_create_server_image_too_large(self):
# Get the fake image service so we can update the size of the image
(image_service, image_id) = glance.get_remote_image_service(
context, self.image_uuid)
image = image_service.show(context, image_id)
orig_size = image['size']
new_size = str(1000 * (1024 ** 3))
image_service.update(context, self.image_uuid, {'size': new_size})
self.addCleanup(image_service.update, context, self.image_uuid,
{'size': orig_size})
self.body['server']['flavorRef'] = 2
self.req.body = jsonutils.dump_as_bytes(self.body)
with testtools.ExpectedException(
webob.exc.HTTPBadRequest,
"Flavor's disk is too small for requested image."):
self.controller.create(self.req, body=self.body)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
status='active',
properties=dict(
cinder_encryption_key_id=fakes.FAKE_UUID)))
def test_create_server_image_nonbootable(self, mock_show):
self.req.body = jsonutils.dump_as_bytes(self.body)
expected_msg = ("Image {} is unacceptable: Direct booting of an image "
"uploaded from an encrypted volume is unsupported.")
with testtools.ExpectedException(
webob.exc.HTTPBadRequest,
expected_msg.format(self.image_uuid)):
self.controller.create(self.req, body=self.body)
def test_create_instance_with_image_non_uuid(self):
self.body['server']['imageRef'] = 'not-uuid'
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
def test_create_instance_with_image_as_full_url(self):
image_href = ('http://localhost/v2/%s/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
self.body['server']['imageRef'] = image_href
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
def test_create_instance_with_image_as_empty_string(self):
self.body['server']['imageRef'] = ''
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
def test_create_instance_no_key_pair(self):
fakes.stub_out_key_pair_funcs(self, have_key_pair=False)
self._test_create_instance()
def _test_create_extra(self, params, no_image=False):
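        """Merge extra params into the request body (optionally dropping
        the image) and invoke the controller's create().
        """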
self.body['server']['flavorRef'] = 2
if no_image:
self.body['server'].pop('imageRef', None)
self.body['server'].update(params)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.headers["content-type"] = "application/json"
self.controller.create(self.req, body=self.body).obj['server']
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PortRequiresFixedIP(
port_id=uuids.port))
def test_create_instance_with_port_with_no_fixed_ips(self, mock_create):
requested_networks = [{'port': uuids.port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_raise_user_data_too_large(self):
self.body['server']['user_data'] = (b'1' * 65536)
ex = self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
# Make sure the failure was about user_data and not something else.
self.assertIn('user_data', six.text_type(ex))
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.NetworkRequiresSubnet(
network_uuid=uuids.network))
def test_create_instance_with_network_with_no_subnet(self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.NoUniqueMatch(
"No Unique match found for ..."))
def test_create_instance_with_non_unique_secgroup_name(self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks,
'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
def test_create_instance_secgroup_leading_trailing_spaces(self):
network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks,
'security_groups': [{'name': ' sg '}]}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_secgroup_leading_trailing_spaces_compat_mode(
self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks,
'security_groups': [{'name': ' sg '}]}
def fake_create(*args, **kwargs):
self.assertEqual([' sg '], kwargs['security_groups'])
return (objects.InstanceList(objects=[fakes.stub_instance_obj(
self.req.environ['nova.context'])]), None)
mock_create.side_effect = fake_create
self.req.set_legacy_v2()
self._test_create_extra(params)
def test_create_instance_with_networks_disabled_neutronv2(self):
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
None, None)]
self.assertEqual(result, kwargs['requested_networks'].as_tuples())
return old_create(*args, **kwargs)
with mock.patch('nova.compute.api.API.create', create):
self._test_create_extra(params)
def test_create_instance_with_pass_disabled(self):
        # Test with admin passwords disabled. See LP bug 921814.
        self.flags(enable_instance_password=False, group='api')
        self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_name_too_long(self):
self.body['server']['name'] = 'X' * 256
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError, self.controller.create,
self.req, body=self.body)
def test_create_instance_name_with_spaces_in_the_middle(self):
self.body['server']['name'] = 'abc def'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body)
def test_create_instance_name_with_leading_trailing_spaces(self):
self.body['server']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_name_with_leading_trailing_spaces_in_compat_mode(
self):
self.body['server']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.set_legacy_v2()
self.controller.create(self.req, body=self.body)
def test_create_instance_name_all_blank_spaces(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/%s/flavors/3' % self.project_id
body = {
'server': {
'name': ' ' * 64,
'imageRef': image_uuid,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
req = fakes.HTTPRequest.blank('/%s/servers' % self.project_id)
req.method = 'POST'
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
def test_create_az_with_leading_trailing_spaces(self):
self.body['server']['availability_zone'] = ' zone1 '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_az_with_leading_trailing_spaces_in_compat_mode(
self):
self.body['server']['name'] = ' abc def '
        self.body['server']['availability_zone'] = ' zone1 '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.set_legacy_v2()
with mock.patch.object(availability_zones, 'get_availability_zones',
return_value=[' zone1 ']):
self.controller.create(self.req, body=self.body)
def test_create_instance(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_pass_disabled(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.flags(enable_instance_password=False, group='api')
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
@mock.patch('nova.virt.hardware.numa_get_constraints')
def _test_create_instance_numa_topology_wrong(self, exc,
numa_constraints_mock):
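        """Force numa_get_constraints to raise the given exception and
        verify create() translates it to HTTP 400.
        """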
numa_constraints_mock.side_effect = exc(**{
'name': None,
'source': 'flavor',
'requested': 'dummy',
'available': str(objects.fields.CPUAllocationPolicy.ALL),
'cpunum': 0,
'cpumax': 0,
'cpuset': None,
'memsize': 0,
'memtotal': 0})
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_numa_topology_wrong(self):
for exc in [exception.ImageNUMATopologyIncomplete,
exception.ImageNUMATopologyForbidden,
exception.ImageNUMATopologyAsymmetric,
exception.ImageNUMATopologyCPUOutOfRange,
exception.ImageNUMATopologyCPUDuplicates,
exception.ImageNUMATopologyCPUsUnassigned,
exception.InvalidCPUAllocationPolicy,
exception.InvalidCPUThreadAllocationPolicy,
exception.ImageNUMATopologyMemoryOutOfRange]:
self._test_create_instance_numa_topology_wrong(exc)
def test_create_instance_too_much_metadata(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata']['vote'] = 'fiddletown'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_too_long(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {('a' * 260): '12345'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_too_long(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {'key1': ('a' * 260)}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_blank(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {'': 'abcd'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_not_dict(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = 'string'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_not_string(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {1: 'test'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_not_string(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {'test': ['a', 'list']}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_user_data_malformed_bad_request(self):
params = {'user_data': 'u1234'}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
def _create_instance_body_of_config_drive(self, param):
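        """Stub compute API create() to assert config_drive is passed
        through, and set the given value in the request body.
        """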
def create(*args, **kwargs):
self.assertIn('config_drive', kwargs)
return old_create(*args, **kwargs)
old_create = compute_api.API.create
self.stub_out('nova.compute.api.API.create', create)
self.body['server']['config_drive'] = param
self.req.body = jsonutils.dump_as_bytes(self.body)
def test_create_instance_with_config_drive(self):
param = True
self._create_instance_body_of_config_drive(param)
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_config_drive_as_boolean_string(self):
param = 'false'
self._create_instance_body_of_config_drive(param)
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_bad_config_drive(self):
param = 12345
self._create_instance_body_of_config_drive(param)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_without_config_drive(self):
def create(*args, **kwargs):
self.assertIsNone(kwargs['config_drive'])
return old_create(*args, **kwargs)
old_create = compute_api.API.create
self.stub_out('nova.compute.api.API.create', create)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_empty_config_drive(self):
param = ''
self._create_instance_body_of_config_drive(param)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def _test_create(self, params, no_image=False):
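        """Update the request body with params (optionally without an
        image) and invoke the controller's create().
        """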
        self.body['server'].update(params)
if no_image:
del self.body['server']['imageRef']
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj['server']
def test_create_instance_with_volumes_enabled_no_image(self):
"""Test that the create will fail if there is no image
and no bdms supplied in the request
"""
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, {}, no_image=True)
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_bdms_and_no_image(
self, mock_bdm_image_metadata, mock_validate_bdm, mock_get_vols):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm_v2[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once()
mock_bdm_image_metadata.assert_called_once_with(
mock.ANY, mock.ANY, False)
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_bdms_and_empty_imageRef(
self, mock_bdm_image_metadata, mock_validate_bdm, mock_get_volumes):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm_v2[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2,
'imageRef': ''}
self._test_create(params)
def test_create_instance_with_imageRef_as_full_url(self):
bdm = [{'device_name': 'foo'}]
image_href = ('http://localhost/v2/%s/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
params = {'block_device_mapping_v2': bdm,
'imageRef': image_href}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_non_uuid_imageRef(self):
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping_v2': bdm,
'imageRef': '123123abcd'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_invalid_bdm_in_2nd_dict(self):
bdm_1st = {"source_type": "image", "delete_on_termination": True,
"boot_index": 0,
"uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
"destination_type": "local"}
bdm_2nd = {"source_type": "volume",
"uuid": "99d92140-3d0c-4ea5-a49c-f94c38c607f0",
"destination_type": "invalid"}
bdm = [bdm_1st, bdm_2nd]
params = {'block_device_mapping_v2': bdm,
'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_boot_index_none_ok(self):
"""Tests creating a server with two block devices. One is the boot
device and the other is a non-bootable device.
"""
# From the docs:
# To disable a device from booting, set the boot index to a negative
# value or use the default boot index value, which is None. The
# simplest usage is, set the boot index of the boot device to 0 and use
# the default boot index value, None, for any other devices.
bdms = [
# This is the bootable device that would create a 20GB cinder
# volume from the given image.
{
'source_type': 'image',
'destination_type': 'volume',
'boot_index': 0,
'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
'volume_size': 20
},
# This is the non-bootable 10GB ext4 ephemeral block device.
{
'source_type': 'blank',
'destination_type': 'local',
'boot_index': None,
# If 'guest_format' is 'swap' then a swap device is created.
'guest_format': 'ext4'
}
]
params = {'block_device_mapping_v2': bdms}
self._test_create(params, no_image=True)
def test_create_instance_with_boot_index_none_image_local_fails(self):
"""Tests creating a server with a local image-based block device which
has a boot_index of None which is invalid.
"""
bdms = [{
'source_type': 'image',
'destination_type': 'local',
'boot_index': None,
'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6'
}]
params = {'block_device_mapping_v2': bdms}
self.assertRaises(webob.exc.HTTPBadRequest, self._test_create,
params, no_image=True)
def test_create_instance_with_invalid_boot_index(self):
bdm = [{"source_type": "image", "delete_on_termination": True,
"boot_index": 'invalid',
"uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
"destination_type": "local"}]
params = {'block_device_mapping_v2': bdm,
'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_device_name_not_string(self):
self.bdm_v2[0]['device_name'] = 123
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_bdm_param_not_list(self, mock_create):
self.params = {'block_device_mapping': '/dev/vdb'}
self.assertRaises(exception.ValidationError,
self._test_create, self.params)
def test_create_instance_with_device_name_empty(self):
self.bdm_v2[0]['device_name'] = ''
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_device_name_too_long(self):
self.bdm_v2[0]['device_name'] = 'a' * 256
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_space_in_device_name(self):
self.bdm_v2[0]['device_name'] = 'v da'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertTrue(kwargs['legacy_bdm'])
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_invalid_size(self):
self.bdm_v2[0]['volume_size'] = 'hello world'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def _test_create_instance_with_destination_type_error(self,
destination_type):
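        """Set an invalid destination_type on the BDM and expect a
        validation error.
        """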
self.bdm_v2[0]['destination_type'] = destination_type
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_destination_type_empty_string(self):
self._test_create_instance_with_destination_type_error('')
def test_create_instance_with_invalid_destination_type(self):
self._test_create_instance_with_destination_type_error('fake')
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
def test_create_instance_bdm(self, mock_validate_bdm, mock_get_volumes):
bdm = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'uuid': 'fake_vol'
}]
bdm_expected = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'volume_id': 'fake_vol'
}]
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
for expected, received in zip(bdm_expected,
kwargs['block_device_mapping']):
self.assertThat(block_device.BlockDeviceDict(expected),
matchers.DictMatches(received))
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': bdm}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once()
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
def test_create_instance_bdm_missing_device_name(self, mock_validate_bdm,
mock_get_volumes):
del self.bdm_v2[0]['device_name']
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
self.assertNotIn(None,
kwargs['block_device_mapping'][0]['device_name'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once()
@mock.patch.object(
block_device.BlockDeviceDict, '_validate',
side_effect=exception.InvalidBDMFormat(details='Wrong BDM'))
def test_create_instance_bdm_validation_error(self, mock_validate):
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, params, no_image=True)
@mock.patch('nova.compute.api.API._get_bdm_image_metadata')
def test_create_instance_non_bootable_volume_fails(self, fake_bdm_meta):
params = {'block_device_mapping_v2': self.bdm_v2}
fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
self.assertRaises(webob.exc.HTTPBadRequest, self._test_create, params,
no_image=True)
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
def test_create_instance_bdm_api_validation_fails(self, mock_get_volumes):
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
bdm_exceptions = ((exception.InvalidBDMSnapshot, {'id': 'fake'}),
(exception.InvalidBDMVolume, {'id': 'fake'}),
(exception.InvalidBDMImage, {'id': 'fake'}),
(exception.InvalidBDMBootSequence, {}),
(exception.InvalidBDMLocalsLimit, {}))
ex_iter = iter(bdm_exceptions)
def _validate_bdm(*args, **kwargs):
self.validation_fail_test_validate_called = True
ex, kargs = next(ex_iter)
raise ex(**kargs)
def _instance_destroy(*args, **kwargs):
self.validation_fail_instance_destroy_called = True
self.stub_out('nova.compute.api.API._validate_bdm', _validate_bdm)
self.stub_out('nova.objects.Instance.destroy', _instance_destroy)
for _unused in range(len(bdm_exceptions)):
params = {'block_device_mapping_v2':
[self.bdm_v2[0].copy()]}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, params)
self.assertTrue(self.validation_fail_test_validate_called)
self.assertFalse(self.validation_fail_instance_destroy_called)
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
def _test_create_bdm(self, params, mock_validate_bdm, mock_get_volumes,
no_image=False):
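        """Create a server with the given params and verify _validate_bdm
        is called once with the expected argument types.
        """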
self.body['server'].update(params)
if no_image:
del self.body['server']['imageRef']
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj['server']
mock_validate_bdm.assert_called_once_with(
test.MatchType(fakes.FakeRequestContext),
test.MatchType(objects.Instance),
test.MatchType(objects.Flavor),
test.MatchType(objects.BlockDeviceMappingList),
{},
mock_get_volumes.return_value,
False)
def test_create_instance_with_volumes_enabled(self):
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params)
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_volumes_enabled_and_bdms_no_image(
self, mock_get_bdm_image_metadata):
"""Test that the create works if there is no image supplied but
os-volumes extension is enabled and bdms are supplied
"""
volume = {
'id': uuids.volume_id,
'status': 'active',
'volume_image_metadata':
{'test_key': 'test_value'}
}
mock_get_bdm_image_metadata.return_value = volume
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params, no_image=True)
mock_get_bdm_image_metadata.assert_called_once_with(
mock.ANY, self.bdm, True)
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_imageRef_as_empty_string(
self, mock_bdm_image_metadata):
volume = {
'id': uuids.volume_id,
'status': 'active',
'volume_image_metadata':
{'test_key': 'test_value'}
}
mock_bdm_image_metadata.return_value = volume
params = {'block_device_mapping': self.bdm,
'imageRef': ''}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params)
def test_create_instance_with_imageRef_as_full_url_legacy_bdm(self):
bdm = [{
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda'
}]
image_href = ('http://localhost/v2/%s/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
params = {'block_device_mapping': bdm,
'imageRef': image_href}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_non_uuid_imageRef_legacy_bdm(self):
bdm = [{
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda'
}]
params = {'block_device_mapping': bdm,
'imageRef': 'bad-format'}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
@mock.patch('nova.compute.api.API._get_bdm_image_metadata')
def test_create_instance_non_bootable_volume_fails_legacy_bdm(
self, fake_bdm_meta):
bdm = [{
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda'
}]
params = {'block_device_mapping': bdm}
fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_bdm, params, no_image=True)
def test_create_instance_with_device_name_not_string_legacy_bdm(self):
self.bdm[0]['device_name'] = 123
old_create = compute_api.API.create
self.params = {'block_device_mapping': self.bdm}
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, self.params)
def test_create_instance_with_snapshot_volume_id_none(self):
old_create = compute_api.API.create
bdm = [{
'no_device': None,
'snapshot_id': None,
'volume_id': None,
'device_name': 'vda',
'delete_on_termination': False
}]
self.params = {'block_device_mapping': bdm}
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, self.params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_legacy_bdm_param_not_list(self, mock_create):
self.params = {'block_device_mapping': '/dev/vdb'}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, self.params)
def test_create_instance_with_device_name_empty_legacy_bdm(self):
self.bdm[0]['device_name'] = ''
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_device_name_too_long_legacy_bdm(self):
        self.bdm[0]['device_name'] = 'a' * 256  # exceeds the max device_name length
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_space_in_device_name_legacy_bdm(self):
        self.bdm[0]['device_name'] = 'vd a'  # spaces are not allowed in device names
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertTrue(kwargs['legacy_bdm'])
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def _test_create_bdm_instance_with_size_error(self, size):
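        """Build a legacy BDM with the given volume_size and expect a
        validation error.
        """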
bdm = [{'delete_on_termination': True,
'device_name': 'vda',
'volume_size': size,
'volume_id': '11111111-1111-1111-1111-111111111111'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_invalid_size_legacy_bdm(self):
self._test_create_bdm_instance_with_size_error("hello world")
def test_create_instance_with_size_empty_string(self):
self._test_create_bdm_instance_with_size_error('')
def test_create_instance_with_size_zero(self):
self._test_create_bdm_instance_with_size_error("0")
def test_create_instance_with_size_greater_than_limit(self):
self._test_create_bdm_instance_with_size_error(db.MAX_INT + 1)
def test_create_instance_with_bdm_delete_on_termination(self):
bdm = [{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'True'},
{'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True},
{'device_name': 'foo3', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'False'},
{'device_name': 'foo4', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False},
{'device_name': 'foo5', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False}]
expected_bdm = [
{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True},
{'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True},
{'device_name': 'foo3', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False},
{'device_name': 'foo4', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False},
{'device_name': 'foo5', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(expected_bdm, kwargs['block_device_mapping'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params)
def test_create_instance_with_bdm_delete_on_termination_invalid_2nd(self):
bdm = [{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'True'},
{'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'invalid'}]
params = {'block_device_mapping': bdm}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_decide_format_legacy(self):
bdm = [{'device_name': 'foo1',
'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True}]
expected_legacy_flag = True
old_create = compute_api.API.create
def create(*args, **kwargs):
legacy_bdm = kwargs.get('legacy_bdm', True)
self.assertEqual(legacy_bdm, expected_legacy_flag)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm({})
params = {'block_device_mapping': bdm}
self._test_create_bdm(params)
def test_create_instance_both_bdm_formats(self):
bdm = [{'device_name': 'foo'}]
bdm_v2 = [{'source_type': 'volume',
'uuid': 'fake_vol'}]
params = {'block_device_mapping': bdm,
'block_device_mapping_v2': bdm_v2}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_bdm, params)
def test_create_instance_invalid_key_name(self):
self.body['server']['key_name'] = 'nonexistentkey'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_valid_key_name(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.body['server']['key_name'] = 'key'
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_password_len(res["server"])
def test_create_server_keypair_name_with_leading_trailing(self):
self.body['server']['key_name'] = ' abc '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create')
def test_create_server_keypair_name_with_leading_trailing_compat_mode(
self, mock_create):
params = {'key_name': ' abc '}
def fake_create(*args, **kwargs):
self.assertEqual(' abc ', kwargs['key_name'])
return (objects.InstanceList(objects=[fakes.stub_instance_obj(
self.req.environ['nova.context'])]), None)
mock_create.side_effect = fake_create
self.req.set_legacy_v2()
self._test_create_extra(params)
def test_create_instance_invalid_flavor_href(self):
flavor_ref = 'http://localhost/v2/flavors/asdf'
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_int(self):
flavor_ref = -1
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
@mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor())
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_non_existing_snapshot_id(
self, mock_create,
mock_get_flavor_by_flavor_id):
mock_create.side_effect = exception.SnapshotNotFound(snapshot_id='123')
self.body['server'] = {'name': 'server_test',
'flavorRef': self.flavor_ref,
'block_device_mapping_v2':
[{'source_type': 'snapshot',
'uuid': '123'}]}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_empty(self):
flavor_ref = ""
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_bad_flavor_href(self):
flavor_ref = 'http://localhost/v2/flavors/17'
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_local_href(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_admin_password(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(server['adminPass'],
self.body['server']['adminPass'])
def test_create_instance_admin_password_pass_disabled(self):
self.flags(enable_instance_password=False, group='api')
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertIn('server', res)
self.assertIn('adminPass', self.body['server'])
def test_create_instance_admin_password_empty(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = ''
self.req.body = jsonutils.dump_as_bytes(self.body)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body)
def test_create_location(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
selfhref = 'http://localhost/v2/%s/servers/%s' % (self.project_id,
FAKE_UUID)
self.req.body = jsonutils.dump_as_bytes(self.body)
robj = self.controller.create(self.req, body=self.body)
self.assertEqual(encodeutils.safe_decode(robj['Location']), selfhref)
@mock.patch('nova.objects.Quotas.get_all_by_project')
@mock.patch('nova.objects.Quotas.get_all_by_project_and_user')
@mock.patch('nova.objects.Quotas.count_as_dict')
def _do_test_create_instance_above_quota(self, resource, allowed,
quota, expected_msg, mock_count, mock_get_all_pu,
mock_get_all_p):
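        """Prime quota counts so the requested resource exceeds its limit
        and verify create() fails with the expected 403 message.
        """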
count = {'project': {}, 'user': {}}
for res in ('instances', 'ram', 'cores'):
if res == resource:
value = quota - allowed
count['project'][res] = count['user'][res] = value
else:
count['project'][res] = count['user'][res] = 0
mock_count.return_value = count
mock_get_all_p.return_value = {'project_id': fakes.FAKE_PROJECT_ID}
mock_get_all_pu.return_value = {'project_id': fakes.FAKE_PROJECT_ID,
'user_id': 'fake_user'}
if resource in db_api.PER_PROJECT_QUOTAS:
mock_get_all_p.return_value[resource] = quota
else:
mock_get_all_pu.return_value[resource] = quota
fakes.stub_out_instance_quota(self, allowed, quota, resource)
self.body['server']['flavorRef'] = 3
self.req.body = jsonutils.dump_as_bytes(self.body)
try:
self.controller.create(self.req, body=self.body).obj['server']
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
def test_create_instance_above_quota_instances(self):
msg = ('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = ('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = ('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
@mock.patch.object(fakes.QUOTAS, 'limit_check')
def test_create_instance_above_quota_server_group_members(
self, mock_limit_check):
ctxt = self.req.environ['nova.context']
fake_group = objects.InstanceGroup(ctxt)
fake_group.project_id = ctxt.project_id
fake_group.user_id = ctxt.user_id
fake_group.create()
real_count = fakes.QUOTAS.count_as_dict
def fake_count(context, name, group, user_id):
if name == 'server_group_members':
self.assertEqual(group.uuid, fake_group.uuid)
self.assertEqual(user_id,
self.req.environ['nova.context'].user_id)
return {'user': {'server_group_members': 10}}
else:
return real_count(context, name, group, user_id)
def fake_limit_check(context, **kwargs):
if 'server_group_members' in kwargs:
raise exception.OverQuota(overs={})
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
mock_limit_check.side_effect = fake_limit_check
self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body['os:scheduler_hints'] = {'group': fake_group.uuid}
self.req.body = jsonutils.dump_as_bytes(self.body)
expected_msg = "Quota exceeded, too many servers in group"
try:
with mock.patch.object(fakes.QUOTAS, 'count_as_dict',
side_effect=fake_count):
self.controller.create(self.req, body=self.body).obj
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
def test_create_instance_with_group_hint(self):
ctxt = self.req.environ['nova.context']
test_group = objects.InstanceGroup(ctxt)
test_group.project_id = ctxt.project_id
test_group.user_id = ctxt.user_id
test_group.create()
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body['os:scheduler_hints'] = {'group': test_group.uuid}
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller.create(self.req, body=self.body).obj['server']
test_group = objects.InstanceGroup.get_by_uuid(ctxt, test_group.uuid)
self.assertIn(server['id'], test_group.members)
def _test_create_instance_with_group_hint(self, hint,
hint_name='os:scheduler_hints'):
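        """Create a server with the given scheduler hint and assert the
        hint is passed through to compute API create().
        """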
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
def fake_create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], hint)
return ([fakes.stub_instance(1)], '')
self.stub_out('nova.compute.api.API.create', fake_create)
        self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body[hint_name] = hint
self.req.body = jsonutils.dump_as_bytes(self.body)
return self.controller.create(self.req, body=self.body).obj['server']
def test_create_instance_with_group_hint_legacy(self):
self._test_create_instance_with_group_hint(
{'different_host': '9c47bf55-e9d8-42da-94ab-7f9e80cd1857'},
hint_name='OS-SCH-HNT:scheduler_hints')
def test_create_server_with_different_host_hint(self):
self._test_create_instance_with_group_hint(
{'different_host': '9c47bf55-e9d8-42da-94ab-7f9e80cd1857'})
self._test_create_instance_with_group_hint(
{'different_host': ['9c47bf55-e9d8-42da-94ab-7f9e80cd1857',
'82412fa6-0365-43a9-95e4-d8b20e00c0de']})
def test_create_instance_with_group_hint_group_not_found(self):
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body['os:scheduler_hints'] = {
'group': '5b674f73-c8cf-40ef-9965-3b6fe4b304b1'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_with_group_hint_wrong_uuid_format(self):
self.body['os:scheduler_hints'] = {
'group': 'non-uuid'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_server_bad_hints_non_dict(self):
sch_hints = ['os:scheduler_hints', 'OS-SCH-HNT:scheduler_hints']
for hint in sch_hints:
self.body[hint] = 'non-dict'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_server_bad_hints_long_group(self):
self.body['os:scheduler_hints'] = {
'group': 'a' * 256}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_server_with_bad_different_host_hint(self):
self.body['os:scheduler_hints'] = {
'different_host': 'non-server-id'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
self.body['os:scheduler_hints'] = {
'different_host': ['non-server-id01', 'non-server-id02']}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PortInUse(port_id=uuids.port))
def test_create_instance_with_port_in_use(self, mock_create):
requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_public_network_non_admin(self, mock_create):
public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
params = {'networks': [{'uuid': public_network_uuid}]}
self.req.body = jsonutils.dump_as_bytes(self.body)
mock_create.side_effect = exception.ExternalNetworkAttachForbidden(
network_uuid=public_network_uuid)
self.assertRaises(webob.exc.HTTPForbidden,
self._test_create_extra, params)
def test_multiple_create_with_string_type_min_and_max(self):
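        # Verifies the API layer converts string min_count/max_count
        # values to ints before calling compute_api.API.create.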
min_count = '2'
max_count = '3'
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsInstance(kwargs['min_count'], int)
self.assertIsInstance(kwargs['max_count'], int)
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['max_count'], 3)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_extra(params)
def test_create_instance_with_multiple_create_enabled(self):
min_count = 2
max_count = 3
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['max_count'], 3)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_extra(params)
def test_create_instance_invalid_negative_min(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_negative_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_with_blank_min(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': '',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_with_blank_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': '',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_min_greater_than_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 4,
'max_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_alpha_min(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_alpha_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_multiple_instances(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
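        # Stub the DB layer so each created instance is cached by uuid,
        # letting the test assert the returned server id is among them.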
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
self._check_admin_password_len(res["server"])
def test_create_multiple_instances_pass_disabled(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
self.flags(enable_instance_password=False, group='api')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
self._check_admin_password_missing(res["server"])
def _create_multiple_instances_resv_id_return(self, resv_id_return):
"""Test creating multiple instances with asking for
reservation_id
"""
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'return_reservation_id': resv_id_return
}
}
res = self.controller.create(self.req, body=body)
reservation_id = res.obj['reservation_id']
self.assertNotEqual(reservation_id, "")
self.assertIsNotNone(reservation_id)
self.assertGreater(len(reservation_id), 1)
def test_create_multiple_instances_with_resv_id_return(self):
self._create_multiple_instances_resv_id_return(True)
def test_create_multiple_instances_with_string_resv_id_return(self):
self._create_multiple_instances_resv_id_return("True")
def test_create_multiple_instances_with_multiple_volume_bdm(self):
"""Test that a BadRequest is raised if multiple instances
are requested with a list of block device mappings for volumes.
"""
min_count = 2
bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'},
{'source_type': 'volume', 'uuid': 'vol-yyyy'}
]
params = {
'block_device_mapping_v2': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(len(kwargs['block_device_mapping']), 2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
self.assertEqual("Cannot attach one or more volumes to multiple "
"instances", exc.explanation)
def test_create_multiple_instances_with_single_volume_bdm(self):
"""Test that a BadRequest is raised if multiple instances
are requested to boot from a single volume.
"""
min_count = 2
bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'}]
params = {
'block_device_mapping_v2': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['block_device_mapping'][0]['volume_id'],
'vol-xxxx')
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
self.assertEqual("Cannot attach one or more volumes to multiple "
"instances", exc.explanation)
def test_create_multiple_instance_with_non_integer_max_count(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 2.5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=body)
def test_create_multiple_instance_with_non_integer_min_count(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2.5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=body)
def test_create_multiple_instance_max_count_overquota_min_count_ok(self):
self.flags(instances=3, group='quota')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'max_count': 5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
def test_create_multiple_instance_max_count_overquota_min_count_over(self):
self.flags(instances=3, group='quota')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 4,
'max_count': 5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
self.req, body=body)
@mock.patch.object(compute_api.API, 'create')
def test_create_multiple_instance_with_specified_ip_neutronv2(self,
_api_mock):
_api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
reason="")
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
address = '10.0.0.1'
requested_networks = [{'uuid': network, 'fixed_ip': address,
'port': port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.MultiplePortsNotApplicable(
reason="Unable to launch multiple instances with "
"a single configured port ID. Please "
"launch your instance one by one with "
"different ports."))
def test_create_multiple_instance_with_port(self, mock_create):
requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.NetworkNotFound(
network_id=uuids.network))
def test_create_instance_with_not_found_network(self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PortNotFound(port_id=uuids.port))
def test_create_instance_with_port_not_found(self, mock_create):
requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_network_ambiguous(self, mock_create):
mock_create.side_effect = exception.NetworkAmbiguous()
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.UnableToAutoAllocateNetwork(
project_id=FAKE_UUID))
def test_create_instance_with_unable_to_auto_allocate_network(self,
mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.ImageNotAuthorized(
image_id=FAKE_UUID))
def test_create_instance_with_image_not_authorized(self,
mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InstanceExists(
name='instance-name'))
def test_create_instance_raise_instance_exists(self, mock_create):
self.assertRaises(webob.exc.HTTPConflict,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDMEphemeralSize)
def test_create_instance_raise_invalid_bdm_ephsize(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidNUMANodesNumber(
nodes='-1'))
def test_create_instance_raise_invalid_numa_nodes(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDMFormat(details=''))
def test_create_instance_raise_invalid_bdm_format(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDMSwapSize)
def test_create_instance_raise_invalid_bdm_swapsize(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDM)
def test_create_instance_raise_invalid_bdm(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.ImageBadRequest(
image_id='dummy', response='dummy'))
def test_create_instance_raise_image_bad_request(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
def test_create_instance_invalid_availability_zone(self):
self.body['server']['availability_zone'] = 'invalid::::zone'
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
def test_create_instance_invalid_availability_zone_as_int(self):
self.body['server']['availability_zone'] = 123
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.FixedIpNotFoundForAddress(
address='dummy'))
def test_create_instance_raise_fixed_ip_not_found_bad_request(self,
mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.CPUThreadPolicyConfigurationInvalid())
def test_create_instance_raise_cpu_thread_policy_configuration_invalid(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.get_mem_encryption_constraint',
side_effect=exception.FlavorImageConflict(
message="fake conflict reason"))
def test_create_instance_raise_flavor_image_conflict(
self, mock_conflict):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.get_mem_encryption_constraint',
side_effect=exception.InvalidMachineType(
message="fake conflict reason"))
def test_create_instance_raise_invalid_machine_type(
self, mock_conflict):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.ImageCPUPinningForbidden())
def test_create_instance_raise_image_cpu_pinning_forbidden(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.ImageCPUThreadPolicyForbidden())
def test_create_instance_raise_image_cpu_thread_policy_forbidden(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.MemoryPageSizeInvalid(pagesize='-1'))
def test_create_instance_raise_memory_page_size_invalid(self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.MemoryPageSizeForbidden(pagesize='1',
against='2'))
def test_create_instance_raise_memory_page_size_forbidden(self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.RealtimeConfigurationInvalid())
def test_create_instance_raise_realtime_configuration_invalid(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.RealtimeMaskNotFoundOrInvalid())
def test_create_instance_raise_realtime_mask_not_found_or_invalid(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_invalid_personality(self, mock_create):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
codec = 'utf8'
content = encodeutils.safe_encode(
'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==')
start_position = 19
end_position = 20
msg = 'invalid start byte'
mock_create.side_effect = UnicodeDecodeError(codec, content,
start_position,
end_position, msg)
self.body['server']['personality'] = [
{
"path": "/etc/banner.txt",
"contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
},
]
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_without_personality_should_get_empty_list(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual([], kwargs['injected_files'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_instance()
def test_create_instance_with_extra_personality_arg(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
self.body['server']['personality'] = [
{
"path": "/etc/banner.txt",
"contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
"extra_arg": "extra value"
},
]
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PciRequestAliasNotDefined(
alias='fake_name'))
def test_create_instance_pci_alias_not_defined(self, mock_create):
# Tests that PciRequestAliasNotDefined is translated to a 400 error.
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
self.assertIn('PCI alias fake_name is not defined', six.text_type(ex))
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PciInvalidAlias(
reason='just because'))
def test_create_instance_pci_invalid_alias(self, mock_create):
# Tests that PciInvalidAlias is translated to a 400 error.
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
self.assertIn('Invalid PCI alias definition', six.text_type(ex))
def test_create_instance_with_user_data(self):
value = base64.encode_as_text("A random string")
params = {'user_data': value}
self._test_create_extra(params)
def test_create_instance_with_bad_user_data(self):
value = "A random string"
params = {'user_data': value}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
@mock.patch('nova.compute.api.API.create')
    def test_create_instance_with_none_allowed_for_v20_compat_mode(self,
                                                                   mock_create):
def create(context, *args, **kwargs):
self.assertIsNone(kwargs['user_data'])
return ([fakes.stub_instance_obj(context)], None)
mock_create.side_effect = create
self.req.set_legacy_v2()
params = {'user_data': None}
self._test_create_extra(params)
class ServersControllerCreateTestV219(ServersControllerCreateTest):
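    """Tests creating servers with a description, added in microversion 2.19."""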
def _create_instance_req(self, set_desc, desc=None):
if set_desc:
self.body['server']['description'] = desc
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.19')
def test_create_instance_with_description(self):
self._create_instance_req(True, 'server_desc')
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_none_description(self):
self._create_instance_req(True)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_empty_description(self):
self._create_instance_req(True, '')
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_without_description(self):
self._create_instance_req(False)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_description_too_long(self):
self._create_instance_req(True, 'X' * 256)
self.assertRaises(exception.ValidationError, self.controller.create,
self.req, body=self.body)
def test_create_instance_description_invalid(self):
self._create_instance_req(True, "abc\0ddef")
self.assertRaises(exception.ValidationError, self.controller.create,
self.req, body=self.body)
class ServersControllerCreateTestV232(test.NoDBTestCase):
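    """Tests creating servers with device tags; microversion 2.32 added
    tags for NICs and block device mappings at boot time.
    """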
def setUp(self):
super(ServersControllerCreateTestV232, self).setUp()
self.controller = servers.ServersController()
self.body = {
'server': {
'name': 'device-tagging-server',
'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
'flavorRef': '2',
'networks': [{
'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
}],
'block_device_mapping_v2': [{
'uuid': '70a599e0-31e7-49b7-b260-868f441e862b',
'source_type': 'image',
'destination_type': 'volume',
'boot_index': 0,
'volume_size': '1'
}]
}
}
self.req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID, version='2.32')
self.req.method = 'POST'
self.req.headers['content-type'] = 'application/json'
def _create_server(self):
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body)
def test_create_server_no_tags(self):
with test.nested(
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
1)),
):
self._create_server()
def test_create_server_tagged_nic(self):
with test.nested(
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
1)),
):
self.body['server']['networks'][0]['tag'] = 'foo'
self._create_server()
def test_create_server_tagged_bdm(self):
with test.nested(
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
1)),
):
self.body['server']['block_device_mapping_v2'][0]['tag'] = 'foo'
self._create_server()
class ServersControllerCreateTestV237(test.NoDBTestCase):
"""Tests server create scenarios with the v2.37 microversion.
These tests are mostly about testing the validation on the 2.37
server create request with emphasis on negative scenarios.
"""
def setUp(self):
super(ServersControllerCreateTestV237, self).setUp()
# Create the server controller.
self.controller = servers.ServersController()
# Define a basic server create request body which tests can customize.
self.body = {
'server': {
'name': 'auto-allocate-test',
'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
'flavorRef': '2',
},
}
# Create a fake request using the 2.37 microversion.
self.req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID, version='2.37')
self.req.method = 'POST'
self.req.headers['content-type'] = 'application/json'
def _create_server(self, networks):
self.body['server']['networks'] = networks
self.req.body = jsonutils.dump_as_bytes(self.body)
return self.controller.create(self.req, body=self.body).obj['server']
    def test_create_server_auto_pre_2_37_fails(self):
"""Negative test to make sure you can't pass 'auto' before 2.37"""
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.36')
self.assertRaises(exception.ValidationError, self._create_server,
'auto')
def test_create_server_no_requested_networks_fails(self):
"""Negative test for a server create request with no networks requested
which should fail with the v2.37 schema validation.
"""
self.assertRaises(exception.ValidationError, self._create_server, None)
def test_create_server_network_id_not_uuid_fails(self):
"""Negative test for a server create request where the requested
network id is not one of the auto/none enums.
"""
self.assertRaises(exception.ValidationError, self._create_server,
'not-auto-or-none')
def test_create_server_network_id_empty_string_fails(self):
"""Negative test for a server create request where the requested
network id is the empty string.
"""
self.assertRaises(exception.ValidationError, self._create_server, '')
@mock.patch.object(context.RequestContext, 'can')
def test_create_server_networks_none_skip_policy(self, context_can):
"""Test to ensure skip checking policy rule create:attach_network,
when networks is 'none' which means no network will be allocated.
"""
with test.nested(
mock.patch('nova.objects.service.get_minimum_version_all_cells',
return_value=14),
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f9bccadf-5ab1-4a56-9156-c00c178fe5f5'}],
1)),
):
network_policy = server_policies.SERVERS % 'create:attach_network'
self._create_server('none')
call_list = [c for c in context_can.call_args_list
if c[0][0] == network_policy]
self.assertEqual(0, len(call_list))
@mock.patch.object(objects.Flavor, 'get_by_flavor_id',
side_effect=exception.FlavorNotFound(flavor_id='2'))
def test_create_server_auto_flavornotfound(self, get_flavor):
"""Tests that requesting auto networking is OK. This test
short-circuits on a FlavorNotFound error.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(
webob.exc.HTTPBadRequest, self._create_server, 'auto')
# make sure it was a flavor not found error and not something else
self.assertIn('Flavor 2 could not be found', six.text_type(ex))
@mock.patch.object(objects.Flavor, 'get_by_flavor_id',
side_effect=exception.FlavorNotFound(flavor_id='2'))
def test_create_server_none_flavornotfound(self, get_flavor):
"""Tests that requesting none for networking is OK. This test
short-circuits on a FlavorNotFound error.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(
webob.exc.HTTPBadRequest, self._create_server, 'none')
# make sure it was a flavor not found error and not something else
self.assertIn('Flavor 2 could not be found', six.text_type(ex))
@mock.patch.object(objects.Flavor, 'get_by_flavor_id',
side_effect=exception.FlavorNotFound(flavor_id='2'))
def test_create_server_multiple_specific_nics_flavornotfound(self,
get_flavor):
"""Tests that requesting multiple specific network IDs is OK. This test
short-circuits on a FlavorNotFound error.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(
webob.exc.HTTPBadRequest, self._create_server,
[{'uuid': 'e3b686a8-b91d-4a61-a3fc-1b74bb619ddb'},
{'uuid': 'e0f00941-f85f-46ec-9315-96ded58c2f14'}])
# make sure it was a flavor not found error and not something else
self.assertIn('Flavor 2 could not be found', six.text_type(ex))
def test_create_server_legacy_neutron_network_id_fails(self):
"""Tests that we no longer support the legacy br-<uuid> format for
a network id.
"""
uuid = 'br-00000000-0000-0000-0000-000000000000'
self.assertRaises(exception.ValidationError, self._create_server,
[{'uuid': uuid}])
@ddt.ddt
class ServersControllerCreateTestV252(test.NoDBTestCase):
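    """Tests creating servers with tags, which microversion 2.52 allows
    to be set at boot time.
    """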
def setUp(self):
super(ServersControllerCreateTestV252, self).setUp()
self.controller = servers.ServersController()
self.body = {
'server': {
'name': 'device-tagging-server',
'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
'flavorRef': '2',
'networks': [{
'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
}]
}
}
self.req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID, version='2.52')
self.req.method = 'POST'
self.req.headers['content-type'] = 'application/json'
def _create_server(self, tags):
self.body['server']['tags'] = tags
self.req.body = jsonutils.dump_as_bytes(self.body)
return self.controller.create(self.req, body=self.body).obj['server']
def test_create_server_with_tags_pre_2_52_fails(self):
"""Negative test to make sure you can't pass 'tags' before 2.52"""
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.51')
self.assertRaises(
exception.ValidationError, self._create_server, ['tag1'])
@ddt.data([','],
['/'],
['a' * (tag.MAX_TAG_LENGTH + 1)],
['a'] * (instance_obj.MAX_TAG_COUNT + 1),
[''],
[1, 2, 3],
{'tag': 'tag'})
def test_create_server_with_tags_incorrect_tags(self, tags):
"""Negative test to incorrect tags are not allowed"""
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.52')
self.assertRaises(
exception.ValidationError, self._create_server, tags)
class ServersControllerCreateTestV257(test.NoDBTestCase):
"""Tests that trying to create a server with personality files using
microversion 2.57 fails.
"""
def test_create_server_with_personality_fails(self):
controller = servers.ServersController()
body = {
'server': {
'name': 'no-personality-files',
'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
'flavorRef': '2',
'networks': 'auto',
'personality': [{
'path': '/path/to/file',
'contents': 'ZWNobyAiaGVsbG8gd29ybGQi'
}]
}
}
req = fakes.HTTPRequestV21.blank('/servers', version='2.57')
req.body = jsonutils.dump_as_bytes(body)
req.method = 'POST'
req.headers['content-type'] = 'application/json'
ex = self.assertRaises(
exception.ValidationError, controller.create, req, body=body)
self.assertIn('personality', six.text_type(ex))
@mock.patch('nova.compute.utils.check_num_instances_quota',
new=lambda *args, **kwargs: 1)
class ServersControllerCreateTestV260(test.NoDBTestCase):
"""Negative tests for creating a server with a multiattach volume."""
def setUp(self):
super(ServersControllerCreateTestV260, self).setUp()
self.useFixture(nova_fixtures.NoopQuotaDriverFixture())
self.controller = servers.ServersController()
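        # Mock out flavor lookup, request spec creation and the Cinder
        # volume lookup so no external services are needed.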
get_flavor_mock = mock.patch(
'nova.compute.flavors.get_flavor_by_flavor_id',
return_value=fake_flavor.fake_flavor_obj(
context.get_admin_context(), flavorid='1',
expected_attrs=['extra_specs']))
get_flavor_mock.start()
self.addCleanup(get_flavor_mock.stop)
reqspec_create_mock = mock.patch(
'nova.objects.RequestSpec.create')
reqspec_create_mock.start()
self.addCleanup(reqspec_create_mock.stop)
volume_get_mock = mock.patch(
'nova.volume.cinder.API.get',
return_value={'id': uuids.fake_volume_id, 'multiattach': True})
volume_get_mock.start()
self.addCleanup(volume_get_mock.stop)
def _post_server(self, version=None):
body = {
'server': {
'name': 'multiattach',
'flavorRef': '1',
'networks': 'none',
'block_device_mapping_v2': [{
'uuid': uuids.fake_volume_id,
'source_type': 'volume',
'destination_type': 'volume',
'boot_index': 0,
'delete_on_termination': True}]
}
}
req = fakes.HTTPRequestV21.blank(
'/servers', version=version or '2.60')
req.body = jsonutils.dump_as_bytes(body)
req.method = 'POST'
req.headers['content-type'] = 'application/json'
return self.controller.create(req, body=body)
def test_create_server_with_multiattach_fails_old_microversion(self):
"""Tests the case that the user tries to boot from volume with a
multiattach volume but before using microversion 2.60.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._post_server, '2.59')
self.assertIn('Multiattach volumes are only supported starting with '
'compute API version 2.60', six.text_type(ex))
class ServersControllerCreateTestV263(ServersControllerCreateTest):
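    """Tests creating servers with trusted_image_certificates, added in
    microversion 2.63.
    """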
def _create_instance_req(self, certs=None):
self.body['server']['trusted_image_certificates'] = certs
self.flags(verify_glance_signatures=True, group='glance')
self.flags(enable_certificate_validation=True, group='glance')
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.63')
def test_create_instance_with_trusted_certs(self):
"""Test create with valid trusted_image_certificates argument"""
self._create_instance_req(
['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
'674736e3-f25c-405c-8362-bbf991e0ce0a'])
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_without_trusted_certs(self):
"""Test create without trusted image certificates"""
self._create_instance_req()
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_empty_trusted_cert_id(self):
"""Make sure we can't create with an empty certificate ID"""
self._create_instance_req([''])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is too short', six.text_type(ex))
def test_create_instance_with_empty_trusted_certs(self):
"""Make sure we can't create with an empty array of IDs"""
self.body['server']['trusted_image_certificates'] = []
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.63')
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is too short', six.text_type(ex))
def test_create_instance_with_too_many_trusted_certs(self):
"""Make sure we can't create with an array of >50 unique IDs"""
self._create_instance_req(['cert{}'.format(i) for i in range(51)])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is too long', six.text_type(ex))
def test_create_instance_with_nonunique_trusted_certs(self):
"""Make sure we can't create with a non-unique array of IDs"""
self._create_instance_req(['cert', 'cert'])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('has non-unique elements', six.text_type(ex))
def test_create_instance_with_invalid_trusted_cert_id(self):
"""Make sure we can't create with non-string certificate IDs"""
self._create_instance_req([1, 2])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is not of type', six.text_type(ex))
def test_create_instance_with_invalid_trusted_certs(self):
"""Make sure we can't create with certificates in a non-array"""
self._create_instance_req("not-an-array")
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is not of type', six.text_type(ex))
def test_create_server_with_trusted_certs_pre_2_63_fails(self):
"""Make sure we can't use trusted_certs before 2.63"""
self._create_instance_req(['trusted-cert-id'])
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.62')
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('Additional properties are not allowed',
six.text_type(ex))
def test_create_server_with_trusted_certs_policy_failed(self):
rule_name = "os_compute_api:servers:create:trusted_certs"
rules = {"os_compute_api:servers:create": "@",
"os_compute_api:servers:create:forced_host": "@",
"os_compute_api:servers:create:attach_volume": "@",
"os_compute_api:servers:create:attach_network": "@",
rule_name: "project:fake"}
self._create_instance_req(['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'])
self.policy.set_rules(rules)
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller.create, self.req,
body=self.body)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
@mock.patch.object(compute_api.API, 'create')
def test_create_server_with_cert_validation_error(
self, mock_create):
mock_create.side_effect = exception.CertificateValidationFailed(
cert_uuid="cert id", reason="test cert validation error")
self._create_instance_req(['trusted-cert-id'])
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req,
body=self.body)
self.assertIn('test cert validation error',
six.text_type(ex))
class ServersControllerCreateTestV267(ServersControllerCreateTest):
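    """Tests creating servers with a volume_type in block_device_mapping_v2,
    added in microversion 2.67.
    """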
def setUp(self):
super(ServersControllerCreateTestV267, self).setUp()
self.block_device_mapping_v2 = [
{'uuid': '70a599e0-31e7-49b7-b260-868f441e862b',
'source_type': 'image',
'destination_type': 'volume',
'boot_index': 0,
'volume_size': '1',
'volume_type': 'fake-lvm-1'
}]
def _test_create_extra(self, *args, **kwargs):
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.67')
return super(ServersControllerCreateTestV267, self)._test_create_extra(
*args, **kwargs)
def test_create_server_with_trusted_volume_type_pre_2_67_fails(self):
"""Make sure we can't use volume_type before 2.67"""
self.body['server'].update(
{'block_device_mapping_v2': self.block_device_mapping_v2})
        self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.66')
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn("'volume_type' was unexpected", six.text_type(ex))
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.VolumeTypeNotFound(
id_or_name='fake-lvm-1'))
def test_create_instance_with_volume_type_not_found(self, mock_create):
"""Trying to boot from volume with a volume type that does not exist
will result in a 400 error.
"""
params = {'block_device_mapping_v2': self.block_device_mapping_v2}
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
self.assertIn('Volume type fake-lvm-1 could not be found',
six.text_type(ex))
def test_create_instance_with_volume_type_empty_string(self):
"""Test passing volume_type='' which is accepted but not used."""
self.block_device_mapping_v2[0]['volume_type'] = ''
params = {'block_device_mapping_v2': self.block_device_mapping_v2}
self._test_create_extra(params)
def test_create_instance_with_none_volume_type(self):
"""Test passing volume_type=None which is accepted but not used."""
self.block_device_mapping_v2[0]['volume_type'] = None
params = {'block_device_mapping_v2': self.block_device_mapping_v2}
self._test_create_extra(params)
def test_create_instance_without_volume_type(self):
"""Test passing without volume_type which is accepted but not used."""
self.block_device_mapping_v2[0].pop('volume_type')
params = {'block_device_mapping_v2': self.block_device_mapping_v2}
self._test_create_extra(params)
def test_create_instance_with_volume_type_too_long(self):
"""Tests the maxLength schema validation on volume_type."""
self.block_device_mapping_v2[0]['volume_type'] = 'X' * 256
params = {'block_device_mapping_v2': self.block_device_mapping_v2}
ex = self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
self.assertIn('is too long', six.text_type(ex))
class ServersControllerCreateTestV274(ServersControllerCreateTest):
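    """Tests creating servers pinned to a specific host and/or
    hypervisor_hostname, which microversion 2.74 allows for admins.
    """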
def setUp(self):
super(ServersControllerCreateTestV274, self).setUp()
self.req.environ['nova.context'] = fakes.FakeRequestContext(
user_id='fake_user',
project_id=self.project_id,
is_admin=True)
self.mock_get = self.useFixture(
fixtures.MockPatch('nova.scheduler.client.report.'
'SchedulerReportClient.get')).mock
def _generate_req(self, host=None, node=None, az=None,
api_version='2.74'):
if host:
self.body['server']['host'] = host
if node:
self.body['server']['hypervisor_hostname'] = node
if az:
self.body['server']['availability_zone'] = az
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.api_version_request = \
api_version_request.APIVersionRequest(api_version)
def test_create_instance_with_invalid_host(self):
self._generate_req(host='node-invalid')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
self.assertIn('Compute host node-invalid could not be found.',
six.text_type(ex))
def test_create_instance_with_non_string_host(self):
self._generate_req(host=123)
ex = self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
self.assertIn("Invalid input for field/attribute host.",
six.text_type(ex))
def test_create_instance_with_invalid_hypervisor_hostname(self):
get_resp = mock.Mock()
get_resp.status_code = 404
self.mock_get.return_value = get_resp
self._generate_req(node='node-invalid')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
self.assertIn('Compute host node-invalid could not be found.',
six.text_type(ex))
def test_create_instance_with_non_string_hypervisor_hostname(self):
get_resp = mock.Mock()
get_resp.status_code = 404
self.mock_get.return_value = get_resp
self._generate_req(node=123)
ex = self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
self.assertIn("Invalid input for field/attribute hypervisor_hostname.",
six.text_type(ex))
def test_create_instance_with_invalid_host_and_hypervisor_hostname(self):
self._generate_req(host='host-invalid', node='node-invalid')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
self.assertIn('Compute host host-invalid could not be found.',
six.text_type(ex))
def test_create_instance_with_non_string_host_and_hypervisor_hostname(
self):
self._generate_req(host=123, node=123)
ex = self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
self.assertIn("Invalid input for field/attribute",
six.text_type(ex))
def test_create_instance_pre_274(self):
self._generate_req(host='host', node='node', api_version='2.73')
ex = self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
self.assertIn("Invalid input for field/attribute server.",
six.text_type(ex))
def test_create_instance_mutual(self):
self._generate_req(host='host', node='node', az='nova:host:node')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
self.assertIn("mutually exclusive", six.text_type(ex))
def test_create_instance_invalid_policy(self):
self._generate_req(host='host', node='node')
# non-admin
self.req.environ['nova.context'] = fakes.FakeRequestContext(
user_id='fake_user',
project_id=fakes.FAKE_PROJECT_ID,
is_admin=False)
ex = self.assertRaises(exception.PolicyNotAuthorized,
self.controller.create,
self.req, body=self.body)
self.assertIn("Policy doesn't allow compute:servers:create:"
"requested_destination to be performed.",
six.text_type(ex))
def test_create_instance_private_flavor(self):
        # The inherited test expects a non-admin context, but this class
        # sets up an admin context in setUp(), so override it as a no-op.
pass
class ServersControllerCreateTestWithMock(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTestWithMock, self).setUp()
self.flags(enable_instance_password=True, group='api')
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
self.controller = servers.ServersController()
self.body = {
'server': {
'name': 'server_test',
'imageRef': self.image_uuid,
'flavorRef': self.flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
self.req = fakes.HTTPRequest.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID)
self.req.method = 'POST'
self.req.headers["content-type"] = "application/json"
def _test_create_extra(self, params, no_image=False):
self.body['server']['flavorRef'] = 2
if no_image:
self.body['server'].pop('imageRef', None)
self.body['server'].update(params)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.headers["content-type"] = "application/json"
self.controller.create(self.req, body=self.body).obj['server']
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_fixed_ip_already_in_use(self, create_mock):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
address = '10.0.2.3'
requested_networks = [{'uuid': network, 'fixed_ip': address}]
params = {'networks': requested_networks}
create_mock.side_effect = exception.FixedIpAlreadyInUse(
address=address,
instance_uuid=network)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
self.assertEqual(1, len(create_mock.call_args_list))
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_invalid_fixed_ip(self, create_mock):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
address = '999.0.2.3'
requested_networks = [{'uuid': network, 'fixed_ip': address}]
params = {'networks': requested_networks}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
self.assertFalse(create_mock.called)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidVolume(reason='error'))
def test_create_instance_with_invalid_volume_error(self, create_mock):
# Tests that InvalidVolume is translated to a 400 error.
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
class ServersViewBuilderTest(test.TestCase):
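    """Tests for the view builder that renders instances into API
    server representations.
    """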
project_id = fakes.FAKE_PROJECT_ID
def setUp(self):
super(ServersViewBuilderTest, self).setUp()
fakes.stub_out_nw_api(self)
self.flags(group='glance', api_servers=['http://localhost:9292'])
nw_cache_info = self._generate_nw_cache_info()
db_inst = fakes.stub_instance(
id=1,
image_ref="5",
uuid=FAKE_UUID,
display_name="test_server",
include_fake_metadata=False,
availability_zone='nova',
nw_cache=nw_cache_info,
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
fakes.stub_out_secgroup_api(
self, security_groups=[{'name': 'default'}])
self.stub_out('nova.db.api.'
'block_device_mapping_get_all_by_instance_uuids',
fake_bdms_get_all_by_instance_uuids)
self.stub_out('nova.objects.InstanceMappingList.'
'_get_by_instance_uuids_from_db',
fake_get_inst_mappings_by_instance_uuids_from_db)
self.uuid = db_inst['uuid']
self.view_builder = views.servers.ViewBuilder()
self.request = fakes.HTTPRequestV21.blank("/%s" % self.project_id)
self.request.context = context.RequestContext('fake', self.project_id)
self.instance = fake_instance.fake_instance_obj(
self.request.context,
expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
**db_inst)
self.self_link = "http://localhost/v2/%s/servers/%s" % (
self.project_id, self.uuid)
self.bookmark_link = "http://localhost/%s/servers/%s" % (
self.project_id, self.uuid)
def _generate_nw_cache_info(self):
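        """Build a fake network info cache: three ports across two
        networks, each with a fixed IPv4 address and one with an
        additional IPv6 address.
        """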
fixed_ipv4 = ('192.168.1.100', '192.168.2.100', '192.168.3.100')
fixed_ipv6 = ('2001:db8:0:1::1',)
def _ip(ip):
return {'address': ip, 'type': 'fixed'}
nw_cache = [
{'address': 'aa:aa:aa:aa:aa:aa',
'id': 1,
'network': {'bridge': 'br0',
'id': 1,
'label': 'test1',
'subnets': [{'cidr': '192.168.1.0/24',
'ips': [_ip(fixed_ipv4[0])]},
{'cidr': 'b33f::/64',
'ips': [_ip(fixed_ipv6[0])]}]}},
{'address': 'bb:bb:bb:bb:bb:bb',
'id': 2,
'network': {'bridge': 'br0',
'id': 1,
'label': 'test1',
'subnets': [{'cidr': '192.168.2.0/24',
'ips': [_ip(fixed_ipv4[1])]}]}},
{'address': 'cc:cc:cc:cc:cc:cc',
'id': 3,
'network': {'bridge': 'br0',
'id': 2,
'label': 'test2',
'subnets': [{'cidr': '192.168.3.0/24',
'ips': [_ip(fixed_ipv4[2])]}]}}]
return nw_cache
def test_get_flavor_valid_instance_type(self):
flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
expected = {"id": "1",
"links": [{"rel": "bookmark",
"href": flavor_bookmark}]}
result = self.view_builder._get_flavor(self.request, self.instance,
False)
self.assertEqual(result, expected)
@mock.patch('nova.context.scatter_gather_cells')
    def test_get_volumes_attached_with_failing_cells(self, mock_sg):
bdms = fake_bdms_get_all_by_instance_uuids()
        # Fake a 'nova list' scenario: cell1 returns BDMs while cell2
        # raises BDMNotFound.
mock_sg.return_value = {
uuids.cell1: bdms[0],
uuids.cell2: exception.BDMNotFound(id='fake')
}
ctxt = context.RequestContext('fake', fakes.FAKE_PROJECT_ID)
result = self.view_builder._get_instance_bdms_in_multiple_cells(
ctxt, [self.instance.uuid])
# will get the result from cell1
self.assertEqual(result, bdms[0])
mock_sg.assert_called_once()
def test_build_server(self):
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_with_project_id(self):
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail(self):
image_bookmark = "http://localhost/%s/images/5" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "ACTIVE",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '192.168.2.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
],
'test2': [
{'version': 4, 'addr': '192.168.3.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
"OS-DCF:diskConfig": "MANUAL",
"accessIPv4": '',
"accessIPv6": '',
"OS-EXT-AZ:availability_zone": "nova",
"config_drive": None,
"OS-EXT-SRV-ATTR:host": None,
"OS-EXT-SRV-ATTR:hypervisor_hostname": None,
"OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
"key_name": '',
"OS-SRV-USG:launched_at": None,
"OS-SRV-USG:terminated_at": None,
"security_groups": [{'name': 'default'}],
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": vm_states.ACTIVE,
"OS-EXT-STS:power_state": 1,
"os-extended-volumes:volumes_attached": [
{'id': 'some_volume_1'},
{'id': 'some_volume_2'},
]
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
image_bookmark = "http://localhost/%s/images/5" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"name": "test_server",
"status": "ERROR",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '192.168.2.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
],
'test2': [
{'version': 4, 'addr': '192.168.3.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
"fault": {
"code": 404,
"created": "2010-10-10T12:00:00Z",
"message": "HTTPNotFound",
"details": "Stock details for test",
},
"OS-DCF:diskConfig": "MANUAL",
"accessIPv4": '',
"accessIPv6": '',
"OS-EXT-AZ:availability_zone": "nova",
"config_drive": None,
"OS-EXT-SRV-ATTR:host": None,
"OS-EXT-SRV-ATTR:hypervisor_hostname": None,
"OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
"key_name": '',
"OS-SRV-USG:launched_at": None,
"OS-SRV-USG:terminated_at": None,
"security_groups": [{'name': 'default'}],
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": vm_states.ERROR,
"OS-EXT-STS:power_state": 1,
"os-extended-volumes:volumes_attached": [
{'id': 'some_volume_1'},
{'id': 'some_volume_2'},
]
}
}
self.request.context = context.RequestContext('fake', self.project_id)
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault_that_has_been_deleted(self):
self.instance['deleted'] = 1
self.instance['vm_state'] = vm_states.ERROR
fault = fake_instance.fake_fault_obj(self.request.context,
self.uuid, code=500,
message="No valid host was found")
self.instance['fault'] = fault
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "No valid host was found"}
self.request.context = context.RequestContext('fake', self.project_id)
output = self.view_builder.show(self.request, self.instance)
# Regardless of vm_state deleted servers should be DELETED
self.assertEqual("DELETED", output['server']['status'])
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
@mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
def test_build_server_detail_with_fault_no_instance_mapping(self,
mock_im):
self.instance['vm_state'] = vm_states.ERROR
mock_im.side_effect = exception.InstanceMappingNotFound(uuid='foo')
self.request.context = context.RequestContext('fake', self.project_id)
self.view_builder.show(self.request, self.instance)
mock_im.assert_called_once_with(mock.ANY, self.uuid)
@mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
def test_build_server_detail_with_fault_loaded(self, mock_im):
self.instance['vm_state'] = vm_states.ERROR
fault = fake_instance.fake_fault_obj(self.request.context,
self.uuid, code=500,
message="No valid host was found")
self.instance['fault'] = fault
self.request.context = context.RequestContext('fake', self.project_id)
self.view_builder.show(self.request, self.instance)
self.assertFalse(mock_im.called)
def test_build_server_detail_with_fault_no_details_not_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.RequestContext('fake', self.project_id)
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error',
details='')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
output = self.view_builder.show(self.request, self.instance)
self.assertNotIn('fault', output['server'])
def test_build_server_detail_active_status(self):
        # set the instance's vm_state to active (i.e. the server is running)
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
image_bookmark = "http://localhost/%s/images/5" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "test_server",
"status": "ACTIVE",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '192.168.2.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
],
'test2': [
{'version': 4, 'addr': '192.168.3.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
"OS-DCF:diskConfig": "MANUAL",
"accessIPv4": '',
"accessIPv6": '',
"OS-EXT-AZ:availability_zone": "nova",
"config_drive": None,
"OS-EXT-SRV-ATTR:host": None,
"OS-EXT-SRV-ATTR:hypervisor_hostname": None,
"OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
"key_name": '',
"OS-SRV-USG:launched_at": None,
"OS-SRV-USG:terminated_at": None,
"security_groups": [{'name': 'default'}],
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": vm_states.ACTIVE,
"OS-EXT-STS:power_state": 1,
"os-extended-volumes:volumes_attached": [
{'id': 'some_volume_1'},
{'id': 'some_volume_2'},
]
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_metadata(self):
metadata = []
metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
metadata = nova_utils.metadata_to_dict(metadata)
self.instance['metadata'] = metadata
image_bookmark = "http://localhost/%s/images/5" % self.project_id
flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "ACTIVE",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '192.168.2.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
],
'test2': [
{'version': 4, 'addr': '192.168.3.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
]
},
"metadata": {"Open": "Stack"},
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
"OS-DCF:diskConfig": "MANUAL",
"accessIPv4": '',
"accessIPv6": '',
"OS-EXT-AZ:availability_zone": "nova",
"config_drive": None,
"OS-EXT-SRV-ATTR:host": None,
"OS-EXT-SRV-ATTR:hypervisor_hostname": None,
"OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
"key_name": '',
"OS-SRV-USG:launched_at": None,
"OS-SRV-USG:terminated_at": None,
"security_groups": [{'name': 'default'}],
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": vm_states.ACTIVE,
"OS-EXT-STS:power_state": 1,
"os-extended-volumes:volumes_attached": [
{'id': 'some_volume_1'},
{'id': 'some_volume_2'},
]
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
class ServersViewBuilderTestV269(ServersViewBuilderTest):
"""Server ViewBuilder test for microversion 2.69
The intent here is simply to verify that when showing server details
after microversion 2.69 the response could have missing keys for those
servers from the down cells.
"""
wsgi_api_version = '2.69'
def setUp(self):
super(ServersViewBuilderTestV269, self).setUp()
self.view_builder = views.servers.ViewBuilder()
self.ctxt = context.RequestContext('fake', self.project_id)
def fake_is_supported(req, min_version="2.1", max_version="2.69"):
return (fakes.api_version.APIVersionRequest(max_version) >=
req.api_version_request >=
fakes.api_version.APIVersionRequest(min_version))
self.stub_out('nova.api.openstack.api_version_request.is_supported',
fake_is_supported)
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
def test_get_server_list_detail_with_down_cells(self):
        # Fake out one partially constructed instance and one full instance.
self.instances = [
self.instance,
objects.Instance(
context=self.ctxt,
uuid=uuids.fake1,
project_id=fakes.FAKE_PROJECT_ID,
created_at=datetime.datetime(1955, 11, 5)
)
]
req = self.req('/%s/servers/detail' % self.project_id)
output = self.view_builder.detail(req, self.instances, True)
self.assertEqual(2, len(output['servers']))
image_bookmark = "http://localhost/%s/images/5" % self.project_id
expected = {
"servers": [{
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "ACTIVE",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
'disk': 1,
'ephemeral': 1,
'vcpus': 1,
'ram': 256,
'original_name': 'flavor1',
'extra_specs': {},
'swap': 0
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '192.168.2.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
],
'test2': [
{'version': 4, 'addr': '192.168.3.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
]
},
"metadata": {},
"tags": [],
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
"OS-DCF:diskConfig": "MANUAL",
"OS-EXT-SRV-ATTR:root_device_name": None,
"accessIPv4": '',
"accessIPv6": '',
"host_status": '',
"OS-EXT-SRV-ATTR:user_data": None,
"trusted_image_certificates": None,
"OS-EXT-AZ:availability_zone": "nova",
"OS-EXT-SRV-ATTR:kernel_id": '',
"OS-EXT-SRV-ATTR:reservation_id": '',
"config_drive": None,
"OS-EXT-SRV-ATTR:host": None,
"OS-EXT-SRV-ATTR:hypervisor_hostname": None,
"OS-EXT-SRV-ATTR:hostname": 'test_server',
"OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
"key_name": '',
"locked": False,
"description": None,
"OS-SRV-USG:launched_at": None,
"OS-SRV-USG:terminated_at": None,
"security_groups": [{'name': 'default'}],
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": vm_states.ACTIVE,
"OS-EXT-STS:power_state": 1,
"OS-EXT-SRV-ATTR:launch_index": 0,
"OS-EXT-SRV-ATTR:ramdisk_id": '',
"os-extended-volumes:volumes_attached": [
{'id': 'some_volume_1', 'delete_on_termination': True},
{'id': 'some_volume_2', 'delete_on_termination': False},
]
},
{
'created': '1955-11-05T00:00:00Z',
'id': uuids.fake1,
'tenant_id': fakes.FAKE_PROJECT_ID,
"status": "UNKNOWN",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/%s/servers/%s" %
(self.project_id, uuids.fake1),
},
{
"rel": "bookmark",
"href": "http://localhost/%s/servers/%s" %
(self.project_id, uuids.fake1),
},
],
}]
}
self.assertThat(output, matchers.DictMatches(expected))
def test_get_server_list_with_down_cells(self):
        # Fake out one partially constructed instance and one full instance.
self.instances = [
self.instance,
objects.Instance(
context=self.ctxt,
uuid=uuids.fake1,
project_id=fakes.FAKE_PROJECT_ID,
created_at=datetime.datetime(1955, 11, 5)
)
]
req = self.req('/%s/servers' % self.project_id)
output = self.view_builder.index(req, self.instances, True)
self.assertEqual(2, len(output['servers']))
expected = {
"servers": [{
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
]
},
{
'id': uuids.fake1,
"status": "UNKNOWN",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/%s/servers/%s" %
(self.project_id, uuids.fake1),
},
{
"rel": "bookmark",
"href": "http://localhost/%s/servers/%s" %
(self.project_id, uuids.fake1),
},
],
}]
}
self.assertThat(output, matchers.DictMatches(expected))
def test_get_server_with_down_cells(self):
        # Fake out one partially constructed instance.
self.instance = objects.Instance(
context=self.ctxt,
uuid=self.uuid,
project_id=self.instance.project_id,
created_at=datetime.datetime(1955, 11, 5),
user_id=self.instance.user_id,
image_ref=self.instance.image_ref,
power_state=0,
flavor=self.instance.flavor,
availability_zone=self.instance.availability_zone
)
req = self.req('/%s/servers/%s' % (self.project_id, FAKE_UUID))
output = self.view_builder.show(req, self.instance,
cell_down_support=True)
# ten fields from request_spec and instance_mapping
self.assertEqual(10, len(output['server']))
image_bookmark = "http://localhost/%s/images/5" % self.project_id
expected = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"created": '1955-11-05T00:00:00Z',
"status": "UNKNOWN",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
'disk': 1,
'ephemeral': 1,
'vcpus': 1,
'ram': 256,
'original_name': 'flavor1',
'extra_specs': {},
'swap': 0
},
"OS-EXT-AZ:availability_zone": "nova",
"OS-EXT-STS:power_state": 0,
"links": [
{
"rel": "self",
"href": "http://localhost/v2/%s/servers/%s" %
(self.project_id, self.uuid),
},
{
"rel": "bookmark",
"href": "http://localhost/%s/servers/%s" %
(self.project_id, self.uuid),
},
]
}
}
self.assertThat(output, matchers.DictMatches(expected))
def test_get_server_without_image_avz_user_id_set_from_down_cells(self):
        # Fake out one partially constructed instance.
self.instance = objects.Instance(
context=self.ctxt,
uuid=self.uuid,
project_id=self.instance.project_id,
created_at=datetime.datetime(1955, 11, 5),
user_id=None,
image_ref=None,
power_state=0,
flavor=self.instance.flavor,
availability_zone=None
)
req = self.req('/%s/servers/%s' % (self.project_id, FAKE_UUID))
output = self.view_builder.show(req, self.instance,
cell_down_support=True)
        # ten fields from request_spec and instance_mapping
self.assertEqual(10, len(output['server']))
expected = {
"server": {
"id": self.uuid,
"user_id": "UNKNOWN",
"tenant_id": "fake_project",
"created": '1955-11-05T00:00:00Z',
"status": "UNKNOWN",
"image": "",
"flavor": {
'disk': 1,
'ephemeral': 1,
'vcpus': 1,
'ram': 256,
'original_name': 'flavor1',
'extra_specs': {},
'swap': 0
},
"OS-EXT-AZ:availability_zone": "UNKNOWN",
"OS-EXT-STS:power_state": 0,
"links": [
{
"rel": "self",
"href": "http://localhost/v2/%s/servers/%s" %
(self.project_id, self.uuid),
},
{
"rel": "bookmark",
"href": "http://localhost/%s/servers/%s" %
(self.project_id, self.uuid),
},
]
}
}
self.assertThat(output, matchers.DictMatches(expected))
class ServersAllExtensionsTestCase(test.TestCase):
"""Servers tests using default API router with all extensions enabled.
The intent here is to catch cases where extensions end up throwing
an exception because of a malformed request before the core API
    gets a chance to validate the request and return a 400 response.
For example, AccessIPsController extends servers.Controller::
| @wsgi.extends
| def create(self, req, resp_obj, body):
| context = req.environ['nova.context']
| if authorize(context) and 'server' in resp_obj.obj:
| resp_obj.attach(xml=AccessIPTemplate())
| server = resp_obj.obj['server']
| self._extend_server(req, server)
we want to ensure that the extension isn't barfing on an invalid
body.
"""
def setUp(self):
super(ServersAllExtensionsTestCase, self).setUp()
self.app = compute.APIRouterV21()
@mock.patch.object(compute_api.API, 'create',
side_effect=test.TestingException(
"Should not reach the compute API."))
def test_create_missing_server(self, mock_create):
# Test create with malformed body.
req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID)
req.method = 'POST'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dump_as_bytes(body)
res = req.get_response(self.app)
self.assertEqual(400, res.status_int)
def test_update_missing_server(self):
# Test update with malformed body.
req = fakes.HTTPRequestV21.blank(
'/%s/servers/1' % fakes.FAKE_PROJECT_ID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dump_as_bytes(body)
with mock.patch('nova.objects.Instance.save') as mock_save:
res = req.get_response(self.app)
self.assertFalse(mock_save.called)
self.assertEqual(400, res.status_int)
class ServersInvalidRequestTestCase(test.TestCase):
"""Tests of places we throw 400 Bad Request from."""
def setUp(self):
super(ServersInvalidRequestTestCase, self).setUp()
self.controller = servers.ServersController()
def _invalid_server_create(self, body):
req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID)
req.method = 'POST'
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
def test_create_server_no_body(self):
self._invalid_server_create(body=None)
def test_create_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._invalid_server_create(body=body)
def test_create_server_malformed_entity(self):
body = {'server': 'string'}
self._invalid_server_create(body=body)
def _unprocessable_server_update(self, body):
req = fakes.HTTPRequestV21.blank(
'/%s/servers/%s' % (fakes.FAKE_PROJECT_ID, FAKE_UUID))
req.method = 'PUT'
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, FAKE_UUID, body=body)
def test_update_server_no_body(self):
self._invalid_server_create(body=None)
def test_update_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._invalid_server_create(body=body)
def test_create_update_malformed_entity(self):
body = {'server': 'string'}
self._invalid_server_create(body=body)
# TODO(alex_xu): There isn't specified file for ips extension. Most of
# unittest related to ips extension is in this file. So put the ips policy
# enforcement tests at here until there is specified file for ips extension.
class IPsPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(IPsPolicyEnforcementV21, self).setUp()
self.controller = ips.IPsController()
self.req = fakes.HTTPRequest.blank("/v2/%s" % fakes.FAKE_PROJECT_ID)
def test_index_policy_failed(self):
rule_name = "os_compute_api:ips:index"
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_show_policy_failed(self):
rule_name = "os_compute_api:ips:show"
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.show, self.req, fakes.FAKE_UUID, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
class ServersPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(ServersPolicyEnforcementV21, self).setUp()
self.useFixture(nova_fixtures.AllServicesCurrent())
self.controller = servers.ServersController()
self.req = fakes.HTTPRequest.blank('')
self.image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def _common_policy_check(self, rules, rule_name, func, *arg, **kwarg):
self.policy.set_rules(rules)
exc = self.assertRaises(
exception.PolicyNotAuthorized, func, *arg, **kwarg)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
@mock.patch.object(servers.ServersController, '_get_instance')
def test_start_policy_failed(self, _get_instance_mock):
_get_instance_mock.return_value = None
rule_name = "os_compute_api:servers:start"
rule = {rule_name: "project:non_fake"}
self._common_policy_check(
rule, rule_name, self.controller._start_server,
self.req, FAKE_UUID, body={})
@mock.patch.object(servers.ServersController, '_get_instance')
def test_trigger_crash_dump_policy_failed_with_other_project(
self, _get_instance_mock):
_get_instance_mock.return_value = fake_instance.fake_instance_obj(
self.req.environ['nova.context'])
rule_name = "os_compute_api:servers:trigger_crash_dump"
rule = {rule_name: "project_id:%(project_id)s"}
self.req.api_version_request =\
api_version_request.APIVersionRequest('2.17')
# Change the project_id in request context.
self.req.environ['nova.context'].project_id = 'other-project'
self._common_policy_check(
rule, rule_name, self.controller._action_trigger_crash_dump,
self.req, FAKE_UUID, body={'trigger_crash_dump': None})
@mock.patch('nova.compute.api.API.trigger_crash_dump')
@mock.patch.object(servers.ServersController, '_get_instance')
def test_trigger_crash_dump_overridden_policy_pass_with_same_project(
self, _get_instance_mock, trigger_crash_dump_mock):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
project_id=self.req.environ['nova.context'].project_id)
_get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:trigger_crash_dump"
self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
self.req.api_version_request = (
api_version_request.APIVersionRequest('2.17'))
self.controller._action_trigger_crash_dump(
self.req, fakes.FAKE_UUID, body={'trigger_crash_dump': None})
trigger_crash_dump_mock.assert_called_once_with(
self.req.environ['nova.context'], instance)
@mock.patch.object(servers.ServersController, '_get_instance')
def test_trigger_crash_dump_overridden_policy_failed_with_other_user(
self, _get_instance_mock):
_get_instance_mock.return_value = (
fake_instance.fake_instance_obj(self.req.environ['nova.context']))
rule_name = "os_compute_api:servers:trigger_crash_dump"
self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
self.req.api_version_request = (
api_version_request.APIVersionRequest('2.17'))
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller._action_trigger_crash_dump,
self.req,
fakes.FAKE_UUID,
body={'trigger_crash_dump': None})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
@mock.patch('nova.compute.api.API.trigger_crash_dump')
@mock.patch.object(servers.ServersController, '_get_instance')
def test_trigger_crash_dump_overridden_policy_pass_with_same_user(
self, _get_instance_mock, trigger_crash_dump_mock):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
user_id=self.req.environ['nova.context'].user_id)
_get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:trigger_crash_dump"
self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
self.req.api_version_request = (
api_version_request.APIVersionRequest('2.17'))
self.controller._action_trigger_crash_dump(
self.req, fakes.FAKE_UUID, body={'trigger_crash_dump': None})
trigger_crash_dump_mock.assert_called_once_with(
self.req.environ['nova.context'], instance)
def test_index_policy_failed(self):
rule_name = "os_compute_api:servers:index"
rule = {rule_name: "project:non_fake"}
self._common_policy_check(
rule, rule_name, self.controller.index, self.req)
def test_detail_policy_failed(self):
rule_name = "os_compute_api:servers:detail"
rule = {rule_name: "project:non_fake"}
self._common_policy_check(
rule, rule_name, self.controller.detail, self.req)
def test_detail_get_tenants_policy_failed(self):
req = fakes.HTTPRequest.blank('')
req.GET["all_tenants"] = "True"
rule_name = "os_compute_api:servers:detail:get_all_tenants"
rule = {rule_name: "project:non_fake"}
self._common_policy_check(
rule, rule_name, self.controller._get_servers, req, True)
def test_index_get_tenants_policy_failed(self):
req = fakes.HTTPRequest.blank('')
req.GET["all_tenants"] = "True"
rule_name = "os_compute_api:servers:index:get_all_tenants"
rule = {rule_name: "project:non_fake"}
self._common_policy_check(
rule, rule_name, self.controller._get_servers, req, False)
@mock.patch.object(common, 'get_instance')
def test_show_policy_failed(self, get_instance_mock):
get_instance_mock.return_value = None
rule_name = "os_compute_api:servers:show"
rule = {rule_name: "project:non_fake"}
self._common_policy_check(
rule, rule_name, self.controller.show, self.req, FAKE_UUID)
@mock.patch.object(common, 'get_instance')
def test_delete_policy_failed_with_other_project(self, get_instance_mock):
get_instance_mock.return_value = fake_instance.fake_instance_obj(
self.req.environ['nova.context'])
rule_name = "os_compute_api:servers:delete"
rule = {rule_name: "project_id:%(project_id)s"}
# Change the project_id in request context.
self.req.environ['nova.context'].project_id = 'other-project'
self._common_policy_check(
rule, rule_name, self.controller.delete, self.req, FAKE_UUID)
@mock.patch('nova.compute.api.API.soft_delete')
@mock.patch('nova.api.openstack.common.get_instance')
def test_delete_overridden_policy_pass_with_same_project(self,
get_instance_mock,
soft_delete_mock):
self.flags(reclaim_instance_interval=3600)
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
project_id=self.req.environ['nova.context'].project_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:delete"
self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
self.controller.delete(self.req, fakes.FAKE_UUID)
soft_delete_mock.assert_called_once_with(
self.req.environ['nova.context'], instance)
@mock.patch('nova.api.openstack.common.get_instance')
def test_delete_overridden_policy_failed_with_other_user_in_same_project(
self, get_instance_mock):
get_instance_mock.return_value = (
fake_instance.fake_instance_obj(self.req.environ['nova.context']))
rule_name = "os_compute_api:servers:delete"
rule = {rule_name: "user_id:%(user_id)s"}
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
self._common_policy_check(
rule, rule_name, self.controller.delete, self.req, FAKE_UUID)
@mock.patch('nova.compute.api.API.soft_delete')
@mock.patch('nova.api.openstack.common.get_instance')
def test_delete_overridden_policy_pass_with_same_user(self,
get_instance_mock,
soft_delete_mock):
self.flags(reclaim_instance_interval=3600)
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
user_id=self.req.environ['nova.context'].user_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:delete"
self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
self.controller.delete(self.req, fakes.FAKE_UUID)
soft_delete_mock.assert_called_once_with(
self.req.environ['nova.context'], instance)
@mock.patch.object(common, 'get_instance')
def test_update_policy_failed_with_other_project(self, get_instance_mock):
get_instance_mock.return_value = fake_instance.fake_instance_obj(
self.req.environ['nova.context'])
rule_name = "os_compute_api:servers:update"
rule = {rule_name: "project_id:%(project_id)s"}
body = {'server': {'name': 'server_test'}}
# Change the project_id in request context.
self.req.environ['nova.context'].project_id = 'other-project'
self._common_policy_check(
rule, rule_name, self.controller.update, self.req,
FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.compute.views.servers.ViewBuilder.show')
@mock.patch.object(compute_api.API, 'update_instance')
@mock.patch.object(common, 'get_instance')
def test_update_overridden_policy_pass_with_same_project(
self, get_instance_mock, update_instance_mock, view_show_mock):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
project_id=self.req.environ['nova.context'].project_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:update"
self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
body = {'server': {'name': 'server_test'}}
self.controller.update(self.req, fakes.FAKE_UUID, body=body)
@mock.patch.object(common, 'get_instance')
def test_update_overridden_policy_failed_with_other_user_in_same_project(
self, get_instance_mock):
get_instance_mock.return_value = (
fake_instance.fake_instance_obj(self.req.environ['nova.context']))
rule_name = "os_compute_api:servers:update"
rule = {rule_name: "user_id:%(user_id)s"}
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
body = {'server': {'name': 'server_test'}}
self._common_policy_check(
rule, rule_name, self.controller.update, self.req,
FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.common.'
'instance_has_port_with_resource_request', return_value=False)
@mock.patch('nova.api.openstack.compute.views.servers.ViewBuilder.show')
@mock.patch.object(compute_api.API, 'update_instance')
@mock.patch.object(common, 'get_instance')
def test_update_overridden_policy_pass_with_same_user(self,
get_instance_mock,
update_instance_mock,
view_show_mock,
mock_port_check):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
user_id=self.req.environ['nova.context'].user_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:update"
self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
body = {'server': {'name': 'server_test'}}
self.controller.update(self.req, fakes.FAKE_UUID, body=body)
def test_confirm_resize_policy_failed(self):
rule_name = "os_compute_api:servers:confirm_resize"
rule = {rule_name: "project:non_fake"}
body = {'server': {'name': 'server_test'}}
self._common_policy_check(
rule, rule_name, self.controller._action_confirm_resize,
self.req, FAKE_UUID, body=body)
def test_revert_resize_policy_failed(self):
rule_name = "os_compute_api:servers:revert_resize"
rule = {rule_name: "project:non_fake"}
body = {'server': {'name': 'server_test'}}
self._common_policy_check(
rule, rule_name, self.controller._action_revert_resize,
self.req, FAKE_UUID, body=body)
def test_reboot_policy_failed(self):
rule_name = "os_compute_api:servers:reboot"
rule = {rule_name: "project:non_fake"}
body = {'reboot': {'type': 'HARD'}}
self._common_policy_check(
rule, rule_name, self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.common.get_instance')
def test_resize_policy_failed_with_other_project(self, get_instance_mock):
get_instance_mock.return_value = (
fake_instance.fake_instance_obj(self.req.environ['nova.context']))
rule_name = "os_compute_api:servers:resize"
rule = {rule_name: "project_id:%(project_id)s"}
body = {'resize': {'flavorRef': '1'}}
# Change the project_id in request context.
self.req.environ['nova.context'].project_id = 'other-project'
self._common_policy_check(
rule, rule_name, self.controller._action_resize, self.req,
FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.common.'
'instance_has_port_with_resource_request', return_value=False)
@mock.patch('nova.compute.api.API.resize')
@mock.patch('nova.api.openstack.common.get_instance')
def test_resize_overridden_policy_pass_with_same_project(self,
get_instance_mock,
resize_mock,
mock_post_check):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
project_id=self.req.environ['nova.context'].project_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:resize"
self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
body = {'resize': {'flavorRef': '1'}}
self.controller._action_resize(self.req, fakes.FAKE_UUID, body=body)
resize_mock.assert_called_once_with(self.req.environ['nova.context'],
instance, '1',
auto_disk_config=None)
@mock.patch('nova.api.openstack.common.get_instance')
def test_resize_overridden_policy_failed_with_other_user_in_same_project(
self, get_instance_mock):
get_instance_mock.return_value = (
fake_instance.fake_instance_obj(self.req.environ['nova.context']))
rule_name = "os_compute_api:servers:resize"
rule = {rule_name: "user_id:%(user_id)s"}
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
body = {'resize': {'flavorRef': '1'}}
self._common_policy_check(
rule, rule_name, self.controller._action_resize, self.req,
FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.common.'
'instance_has_port_with_resource_request', return_value=False)
@mock.patch('nova.compute.api.API.resize')
@mock.patch('nova.api.openstack.common.get_instance')
def test_resize_overridden_policy_pass_with_same_user(self,
get_instance_mock,
resize_mock,
mock_port_check):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
user_id=self.req.environ['nova.context'].user_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:resize"
self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
body = {'resize': {'flavorRef': '1'}}
self.controller._action_resize(self.req, fakes.FAKE_UUID, body=body)
resize_mock.assert_called_once_with(self.req.environ['nova.context'],
instance, '1',
auto_disk_config=None)
@mock.patch('nova.api.openstack.common.get_instance')
def test_rebuild_policy_failed_with_other_project(self, get_instance_mock):
get_instance_mock.return_value = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
project_id=self.req.environ['nova.context'].project_id)
rule_name = "os_compute_api:servers:rebuild"
rule = {rule_name: "project_id:%(project_id)s"}
body = {'rebuild': {'imageRef': self.image_uuid}}
# Change the project_id in request context.
self.req.environ['nova.context'].project_id = 'other-project'
self._common_policy_check(
rule, rule_name, self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.common.get_instance')
def test_rebuild_overridden_policy_failed_with_other_user_in_same_project(
self, get_instance_mock):
get_instance_mock.return_value = (
fake_instance.fake_instance_obj(self.req.environ['nova.context']))
rule_name = "os_compute_api:servers:rebuild"
rule = {rule_name: "user_id:%(user_id)s"}
body = {'rebuild': {'imageRef': self.image_uuid}}
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
self._common_policy_check(
rule, rule_name, self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.api.openstack.compute.views.servers.ViewBuilder.show')
@mock.patch('nova.compute.api.API.rebuild')
@mock.patch('nova.api.openstack.common.get_instance')
def test_rebuild_overridden_policy_pass_with_same_user(self,
get_instance_mock,
rebuild_mock,
view_show_mock):
instance = fake_instance.fake_instance_obj(
self.req.environ['nova.context'],
user_id=self.req.environ['nova.context'].user_id)
get_instance_mock.return_value = instance
rule_name = "os_compute_api:servers:rebuild"
self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
body = {'rebuild': {'imageRef': self.image_uuid,
'adminPass': 'dumpy_password'}}
self.controller._action_rebuild(self.req, fakes.FAKE_UUID, body=body)
rebuild_mock.assert_called_once_with(self.req.environ['nova.context'],
instance,
self.image_uuid,
'dumpy_password')
def test_create_image_policy_failed(self):
rule_name = "os_compute_api:servers:create_image"
rule = {rule_name: "project:non_fake"}
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
self._common_policy_check(
rule, rule_name, self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.compute.utils.is_volume_backed_instance',
return_value=True)
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
@mock.patch.object(servers.ServersController, '_get_server')
def test_create_vol_backed_img_snapshotting_policy_blocks_project(self,
mock_get_server,
mock_get_uuidi,
mock_is_vol_back):
"""Don't permit a snapshot of a volume backed instance if configured
not to based on project
"""
rule_name = "os_compute_api:servers:create_image:allow_volume_backed"
rules = {
rule_name: "project:non_fake",
"os_compute_api:servers:create_image": "",
}
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
self._common_policy_check(
rules, rule_name, self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.compute.utils.is_volume_backed_instance',
return_value=True)
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
@mock.patch.object(servers.ServersController, '_get_server')
def test_create_vol_backed_img_snapshotting_policy_blocks_role(self,
mock_get_server,
mock_get_uuidi,
mock_is_vol_back):
"""Don't permit a snapshot of a volume backed instance if configured
not to based on role
"""
rule_name = "os_compute_api:servers:create_image:allow_volume_backed"
rules = {
rule_name: "role:non_fake",
"os_compute_api:servers:create_image": "",
}
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
self._common_policy_check(
rules, rule_name, self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def _create_policy_check(self, rules, rule_name):
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': self.image_uuid,
'flavorRef': flavor_ref,
'availability_zone': "zone1:host1:node1",
'block_device_mapping': [{'device_name': "/dev/sda1"}],
'networks': [{'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}],
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
self._common_policy_check(
rules, rule_name, self.controller.create, self.req, body=body)
def test_create_policy_failed(self):
rule_name = "os_compute_api:servers:create"
rules = {rule_name: "project:non_fake"}
self._create_policy_check(rules, rule_name)
def test_create_forced_host_policy_failed(self):
rule_name = "os_compute_api:servers:create:forced_host"
rule = {"os_compute_api:servers:create": "@",
rule_name: "project:non_fake"}
self._create_policy_check(rule, rule_name)
def test_create_attach_volume_policy_failed(self):
rule_name = "os_compute_api:servers:create:attach_volume"
rules = {"os_compute_api:servers:create": "@",
"os_compute_api:servers:create:forced_host": "@",
rule_name: "project:non_fake"}
self._create_policy_check(rules, rule_name)
    def test_create_attach_network_policy_failed(self):
rule_name = "os_compute_api:servers:create:attach_network"
rules = {"os_compute_api:servers:create": "@",
"os_compute_api:servers:create:forced_host": "@",
"os_compute_api:servers:create:attach_volume": "@",
rule_name: "project:non_fake"}
self._create_policy_check(rules, rule_name)
class ServersActionsJsonTestV239(test.NoDBTestCase):
def setUp(self):
super(ServersActionsJsonTestV239, self).setUp()
self.controller = servers.ServersController()
self.req = fakes.HTTPRequest.blank('', version='2.39')
@mock.patch.object(common, 'check_img_metadata_properties_quota')
@mock.patch.object(common, 'get_instance')
def test_server_create_image_no_quota_checks(self, mock_get_instance,
mock_check_quotas):
        # 'mock_get_instance' is made to raise so the action bails out early,
        # letting the test focus on whether the quota check is performed.
mock_get_instance.side_effect = webob.exc.HTTPNotFound
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_create_image, self.req,
FAKE_UUID, body=body)
# starting from version 2.39 no quota checks on Nova side are performed
# for 'createImage' action after removing 'image-metadata' proxy API
mock_check_quotas.assert_not_called()
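        # Rough sketch of the behaviour under test (an assumed shape, not a
        # quote of the real handler): the createImage action gates the quota
        # check on the request microversion, so from 2.39 on it never runs:
        #
        #   if api_version_request.is_supported(req, max_version='2.38'):
        #       common.check_img_metadata_properties_quota(context, metadata)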
| apache-2.0 | 1,245,279,963,337,041,000 | 43.045867 | 79 | 0.557206 | false |
hansehe/Wind-Blade-Inspection | TestUnits/Test_src/Test_DroneVision/Test_DroneVision_src/Test_imgProcessing/Test_Heading/Test_EdgeHeading.py | 1 | 4905 | '''
Author: Hans Erik Heggem
Email: [email protected]
Project: Master's Thesis - Autonomous Inspection Of Wind Blades
Repository: Master's Thesis - CV (Computer Vision)
'''
################### UNIT TEST ########################
import unittest
from Settings.TestData import TestData
from TestUnits.Test_main import Test_main
'''
@brief Test unit for EdgeHeading
'''
class Test_EdgeHeading(unittest.TestCase, Test_main, TestData):
def setUp(self):
'''
@brief Perform all setup needed by the unit test.
'''
self.SetAllKey()
self.InitTestData()
#### IMPORTS #####
from Settings import Settings
from src.DroneVision.DroneVision_src.imgProcessing.Heading import EdgeHeading
from src.DroneVision.DroneVision_src.imgProcessing.featureDetection.PointDetection import PointDetection
self.Settings = Settings
self.EdgeHeading = EdgeHeading
self.PointDetection = PointDetection
##################
def tearDown(self):
'''
@brief Perform all tear-down steps.
Runs even if the test failed.
'''
pass
def test_EdgeHeading(self):
'''
@brief Main test entry point.
Append the functions to be tested for this unit.
'''
###### START TEST #####
for folder, left_frames, right_frames, actual_distances, baselines, use_set in self.GetFrameSets():
if use_set:
for fn_frame, fn_slframe in left_frames:
self.TestEdgeHeading(folder, fn_frame, fn_slframe)
###########################
def TestEdgeHeading(self, folder, fn_frame, fn_slframe):
'''
@brief Test function for EdgeHeading unit.
@param folder Input folder
@param fn_frame Frame filename without points.
@param fn_slframe Frame filename with points.
'''
import timeit
import numpy as np
from src.DroneVision.DroneVision_src.hardware.imageTools import GetImage, MatplotShow
print '\n'
print '#----------- TESTING EDGE HEADING PROCESSING \t---------------#'
print '#----------- Image without points: {0} \t---------------#'.format(fn_frame)
print '#----------- Image with points: {0} \t---------------#'.format(fn_slframe)
settings_inst = self.Settings.Settings()
fn_frame = folder + fn_frame
fn_slframe = folder + fn_slframe
delay = timeit.default_timer()
frame = GetImage(fn_frame)
sl_frame = GetImage(fn_slframe)
print 'Delay reading images: {0} sec'.format(timeit.default_timer() - delay)
edgeHeading = self.EdgeHeading.EdgeHeading()
pointDet = self.PointDetection.PointDetection(True, settings_inst.GetSettings())
pointDet.CalibratePointDetection()
print 'Min distance between blobs: {0}'.format(pointDet.GetMinDistanceBetweenBlobs())
total_delay = timeit.default_timer()
delay = timeit.default_timer()
delta_frame, point_kp, blob_desc, frame_un, sl_frame_un = pointDet.GetPointList(frame, sl_frame, draw=True)
print 'Delay for blob point detection: {0} sec, detected blobs: {1}'.format(timeit.default_timer() - delay, len(point_kp))
delay = timeit.default_timer()
hough_frame, edgel_map_filtered, boundary_hough_lines = pointDet.GetBoundaryHoughLines(frame_un, delta_frame, point_kp, draw=True, print_hough_positions=True)
print 'Delay for finding boundary edges (filtered) + lines: {0} sec'.format(timeit.default_timer() - delay)
delay = timeit.default_timer()
selected_hor_edge_heading, selected_vert_edge_heading, possible_hor_edge_headings, possible_vert_edge_headings = edgeHeading.ComputeEdgeHeading(edgel_map_filtered, boundary_hough_lines, draw=False)
print 'Delay for finding edge heading angle: {0} sec, hor_edge_heading = {1}, vert_edge_heading = {2}'.format(timeit.default_timer() - delay, selected_hor_edge_heading, selected_vert_edge_heading)
timeout = timeit.default_timer() - total_delay
print 'Total delay for downscaling + undistort + blob + hough lines + bounded lines + edge heading: {0} sec'.format(timeout)
edgel_map_filtered_all_headings = np.array(edgel_map_filtered, dtype=edgel_map_filtered.dtype)
selected_hor_edge_heading, selected_vert_edge_heading, possible_hor_edge_headings, possible_vert_edge_headings, edgel_map_filtered_all_headings = edgeHeading.ComputeEdgeHeading(edgel_map_filtered_all_headings, boundary_hough_lines, draw=True)
touple_frames = []
#touple_frames.append(('SL frame', sl_frame))
#touple_frames.append(('SL undistorted', sl_frame_un))
#touple_frames.append(('Original points', delta_frame))
#touple_frames.append(('Hough lines', hough_frame))
#touple_frames.append(('Selected edge heading', edgel_map_filtered))
touple_frames.append(('Possible edge headings', edgel_map_filtered_all_headings))
print 'max_hor = BLUE, min_hor = RED, max_vert = PURPLE, min_vert = GREEN'
if not(self.CheckAllTests()):
MatplotShow(touple_frames, fn_frame+'_Edge_heading_test', savefig_folder=self.savefig_folder+'edge_heading_test/', save_fig=self.save_figs, save_fig_only=self.save_figs_only, inlude_main_title_in_plot=False)
| mit | 4,514,450,617,151,839,000 | 42.035088 | 244 | 0.707441 | false |
lddias/python-avs | debug.py | 1 | 1030 | import time
def fake_mic(logger, q, mic_stopped):
time.sleep(60)
logger.debug("TRIGGERED")
class StoppableAudioStreamLike:
def __init__(self, file):
self._f = file
self._eof = False
self._last_byte = None
def read(self, size=-1):
if mic_stopped.is_set():
logger.info("MIC STOP REQUESTED")
mic_stopped.clear()
return b''
if self._eof:
ret = self._last_byte
else:
ret = self._f.read(size)
if len(ret) < size:
self._last_byte = ret[-1:]
self._eof = True
ret += ret[-1:] * (size - len(ret))
assert len(ret) == size
return ret
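    # Behaviour sketch (illustrative): once the wav data is exhausted, read()
    # keeps padding with the final byte so callers always receive exactly
    # `size` bytes, e.g. with b'\x01\x02' left, read(4) -> b'\x01\x02\x02\x02'.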
q.put(('hotword', StoppableAudioStreamLike(open('flashbriefing2.wav', 'rb')), mic_stopped))
def fake_mic2(logger, q, mic_stopped):
time.sleep(3)
logger.debug("TRIGGERED")
q.put(('hotword', open('timer.wav', 'rb'), None))
| mit | -3,517,990,759,981,812,000 | 27.611111 | 95 | 0.495146 | false |
tristandb/CaDaBata | cadabata.py | 1 | 1545 | from eca import *
from eca.generators import start_offline_tweets
import datetime
@event('init')
def setup(ctx, e):
'''The code that will be executed at initialization: starting the offline tweet stream.'''
start_offline_tweets('cadabata_static/batatweets.txt', 'tweet', time_factor=100000, arff_file='classifiers/bata_2014_classifier.arff')
@event('tweet')
def tweet(ctx, e):
    '''The code that will be executed when a tweet is received.'''
# The tweet data.
tweet = e.data
    # Map the tweet's predicted class to a readable label.
    tweetclass = classify_tweet(tweet['extra']['class_predicted_by: NaiveBayes'])
# Parse the time and date of the tweet. This has to be done with '{}'.format(), otherwise
# it can't be JSON encoded.
time = '{}'.format(datetime.datetime.strptime(tweet['created_at'], '%a %b %d %H:%M:%S %z %Y'))
# Print to the console, so we know something is happening.
print('Tweet classified (and emitted) as:',tweetclass)
# Emit to the right handler.
emit('tweet_'+tweetclass, e.data)
# Emit to the graph.
emit('tweet_flot', {
'action': 'add',
'series': tweetclass,
'time': time,
'value': 1
    })
def classify_tweet(cls):
    '''Map the classification code cls to a readable label. Default is neutral.'''
o = 'neutral'
if cls == 'T':
o = 'positive'
elif cls == 'N':
o = 'neutral'
elif cls == 'F':
o = 'negative'
elif cls == 'A':
o = 'alert'
return o
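# Usage sketch (illustrative):
#   classify_tweet('T') -> 'positive'    classify_tweet('F') -> 'negative'
#   classify_tweet('A') -> 'alert'       any other code      -> 'neutral'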
| mit | 3,844,286,205,517,304,000 | 27.090909 | 138 | 0.604531 | false |
jeffposnick/chromium-dashboard | common.py | 1 | 6649 | # -*- coding: utf-8 -*-
# Copyright 2013 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Eric Bidelman)'
import datetime
import json
import logging
import webapp2
# App Engine imports.
from google.appengine.api import users
from django.template.loader import render_to_string
from django.utils import feedgenerator
import models
import settings
class BaseHandler(webapp2.RequestHandler):
def __init__(self, request, response):
self.initialize(request, response)
# Add CORS and Chrome Frame to all responses.
self.response.headers.add_header('Access-Control-Allow-Origin', '*')
self.response.headers.add_header('X-UA-Compatible', 'IE=Edge,chrome=1')
# Settings can't be global in python 2.7 env.
logging.getLogger().setLevel(logging.DEBUG)
class JSONHandler(BaseHandler):
def __truncate_day_percentage(self, data):
    # Need 6 decimals b/c num will be multiplied by 100 to get a percentage.
data.day_percentage = float("%.*f" % (6, data.day_percentage))
return data
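    # e.g. 0.12345678 -> 0.123457 (illustrative), shown client-side as ~12.3457%.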
def _is_googler(self, user):
return user and user.email().endswith('@google.com')
def _clean_data(self, data):
user = users.get_current_user()
# Show raw day percentage numbers if user is a googler.
if not self._is_googler(user):
data = map(self.__truncate_day_percentage, data)
return data
def get(self, data, formatted=False, public=True):
cache_type = 'public'
if not public:
cache_type = 'private'
# Cache script generated json responses.
self.response.headers['Cache-Control'] = '%s, max-age=%s' % (
cache_type, settings.DEFAULT_CACHE_TIME)
self.response.headers['Content-Type'] = 'application/json;charset=utf-8'
if formatted:
return self.response.write(json.dumps(data, separators=(',',':')))
else:
data = [entity.to_dict() for entity in data]
return self.response.write(json.dumps(data, separators=(',',':')))
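    # Note (illustrative): the compact separators keep payloads small, e.g.
    #   json.dumps({'a': 1, 'b': 2}, separators=(',',':')) -> '{"a":1,"b":2}'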
class ContentHandler(BaseHandler):
def _is_user_whitelisted(self, user):
if not user:
return False
is_whitelisted = False
if users.is_current_user_admin():
is_whitelisted = True
elif user.email().endswith('@chromium.org'):
is_whitelisted = True
else:
# TODO(ericbidelman): memcache user lookup.
query = models.AppUser.all(keys_only=True).filter('email =', user.email())
found_user = query.get()
if found_user is not None:
is_whitelisted = True
return is_whitelisted
def _add_common_template_values(self, d):
"""Mixin common values for templates into d."""
template_data = {
'prod': settings.PROD,
'APP_TITLE': settings.APP_TITLE,
'current_path': self.request.path,
'VULCANIZE': settings.VULCANIZE
}
user = users.get_current_user()
if user:
template_data['login'] = (
'Logout', users.create_logout_url(dest_url=self.request.path))
template_data['user'] = {
'is_whitelisted': self._is_user_whitelisted(user),
'is_admin': users.is_current_user_admin(),
'email': user.email(),
}
else:
template_data['user'] = None
template_data['login'] = (
'Login', users.create_login_url(dest_url=self.request.path))
d.update(template_data)
def render(self, data={}, template_path=None, status=None, message=None,
relpath=None):
if status is not None and status != 200:
self.response.set_status(status, message)
# Add common template data to every request.
self._add_common_template_values(data)
try:
self.response.out.write(render_to_string(template_path, data))
except Exception:
handle_404(self.request, self.response, Exception)
def render_atom_feed(self, title, data):
features_url = '%s://%s%s' % (self.request.scheme,
self.request.host,
self.request.path.replace('.xml', ''))
feature_url_prefix = '%s://%s%s' % (self.request.scheme,
self.request.host,
'/feature')
feed = feedgenerator.Atom1Feed(
title=unicode('%s - %s' % (settings.APP_TITLE, title)),
link=features_url,
description=u'New features exposed to web developers',
language=u'en'
)
for f in data:
pubdate = datetime.datetime.strptime(str(f['updated'][:19]),
'%Y-%m-%d %H:%M:%S')
feed.add_item(
title=unicode(f['name']),
link='%s/%s' % (feature_url_prefix, f.get('id')),
description=f.get('summary', ''),
pubdate=pubdate,
author_name=unicode(settings.APP_TITLE),
categories=[f['category']]
)
self.response.headers.add_header('Content-Type',
'application/atom+xml;charset=utf-8')
self.response.out.write(feed.writeString('utf-8'))
def handle_401(request, response, exception):
ERROR_401 = (
'<style>'
'body { padding: 2em; }'
'h1, h2 { font-weight: 300; font-family: "Roboto", sans-serif; }\n'
'</style>\n'
'<title>401 Unauthorized</title>\n'
'<h1>Error: Unauthorized</h1>\n'
'<h2>User does not have permission to view this page.</h2>')
response.write(ERROR_401)
response.set_status(401)
def handle_404(request, response, exception):
ERROR_404 = (
'<style>'
'body { padding: 2em; }'
'h1, h2 { font-weight: 300; font-family: "Roboto", sans-serif; }\n'
'</style>\n'
'<title>404 Not Found</title>\n'
'<h1>Error: Not Found</h1>\n'
'<h2>The requested URL was not found on this server.'
'</h2>')
response.write(ERROR_404)
response.set_status(404)
def handle_500(request, response, exception):
logging.exception(exception)
ERROR_500 = (
'<style>'
'body { padding: 2em; }'
'h1, h2 { font-weight: 300; font-family: "Roboto", sans-serif; }\n'
'</style>\n'
'<title>500 Internal Server Error</title>\n'
'<h1>Error: 500 Internal Server Error</h1>')
response.write(ERROR_500)
response.set_status(500)
| apache-2.0 | 5,276,832,687,915,904,000 | 30.813397 | 80 | 0.630772 | false |
sdrdis/iarpa_contest_submission | lib_exec/StereoPipeline/libexec/asp_cmd_utils.py | 1 | 6816 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# __BEGIN_LICENSE__
# Copyright (c) 2009-2013, United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The NGT platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# __END_LICENSE__
"""IrgSystemFunctions.py - General systems related utilities"""
import sys, os, re, shutil, subprocess, string, time, errno, multiprocessing
def isCmdOption(arg):
"""Returns True if the string is a command line option,
False otherwise (if it is an argument)"""
# An option must start with '-' and not consist of all numbers
if ( arg.startswith('-') and not re.match('^-[0-9.]+$', arg) ):
return True
else:
return False
# The following functions are useful for going between string and list
# representations of command line arguments
def isNotString(a):
"""Returns true if the object is not a string"""
return (not isinstance(a, basestring))
def argListToString(argList):
"""Converts a list of arguments into a single argument string"""
string = ""
for arg in argList:
stringVersion = str(arg)
# Wrap arguments with spaces in them in "" so they stay together
if stringVersion.find(' ') >= 0:
string = string + '"' + stringVersion + '" '
else:
string = string + stringVersion + ' '
return string
def stringToArgList(string):
"""Converts a single argument string into a list of arguments"""
return string.split(" ")
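# Round-trip sketch (illustrative): simple tokens survive both directions,
# but quoted arguments are split naively on the way back.
#   argListToString(['ls', '-l', 'my dir'])  ->  'ls -l "my dir" '
#   stringToArgList('ls -l')                 ->  ['ls', '-l']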
# TODO: Improve this function a bit
def executeCommand(cmd,
outputPath=None, # If given, throw if the file is not created. Don't run if it already exists.
suppressOutput=False, # If true, don't print anything!
force=False): # If true , run even if outputPath already exists.
'''Executes a command with multiple options'''
if cmd == '': # An empty task
return
# Convert the input to list format if needed
if not isNotString(cmd):
cmd = stringToArgList(cmd)
# Run the command if conditions are met
if force or (not outputPath) or (not os.path.exists(outputPath)):
if suppressOutput: # Process silently
FNULL = open(os.devnull, 'w')
subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT)
else: # Display output
print cmd
subprocess.call(cmd)
# Optionally check that the output file was created
if outputPath and (not os.path.exists(outputPath)):
raise CmdRunException('Failed to create output file: ' + outputPath)
return True
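# Usage sketch (illustrative; the tool and file names are placeholders):
# skip the run when the product already exists, and raise CmdRunException
# if the output file is not produced.
#
#   executeCommand(['gdal_translate', 'in.tif', 'out.tif'],
#                  outputPath='out.tif', suppressOutput=True)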
#==================================================
# This class implements a variant of OptionParser which ignores unknown options.
from optparse import (OptionParser,BadOptionError,AmbiguousOptionError)
class PassThroughOptionParser(OptionParser):
# Overwrite the default implementation which deletes newlines
def format_epilog(self, formatter):
return self.epilog
def _process_args(self, largs, rargs, values):
while rargs:
try:
self._process_args2(largs,rargs,values)
except (BadOptionError,AmbiguousOptionError) as e: # On failure, pass option to output list
if sys.version_info < (2, 6, 0):
# Port to Python 2.4
p = re.match("^.*?no such option:\s*(.*?)$", e.msg)
if p:
largs.append(p.group(1))
else:
largs.append(e.opt_str)
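    # Usage sketch (illustrative): unknown flags and bare negative numbers end
    # up in the leftover-args list instead of raising, e.g.
    #   parser = PassThroughOptionParser()
    #   parser.add_option('--threads', type='int')
    #   opts, args = parser.parse_args(['--threads', '4', '--alpha', '-3'])
    #   # opts.threads == 4 ; args == ['--alpha', '-3']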
# This version of the function successfully passes through negative numbers
def _process_args2(self, largs, rargs, values):
"""_process_args(largs : [string],
rargs : [string],
values : Values)
Process command-line arguments and populate 'values', consuming
options and arguments from 'rargs'. If 'allow_interspersed_args' is
false, stop at the first non-option argument. If true, accumulate any
interspersed non-option arguments in 'largs'.
"""
while rargs:
arg = rargs[0]
p = re.match('^-[0-9.]+$', arg) # Identify a numeric argument
if p:
del rargs[0]
raise BadOptionError(arg)
#self.error(_("%s unrecognized number in arguments") % arg)
# We handle bare "--" explicitly, and bare "-" is handled by the
# standard arg handler since the short arg case ensures that the
# len of the opt string is greater than 1.
if arg == "--":
del rargs[0]
return
elif arg[0:2] == "--":
# process a single long option (possibly with value(s))
OptionParser._process_long_opt(self, rargs, values)
elif arg[:1] == "-" and len(arg) > 1:
# process a cluster of short options (possibly with
# value(s) for the last one only)
OptionParser._process_short_opts(self, rargs, values)
elif self.allow_interspersed_args:
largs.append(arg)
del rargs[0]
else:
return # stop now, leave this arg in rargs
# Say this is the original argument list:
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
# ^
# (we are about to process arg(i)).
#
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
# [arg0, ..., arg(i-1)] (any options and their arguments will have
# been removed from largs).
#
# The while loop will usually consume 1 or more arguments per pass.
# If it consumes 1 (eg. arg is an option that takes no arguments),
# then after _process_arg() is done the situation is:
#
# largs = subset of [arg0, ..., arg(i)]
# rargs = [arg(i+1), ..., arg(N-1)]
#
# If allow_interspersed_args is false, largs will always be
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
# not a very interesting subset!
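# Example (illustrative only): unknown options are passed through into the
# positional argument list instead of aborting with an error.
#   parser = PassThroughOptionParser()
#   parser.add_option('--known', action='store_true')
#   (options, args) = parser.parse_args(['--known', '--unknown', '-3.5', 'f'])
#   # -> options.known == True, args == ['--unknown', '-3.5', 'f']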
| mit | -4,660,527,434,852,196,000 | 37.948571 | 119 | 0.59375 | false |
f-frhs/queequeg | document.py | 1 | 5023 | #!/usr/bin/env python
## $Id: document.py,v 1.2 2003/07/27 13:54:05 euske Exp $
##
## document.py - Document analyzer (HTML/TeX/PlainText)
##
import re, sys
from texparser import TexParser, TexTokenizer
from sgmllib_rev import SGMLParser
from abstfilter import AbstractFeeder, AbstractFilter, AbstractConsumer
class HTMLProcessor(SGMLParser, AbstractFeeder):
def __init__(self, next_filter):
AbstractFeeder.__init__(self, next_filter)
SGMLParser.__init__(self)
self.t = 0
self.ok = 1
return
def handle_data(self, s):
if not self.ok:
return
if s:
self.feed_next(s)
self.t = 1
return
def newline(self):
if self.t:
self.t = 0
self.feed_next(None)
return
def do_p(self, attrs):
self.newline()
return
def do_br(self, attrs):
self.newline()
return
def do_th(self, attrs):
self.newline()
return
def do_td(self, attrs):
self.newline()
return
def do_li(self, attrs):
self.newline()
return
def do_hr(self, attrs):
self.newline()
return
def do_h1(self, attrs):
self.newline()
return
def do_h2(self, attrs):
self.newline()
return
def do_h3(self, attrs):
self.newline()
return
def do_h4(self, attrs):
self.newline()
return
def do_h5(self, attrs):
self.newline()
return
def do_h6(self, attrs):
self.newline()
return
def start_style(self, attrs):
self.ok = 0
return
def end_style(self):
self.ok = 1
return
def start_script(self, attrs):
self.ok = 0
return
def end_script(self):
self.ok = 1
return
def close(self):
SGMLParser.close(self)
AbstractFeeder.close(self)
return
def read(self, f):
while 1:
s = f.readline()
if not s: break
self.feed(s)
self.close()
return
class TexProcessor(TexParser, AbstractFeeder):
def __init__(self, next_filter):
AbstractFeeder.__init__(self, next_filter)
TexParser.__init__(self)
self.next_paragraph = 0
self.t = 0
return
def process_paragraph(self):
if self.t:
self.feed_next(None)
self.next_paragraph = 0
return
def handle_data(self, data):
data1 = data.strip()
if not data1:
self.next_paragraph = 1
if self.next_paragraph:
self.process_paragraph()
if data1:
self.t = 1
self.feed_next(data)
return
def do_documentclass(self, arg):
return
def do_usepackage(self, arg):
return
def do_bibliography(self, arg):
return
def do_includegraphics(self, arg):
return
def do_cite(self, arg):
return
def do_ref(self, arg):
return
def do_label(self, arg):
return
def do_unknown_command(self, cmd):
return
def begin_tabular(self,arg):
return
def end_tabular(self):
return
# do not consider inline math expressions as individual sentences.
def begin_math(self):
return
def end_math(self):
return
def start_title(self):
self.next_paragraph = 1
return
def start_chapter(self):
self.next_paragraph = 1
return
def startchapter_a(self):
self.next_paragraph = 1
return
def startsection(self):
self.next_paragraph = 1
return
def startsection_a(self):
self.next_paragraph = 1
return
def startsubsection(self):
self.next_paragraph = 1
return
def startsubsection_a(self):
self.next_paragraph = 1
return
def startsubsubsection(self):
self.next_paragraph = 1
return
def startsubsubsection_a(self):
self.next_paragraph = 1
return
def do_tablesep(self):
self.next_paragraph = 1
return
def do_linebreak(self):
self.next_paragraph = 1
return
def do_item(self):
self.next_paragraph = 1
return
def begin_unknown_environment(self, env):
self.next_paragraph = 1
return
def close(self):
AbstractFeeder.close(self)
TexParser.close(self)
if self.next_paragraph:
self.process_paragraph()
return
def read(self, f):
tokenizer = TexTokenizer(f)
while 1:
t = tokenizer.get()
# print repr(t)
if not t: break
self.feed(t)
self.close()
return
class PlainTextProcessor(AbstractFeeder):
def __init__(self, next_filter):
AbstractFeeder.__init__(self, next_filter)
self.t = 0
return
def read(self, f):
while 1:
s = f.readline()
if not s: break
if not s.strip() and self.t:
self.feed_next(None)
else:
self.t = 1
self.feed_next(s)
self.close()
return
# main
if __name__ == "__main__":
class Consumer(AbstractConsumer):
def feed(self, s):
if s == None:
print "-"
else:
print repr(s)
return
if sys.argv[1] == "-t":
proc = TexProcessor
elif sys.argv[1] == "-l":
proc = HTMLProcessor
elif sys.argv[1] == "-p":
proc = PlainTextProcessor
else:
assert 0
proc(Consumer()).read(sys.stdin)
| gpl-2.0 | 445,269,795,249,341,760 | 18.93254 | 71 | 0.614175 | false |
kayhayen/Nuitka | tests/programs/absolute_import/foobar/foobar.py | 1 | 1043 | # Copyright 2021, Kay Hayen, mailto:[email protected]
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Using absolute import, do from module imports.
"""
from __future__ import absolute_import, print_function
from foobar import util
from . import local # pylint: disable=unused-import
class Foobar(object):
def __init__(self):
print(util.someFunction())
| apache-2.0 | -9,060,150,200,861,828,000 | 32.645161 | 79 | 0.710451 | false |
akelm/YAMS | yams/wzm_layer.py | 1 | 3735 | import numpy as np
from scipy import special
from sum_conv import sum_conv
def wzm_layer(ME1,MM1, MEdd, MMdd,Lambda,odl, Ceps,pin, taun, bn1mat,settings):
    ## field enhancement inside the layer
# (Cst{1}.ME,Cst{1}.MM, Cst{dd}.ME, Cst{dd}.MM,...
# lambda, dip_pos ,Cepsilon{dd},theta,stPinTaun )
nNbtheta=pin.shape[1]
theta=np.linspace(0,np.pi,nNbtheta)[None,:,None]
(Ecr_j,Ect_j,Esf_j)=PweEgenThetaAllPhi(Lambda,Ceps,\
bn1mat*(MMdd[:,:,0,0] - MMdd[:,:,0,1]*MM1[:,:,1,0]/MM1[:,:,1,1])[:,:,None],\
bn1mat*(MEdd[:,:,0,0] - MEdd[:,:,0,1]*ME1[:,:,1,0]/ME1[:,:,1,1])[:,:,None],\
odl,theta,'j',pin,taun,settings) # [L x 1 x T]
(Ecr_h,Ect_h,Esf_h)=PweEgenThetaAllPhi(Lambda,Ceps,\
bn1mat*(MMdd[:,:,1,0]- MMdd[:,:,1,1]*MM1[:,:,1,0]/MM1[:,:,1,1])[:,:,None],\
bn1mat*(MEdd[:,:,1,0] - MEdd[:,:,1,1]*ME1[:,:,1,0]/ME1[:,:,1,1])[:,:,None],\
odl,theta,'h1',pin,taun,settings) # [L x 1 x T]
Fexcperp= 3/2*np.matmul(np.absolute(Ecr_j+Ecr_h)**2, np.sin(theta)) \
/np.sum(np.sin(theta)) # L
# print(np.max(np.abs(MEdd[:,:,1,0]- MEdd[:,:,1,1]*ME1[:,:,1,0]/ME1[:,:,1,1]))) # L
Fexcpara = 3/4*(np.matmul(np.absolute(Ect_j+Ect_h)**2 + np.absolute(Esf_j+Esf_h)**2, \
np.sin(theta)) ) /np.sum(np.sin(theta))
return (Fexcperp[:,0,0],Fexcpara[:,0,0])
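# Example (illustrative only; argument shapes follow the comments above):
#   Fperp, Fpara = wzm_layer(ME1, MM1, MEdd, MMdd, Lambda, odl, Ceps,
#                            pin, taun, bn1mat, settings)
# returns one perpendicular and one parallel field-enhancement factor
# per wavelength in 'Lambda'.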
def PweEgenThetaAllPhi(Lambda,epsilon,cn1,dn1,r0,theta,sBessel,pin,taun,settings):
nNmax=cn1.shape[1]
nm1=np.arange(0,nNmax+1)[None,:,None] # 1 x nNmax+1
n=nm1[:,1:,:] # 1 x nNmax
cffnr=np.sqrt((2*n+1)/(4*np.pi)) # 1 x nNmax
mun=cffnr/(n*(n+1)) # 1 x nNmax
if r0==0:
Esf= (dn1[:,0,:]/np.sqrt(3*np.pi))[:,None]
Ecr=-Esf * np.sin(theta)
Ect=-Esf * np.cos(theta)
else:
# get Zn(rho) for radial dependence and derived functions
if np.isinf(r0):
# for far-field radiation profile
dn1Z1=0 # [L x 1]
icn1Z0=cn1 # [L x nNmax]
dn1Z2=dn1 # [L x nNmax]
mun=mun*((-1j)**(n+1)) # 1 x nNmax
else:
rho=(2*np.pi* np.sqrt(epsilon)/Lambda*r0)[:,:,None] # column [L x 1]
f=special.spherical_jn(nm1,rho) # [L x nNmax+1]
if sBessel=='h1':
f=f+1j*special.spherical_yn(nm1,rho) # [L x nNmax+1]
stZnAll_Z0=f[:,1:,:] # [L x nNmax]
stZnAll_Z1=stZnAll_Z0/rho # [L x nNmax]
stZnAll_Z2=f[:,:-1,:] - nm1[:,1:,:]*stZnAll_Z1 # [L x nNmax]
dn1Z1=dn1*stZnAll_Z1 # [L x nNmax]
icn1Z0=1j*cn1*stZnAll_Z0 # [L x nNmax]
dn1Z2=dn1*stZnAll_Z2 # [L x nNmax]
# pin 1 x T x N
# vecNdep=dn1Z1*cffnr # [L x nNmax x 1]
# Ersum=np.matmul(pin,vecNdep)
vecNdep=(dn1Z1*cffnr).swapaxes(1,2) # [L x 1 x nNmax]
Ersum=sum_conv(pin*vecNdep,2,settings)
# vecNdep=icn1Z0*mun # [L x nNmax]
# vecNdep2=dn1Z2*mun # [L x nNmax]
vecNdep=(icn1Z0*mun).swapaxes(1,2) # [L x 1 x nNmax]
vecNdep2=(dn1Z2*mun).swapaxes(1,2) # [L x 1 x nNmax]
# tmp1=np.matmul(pin, vecNdep)
# tmp2=np.matmul(taun, vecNdep2)
tmp1=sum_conv(pin*vecNdep,2,settings)
tmp2=sum_conv(taun*vecNdep2,2,settings)
Etsum=tmp1+tmp2
# tmp1=np.matmul(taun, vecNdep)
# tmp2=np.matmul(pin, vecNdep2)
tmp1=sum_conv(pin*vecNdep2,2,settings)
tmp2=sum_conv(taun*vecNdep,2,settings)
Efsum=tmp1+tmp2
Ecr=-2*np.sin(theta)*Ersum
Ect=-2*Etsum # corresponds to S_2 if r0==Inf
Esf=2*Efsum # corresponds to (-S_1) if r0==Inf
return (np.swapaxes(Ecr,1,2),np.swapaxes(Ect,1,2),np.swapaxes(Esf,1,2)) # Lx1xT
| gpl-3.0 | -6,966,585,909,532,253,000 | 37.90625 | 90 | 0.538956 | false |
nrocco/pycli-tools | test/test_ask_user_yesno.py | 1 | 1624 | import os
import sys
from pycli_tools import rawinput
def test_default_yes_yes():
def raw_input_mock(prompt):
return 'y'
rawinput.raw_input = raw_input_mock
assert True == rawinput.ask_user_yesno()
def test_default_yes_no():
def raw_input_mock(prompt):
return 'n'
rawinput.raw_input = raw_input_mock
assert False == rawinput.ask_user_yesno()
def test_default_yes_empty():
def raw_input_mock(prompt):
return ''
rawinput.raw_input = raw_input_mock
assert True == rawinput.ask_user_yesno()
def test_default_no_yes():
def raw_input_mock(prompt):
return 'y'
rawinput.raw_input = raw_input_mock
assert True == rawinput.ask_user_yesno(yes_on_enter=False)
def test_default_no_no():
def raw_input_mock(prompt):
return 'n'
rawinput.raw_input = raw_input_mock
assert False == rawinput.ask_user_yesno(yes_on_enter=False)
def test_default_no_empty():
def raw_input_mock(prompt):
return ''
rawinput.raw_input = raw_input_mock
assert False == rawinput.ask_user_yesno(yes_on_enter=False)
def test_continue():
def raw_input_mock(prompt):
return 'c'
rawinput.raw_input = raw_input_mock
assert True == rawinput.ask_user_yesno(yes='c', no='a')
def test_abort():
def raw_input_mock(prompt):
return 'a'
rawinput.raw_input = raw_input_mock
assert False == rawinput.ask_user_yesno(yes='c', no='a')
def test_ctrl_c():
def raw_input_mock(prompt):
raise KeyboardInterrupt()
rawinput.raw_input = raw_input_mock
assert False == rawinput.ask_user_yesno()
| mit | 4,690,743,906,382,569,000 | 23.238806 | 63 | 0.649015 | false |
epam/DLab | infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py | 1 | 1571 | #!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
import argparse
import sys  # needed for the explicit sys.exit() call below
from dlab.actions_lib import *
from dlab.meta_lib import *
parser = argparse.ArgumentParser()
parser.add_argument('--vpc_name', type=str, default='')
parser.add_argument('--resource_group_name', type=str, default='')
parser.add_argument('--region', type=str, default='')
parser.add_argument('--vpc_cidr', type=str, default='')
args = parser.parse_args()
if __name__ == "__main__":
if args.vpc_name != '':
if AzureMeta().get_vpc(args.resource_group_name, args.vpc_name):
print("REQUESTED VIRTUAL NETWORK {} EXISTS".format(args.vpc_name))
else:
print("Creating Virtual Network {}".format(args.vpc_name))
AzureActions().create_vpc(args.resource_group_name, args.vpc_name, args.region, args.vpc_cidr)
else:
sys.exit(1)
| apache-2.0 | 2,222,215,809,739,852,500 | 37.317073 | 106 | 0.623806 | false |
arunchandramouli/fanofpython | code/features/datatypes/dicts.py | 1 | 3592 |
'''
Aim :: To demonstrate the use of a dictionary
Define a simple dictionary , add values to it and iterate and print it
Dictionary works based on hashing principle ... simply said key,value pairs
** A dictionary object is a mutable datatype, which means it cannot be hashed
Anything that cannot be hashed cannot be used as a dictionary key
'''
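# A short illustration of the rule above: a mutable object such as a list
# cannot be used as a key.
try:
    {}[[1, 2]] = 'x'
except TypeError, e:
    print "Unhashable key rejected :: %s " % e, '\n\n'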
'''
An Empty Dict
'''
dictA = dict() # same as dictA = {}
dictB = dict() # same as dictB = {}
'''
Adding values to it
'''
for i in [1,3,2,4,8,9,5,6,7]:
dictB[i] = i
'''
Adding values to a dict
'''
# Let us use a simple for loop to add values to a dict
for i in xrange(10):
dictA[i] = i + 10
print dictA,'\n\n',dictB,'\n\n'
'''
Adding duplicate keys to the dict

When the same key is assigned again at run-time, the former value is
discarded and the latest assignment is retained.
'''
for i in [11,11,14,12,13]:
dictB[i] = i * 10
print dictB,'\n\n'
'''
Exploring a dict
'''
#print dir(dictB)
'''
['__class__', '__cmp__', '__contains__', '__delattr__', '__delitem__', '__doc__', '__eq__', '__format__', '__ge__',
'__getattribute__', '__getitem__', '__gt__', '__hash__', '__init__', '__iter__', '__le__', '__len__', '__lt__',
'__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__setitem__', '__sizeof__', '__str__',
'__subclasshook__', 'clear', 'copy', 'fromkeys', 'get', 'has_key', 'items', 'iteritems', 'iterkeys', 'itervalues', 'keys',
'pop', 'popitem', 'setdefault', 'update', 'values', 'viewitems', 'viewkeys', 'viewvalues']
'''
'''
Let's explore them ...
'''
print "__class__",' = ',dictB.__class__,'\n\n'
print "Values ",' = ',dictB.values(),'\n\n'
print "Keys ",' = ',dictB.keys(),'\n\n'
print "Items ",' = ',dictB.items(),'\n\n'
print "iteritems ",' = ',dictB.iteritems(),'\n\n'
print "iterkeys ",' = ',dictB.iterkeys(),'\n\n'
print "itervalues ",' = ',dictB.itervalues(),'\n\n'
print "viewitems ",' = ',dictB.viewitems(),'\n\n'
print "viewkeys ",' = ',dictB.viewkeys(),'\n\n'
print "viewvalues ",' = ',dictB.viewvalues(),'\n\n'
'''
Check if the dict has a certain key
'''
print "Check for Key = 0 in dictA and dictB ",'\n\n',dictA.has_key(0),'\n',dictB.has_key(1000),'\n'
'''
Accessing the value of a dictionary
'''
'''
Now I want to access dictA and get some values
'''
print "Acessing dictA using [] ",'\n'
print dictA[5] # same as dictA.__getitem__(5)
# This will generate KeyError
#print dictA['Arun'] # same as dictA.__getitem__('Arun')
'''
In the case above, when we access dictA[5] and the key is not
present, a KeyError is raised. To avoid that, use the .get()
method; it returns a default value (None unless one is supplied)
instead of raising.
'''
print "Acessing dictA using .get() ",'\n'
print dictA.get(1000,"Key aint there yet"),'\n\n'
'''
Iterate and print the keys and values
'''
print "Iterate and print the keys and values .... ",'\n\n'
for key , value in dictB.items():
print "Key = %s and Value = %s "%(key,value),'\n\n'
'''
Clear the values in the dictionary
** Before we clear, let's note the memory location **
'''
'''
When we use .clear() to empty the dictionary,
the address does not change (the same object is mutated in place)
'''
print "Memory Address dictB - Before Clear %s "%id(dictB),'\n\n'
dictB.clear()
print "dictB = %s "%dictB,'\n\n'
print "Memory Address dictB - After Clear %s "%id(dictB),'\n\n'
'''
When we rebind the name to a new {} to empty the dictionary,
the address changes (a brand-new dict object is created)
'''
print "Memory Address dictA - Before Clear %s "%id(dictA),'\n\n'
dictA = {}
print "dictA = %s "%dictA,'\n\n'
print "Memory Address dictA - After Clear %s "%id(dictA),'\n\n'
| gpl-3.0 | -2,898,241,896,216,891,000 | 19.179775 | 125 | 0.599109 | false |
samshara/Stock-Market-Analysis-and-Prediction | smap_nepse/preprocessing/visualization.py | 1 | 6795 | import pandas as pd
import numpy as np
# scatter_matrix is used in plot() below but was missing an import; the
# exact import path depends on the pandas version (pandas.plotting in
# newer releases, pandas.tools.plotting in older ones).
from pandas.plotting import scatter_matrix
import csv
import os
import matplotlib.pyplot as plt
import cufflinks as cf
import plotly
import plotly.offline as py
from plotly.offline.offline import _plot_html
import plotly.graph_objs as go
from plotly.tools import FigureFactory as FF
cf.set_config_file(world_readable=False,offline=True)
plt.style.use('ggplot')
def plot(name, *, cols=[], plot_kind=None, start_date=None, end_date=None):
""" Plots selected financial data of selected company which ranges over specified
date range[start_date:end_date]. The plot is as specified by the plot_kind parameter.
:param
    name: path to the company's CSV data file (read with pandas).
cols: list of columns specifying data fields to plot.
kind: type of plot. One of 'line', 'box', 'hexbin','scatter_matrix'.
start_date: The data is indexed by the Date column. starting date specifies
the first date index row to be plotted.
end_date: end_date specifies the last date index row to be plotted.
"""
header = ['Date','Total Transactions','Traded Shares','TotalTraded Amount',
'Maximum Price','Minimum Price','Closing Price']
plottypes = ['line', 'box', 'hexbin','scatter_matrix']
if cols is None or not cols:
cols = header[1:]
if plot_kind is None:
plot_kind = 'line'
if not set(cols) <= set(header):
raise ValueError('{} is not a valid column list in the data present.'.format(cols))
if not plot_kind in plottypes:
raise ValueError('{} is not a valid plot type. Please enter one of these {}.'.format(plot_kind, plottypes))
filename = name
try:
data = pd.read_csv(filename,index_col=0, parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return None
if plot_kind == 'scatter_matrix':
scatter_matrix(data.ix[:,cols][start_date:end_date], alpha=0.2, diagonal='kde')
elif plot_kind == 'hexbin':
if len(cols) < 2:
print('invalid no of columns for a hexbin plot. Two data columns are required.')
return None
data.ix[:,cols][start_date:end_date].plot(kind=plot_kind, x=cols[0], y=cols[1], gridsize=25)
else:
data.ix[:,cols][start_date:end_date].plot(kind=plot_kind,subplots=True,
title='{} Plot of {}.'.format(plot_kind.title(),name))
plt.show()
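# Example (illustrative only; 'NABIL.csv' is a hypothetical data file with
# the columns listed in 'header'):
#   plot('NABIL.csv', cols=['Closing Price'], plot_kind='line',
#        start_date='2015-01-01', end_date='2015-12-31')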
def comparision_plot(name,*, cols=None, plot_kind=None, start_date=None, end_date=None):
""" Plots selected financial data of selected companies which ranges over specified
date range[start_date:end_date]. The plot is as specified by the plot_kind parameter.
:param
    name: list of CSV data file paths, one per company.
cols: list of columns specifying data fields to plot.
kind: type of plot. One of 'line', 'box'.
start_date: The data is indexed by the Date column. starting date specifies
the first date index row to be plotted.
end_date: end_date specifies the last date index row to be plotted.
"""
header = ['Date','Total Transactions','Traded Shares','TotalTraded Amount',
'Maximum Price','Minimum Price','Closing Price']
plottypes = ['line', 'box']
if cols is None or not cols:
cols = header[1:]
if plot_kind is None:
plot_kind = 'line'
if not set(cols) <= set(header):
raise ValueError('{} is not a valid column list in the data present.'.format(cols))
if not plot_kind in plottypes:
raise ValueError('{} is not a valid plot type. Please enter one of these {}.'.format(plot_kind, plottypes))
filenames = name
try:
data = pd.concat([pd.read_csv(company, index_col=0, parse_dates=True) for company in filenames], axis=1, keys=name)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return None
ax = data.ix[:, data.columns.get_level_values(1).isin(set(cols))][start_date:end_date].plot()
ax.set_title('{} Plot of {} of {}.'.format(plot_kind.title(),','.join(cols), ','.join([s.strip('.csv') for s in name])))
plt.legend(title='Companies', fancybox=True, shadow=True, loc='best')
plt.show()
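# Example (illustrative only; file names are hypothetical):
#   comparision_plot(['NABIL.csv', 'SCB.csv'], cols=['Closing Price'],
#                    plot_kind='line')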
def financialplots(filename, plotkind):
try:
data = pd.read_csv(filename,index_col=0, parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return None
if plotkind == 'candlestick':
fig = FF.create_candlestick(data['Opening Price'], data['Maximum Price'], data['Minimum Price'], data['Closing Price'],dates=data.index)
elif plotkind == 'macd':
fig = data['Closing Price'].ta_plot(study='macd', fast_period=12, slow_period=26, signal_period=9, asFigure=True)
elif plotkind == 'boll':
fig = data['Closing Price'].ta_plot(study='boll',asFigure=True)
elif plotkind == 'ohlc':
fig = FF.create_ohlc(data['Opening Price'], data['Maximum Price'], data['Minimum Price'], data['Closing Price'],dates=data.index)
elif plotkind == 'sma':
fig = data['Closing Price'].ta_plot(study='sma', asFigure=True)
py.plot(fig,filename='../../plots/'+filename[:-4]+plotkind,validate=False,auto_open=False)
#py.plot(fig,image='png',image_width=1200, image_height=800)
def statisticplots(filename, plotkind,columns):
try:
data = pd.read_csv(filename,index_col=0, parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return None
if columns is None or not columns:
columns = list(data.columns.values)
data = data.ix[:,columns]
if plotkind == 'scattermatrix':
fig = FF.create_scatterplotmatrix(data,diag='box',title='Scattermatrix plot of {}'.format(filename[:-4]))
elif plotkind == 'line':
fig = data.iplot(theme='pearl',kind='scatter',title='Line plot of {}.'.format(filename[:-4]),subplots=True,asFigure=True)
elif plotkind == 'box':
fig = data.iplot(theme='pearl',kind='box',title='Box plot of {}.'.format(filename[:-4]),asFigure=True)
py.plot(fig,filename='../../plots/'+filename[:-4]+plotkind,validate=False,auto_open=False)
def compare(names,columns):
try:
data = pd.concat([pd.read_csv(company, index_col=0, parse_dates=True) for company in names], axis=1, keys=names)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return None
data = data.ix[:,data.columns.get_level_values(1).isin(set(columns))]
fig = data.iplot(theme='pearl',kind='scatter',title='Line Plot of {} of {}.'.format(','.join(columns), ','.join([s.strip('.csv') for s in names])),subplots=True,asFigure=True)
    py.plot(fig,filename='../../plots/compareplot',validate=False,auto_open=False)
| mit | 210,266,800,827,579,520 | 45.541096 | 179 | 0.647976 | false |
markreidvfx/pyaaf2 | tests/test_mobid.py | 1 | 1392 | from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
from aaf2.mobid import MobID
from uuid import UUID
from aaf2.auid import AUID
import uuid
import unittest
class MobIDTests(unittest.TestCase):
def test_mob_id(self):
m = MobID.new()
material_uuid = AUID("52c02cd8-6801-4806-986a-b68c0a0cf9d3")
m.material = material_uuid
m_str = "urn:smpte:umid:060a2b34.01010105.01010f20.13000000.52c02cd8.68014806.986ab68c.0a0cf9d3"
m2 = MobID(str(m))
assert m == m2
m2 = MobID(bytes_le=m.bytes_le)
assert m == m2
assert m.int == m2.int
assert m == MobID(m_str)
assert hash(m) == hash(m2)
assert str(m) == m_str
assert m.material == material_uuid
def test_int(self):
for i in range(1000):
m = MobID()
m.int = i
assert m.int == i
def test_material_id(self):
for i in range(10000):
material = AUID(int=i)
m = MobID(int=i)
assert m.material == material
for i in range(100):
material = uuid.uuid4()
m = MobID.new()
m.material = material
assert m.material == material
if __name__ == "__main__":
import logging
# logging.basicConfig(level=logging.DEBUG)
unittest.main()
| mit | 3,974,100,127,361,442,000 | 22.59322 | 104 | 0.564655 | false |
michael-lazar/mailcap_fix | tests/test_mailcap_python.py | 1 | 10329 | ###############################################################################
# This file was copied from cpython/Lib/test/test_mailcap.py
# Some lines have been modified to work without the python test runner
###############################################################################
import os
import copy
import unittest
from mailcap_fix import mailcap
# Location of mailcap file
MAILCAPFILE = os.path.join(os.path.dirname(__file__), 'data/mailcap.txt')
# Dict to act as mock mailcap entry for this test
# The keys and values should match the contents of MAILCAPFILE
MAILCAPDICT = {
'application/x-movie':
[{'compose': 'moviemaker %s',
'x11-bitmap': '"/usr/lib/Zmail/bitmaps/movie.xbm"',
'description': '"Movie"',
'view': 'movieplayer %s',
'lineno': 4}],
'application/*':
[{'copiousoutput': '',
'view': 'echo "This is \\"%t\\" but is 50 \\% Greek to me" \\; cat %s',
'lineno': 5}],
'audio/basic':
[{'edit': 'audiocompose %s',
'compose': 'audiocompose %s',
'description': '"An audio fragment"',
'view': 'showaudio %s',
'lineno': 6}],
'video/mpeg':
[{'view': 'mpeg_play %s', 'lineno': 13}],
'application/postscript':
[{'needsterminal': '', 'view': 'ps-to-terminal %s', 'lineno': 1},
{'compose': 'idraw %s', 'view': 'ps-to-terminal %s', 'lineno': 2}],
'application/x-dvi':
[{'view': 'xdvi %s', 'lineno': 3}],
'message/external-body':
[{'composetyped': 'extcompose %s',
'description': '"A reference to data stored in an external location"',
'needsterminal': '',
'view': 'showexternal %s %{access-type} %{name} %{site} %{directory} %{mode} %{server}',
'lineno': 10}],
'text/richtext':
[{'test': 'test "`echo %{charset} | tr \'[A-Z]\' \'[a-z]\'`" = iso-8859-8',
'copiousoutput': '',
'view': 'shownonascii iso-8859-8 -e richtext -p %s',
'lineno': 11}],
'image/x-xwindowdump':
[{'view': 'display %s', 'lineno': 9}],
'audio/*':
[{'view': '/usr/local/bin/showaudio %t', 'lineno': 7}],
'video/*':
[{'view': 'animate %s', 'lineno': 12}],
'application/frame':
[{'print': '"cat %s | lp"', 'view': 'showframe %s', 'lineno': 0}],
'image/rgb':
[{'view': 'display %s', 'lineno': 8}]
}
# For backwards compatibility, readmailcapfile() and lookup() still support
# the old version of mailcapdict without line numbers.
MAILCAPDICT_DEPRECATED = copy.deepcopy(MAILCAPDICT)
for entry_list in MAILCAPDICT_DEPRECATED.values():
for entry in entry_list:
entry.pop('lineno')
class HelperFunctionTest(unittest.TestCase):
def test_listmailcapfiles(self):
# The return value for listmailcapfiles() will vary by system.
# So verify that listmailcapfiles() returns a list of strings that is of
# non-zero length.
mcfiles = mailcap.listmailcapfiles()
self.assertIsInstance(mcfiles, list)
for m in mcfiles:
self.assertIsInstance(m, str)
env = os.environ
# According to RFC 1524, if MAILCAPS env variable exists, use that
# and only that.
if "MAILCAPS" in env:
env_mailcaps = env["MAILCAPS"].split(os.pathsep)
else:
env_mailcaps = ["/testdir1/.mailcap", "/testdir2/mailcap"]
env["MAILCAPS"] = os.pathsep.join(env_mailcaps)
mcfiles = mailcap.listmailcapfiles()
self.assertEqual(env_mailcaps, mcfiles)
def test_readmailcapfile(self):
# Test readmailcapfile() using test file. It should match MAILCAPDICT.
with open(MAILCAPFILE, 'r') as mcf:
d = mailcap.readmailcapfile(mcf)
self.assertDictEqual(d, MAILCAPDICT_DEPRECATED)
def test_lookup(self):
# Test without key
expected = [{'view': 'animate %s', 'lineno': 12},
{'view': 'mpeg_play %s', 'lineno': 13}]
actual = mailcap.lookup(MAILCAPDICT, 'video/mpeg')
self.assertListEqual(expected, actual)
# Test with key
key = 'compose'
expected = [{'edit': 'audiocompose %s',
'compose': 'audiocompose %s',
'description': '"An audio fragment"',
'view': 'showaudio %s',
'lineno': 6}]
actual = mailcap.lookup(MAILCAPDICT, 'audio/basic', key)
self.assertListEqual(expected, actual)
# Test on user-defined dicts without line numbers
c = copy.deepcopy(MAILCAPDICT)
for entry_list in c.values():
for entry in entry_list:
entry.pop('lineno')
expected = [{'view': 'mpeg_play %s'}, {'view': 'animate %s'}]
actual = mailcap.lookup(c, 'video/mpeg')
self.assertListEqual(expected, actual)
def test_subst(self):
plist = ['id=1', 'number=2', 'total=3']
# test case: ([field, MIMEtype, filename, plist=[]], <expected string>)
test_cases = [
(["", "audio/*", "foo.txt"], ""),
(["echo foo", "audio/*", "foo.txt"], "echo foo"),
(["echo %s", "audio/*", "foo.txt"], "echo foo.txt"),
(["echo %t", "audio/*", "foo.txt"], "echo audio/*"),
(["echo \%t", "audio/*", "foo.txt"], "echo %t"),
(["echo foo", "audio/*", "foo.txt", plist], "echo foo"),
(["echo %{total}", "audio/*", "foo.txt", plist], "echo 3")
]
for tc in test_cases:
self.assertEqual(mailcap.subst(*tc[0]), tc[1])
class GetcapsTest(unittest.TestCase):
def test_mock_getcaps(self):
# Test mailcap.getcaps() using mock mailcap file in this dir.
# Temporarily override any existing system mailcap file by pointing the
# MAILCAPS environment variable to our mock file.
os.environ["MAILCAPS"] = MAILCAPFILE
caps = mailcap.getcaps()
self.assertDictEqual(caps, MAILCAPDICT)
def test_system_mailcap(self):
# Test mailcap.getcaps() with mailcap file(s) on system, if any.
caps = mailcap.getcaps()
self.assertIsInstance(caps, dict)
mailcapfiles = mailcap.listmailcapfiles()
existingmcfiles = [mcf for mcf in mailcapfiles if os.path.exists(mcf)]
if existingmcfiles:
# At least 1 mailcap file exists, so test that.
for (k, v) in caps.items():
self.assertIsInstance(k, str)
self.assertIsInstance(v, list)
for e in v:
self.assertIsInstance(e, dict)
else:
# No mailcap files on system. getcaps() should return empty dict.
self.assertEqual({}, caps)
class FindmatchTest(unittest.TestCase):
def test_findmatch(self):
# default findmatch arguments
c = MAILCAPDICT
fname = "foo.txt"
plist = ["access-type=default", "name=john", "site=python.org",
"directory=/tmp", "mode=foo", "server=bar"]
audio_basic_entry = {
'edit': 'audiocompose %s',
'compose': 'audiocompose %s',
'description': '"An audio fragment"',
'view': 'showaudio %s',
'lineno': 6
}
audio_entry = {"view": "/usr/local/bin/showaudio %t", 'lineno': 7}
video_entry = {'view': 'animate %s', 'lineno': 12}
message_entry = {
'composetyped': 'extcompose %s',
'description': '"A reference to data stored in an external location"', 'needsterminal': '',
'view': 'showexternal %s %{access-type} %{name} %{site} %{directory} %{mode} %{server}',
'lineno': 10,
}
# test case: (findmatch args, findmatch keyword args, expected output)
# positional args: caps, MIMEtype
# keyword args: key="view", filename="/dev/null", plist=[]
# output: (command line, mailcap entry)
cases = [
([{}, "video/mpeg"], {}, (None, None)),
([c, "foo/bar"], {}, (None, None)),
([c, "video/mpeg"], {}, ('animate /dev/null', video_entry)),
([c, "audio/basic", "edit"], {}, ("audiocompose /dev/null", audio_basic_entry)),
([c, "audio/basic", "compose"], {}, ("audiocompose /dev/null", audio_basic_entry)),
([c, "audio/basic", "description"], {}, ('"An audio fragment"', audio_basic_entry)),
([c, "audio/basic", "foobar"], {}, (None, None)),
([c, "video/*"], {"filename": fname}, ("animate %s" % fname, video_entry)),
([c, "audio/basic", "compose"],
{"filename": fname},
("audiocompose %s" % fname, audio_basic_entry)),
([c, "audio/basic"],
{"key": "description", "filename": fname},
('"An audio fragment"', audio_basic_entry)),
([c, "audio/*"],
{"filename": fname},
("/usr/local/bin/showaudio audio/*", audio_entry)),
([c, "message/external-body"],
{"plist": plist},
("showexternal /dev/null default john python.org /tmp foo bar", message_entry))
]
self._run_cases(cases)
@unittest.skipUnless(os.name == "posix", "Requires 'test' command on system")
def test_test(self):
# findmatch() will automatically check any "test" conditions and skip
# the entry if the check fails.
caps = {"test/pass": [{"test": "test 1 -eq 1"}],
"test/fail": [{"test": "test 1 -eq 0"}]}
# test case: (findmatch args, findmatch keyword args, expected output)
# positional args: caps, MIMEtype, key ("test")
# keyword args: N/A
# output: (command line, mailcap entry)
cases = [
# findmatch will return the mailcap entry for test/pass because it evaluates to true
([caps, "test/pass", "test"], {}, ("test 1 -eq 1", {"test": "test 1 -eq 1"})),
# findmatch will return None because test/fail evaluates to false
([caps, "test/fail", "test"], {}, (None, None))
]
self._run_cases(cases)
def _run_cases(self, cases):
for c in cases:
self.assertEqual(mailcap.findmatch(*c[0], **c[1]), c[2]) | unlicense | 7,113,809,215,644,407,000 | 41.336066 | 104 | 0.538871 | false |
Jinwithyoo/han | tests/por.py | 1 | 6384 | # -*- coding: utf-8 -*-
from tests import HangulizeTestCase
from hangulize.langs.por import Portuguese
class PortugueseTestCase(HangulizeTestCase):
""" http://korean.go.kr/09_new/dic/rule/rule_foreign_0219.jsp """
lang = Portuguese()
def test_1st(self):
"""제1항
c, g는 a, o, u 앞에서는 각각 ‘ㅋ, ㄱ'으로 적고, e, i 앞에서는
‘ㅅ, ㅈ'으로 적는다.
"""
self.assert_examples({
'Cabral': '카브랄',
'Camocim': '카모싱',
'Egas': '에가스',
'Gil': '질',
})
def test_2nd(self):
"""제2항
gu, qu는 a, o, u 앞에서는 각각 ‘구, 쿠'로 적고, e, i 앞에서는
‘ㄱ, ㅋ'으로 적는다.
"""
self.assert_examples({
'Iguaçú': '이구아수',
'Araquari': '아라쿠아리',
'Guerra': '게하',
'Aquilino': '아킬리누',
})
def test_3rd(self):
"""제3항
d, t는 ㄷ, ㅌ으로 적는다.
"""
self.assert_examples({
'Amado': '아마두',
'Costa': '코스타',
'Diamantina': '디아만티나',
'Alegrete': '알레그레트',
'Montes': '몬트스',
})
def test_4th(self):
"""제4항
어말의 -che는 ‘시'로 적는다.
"""
self.assert_examples({
'Angoche': '앙고시',
'Peniche': '페니시',
})
def test_5th(self):
"""제5항: l
1. 어중의 l이 모음 앞에 오거나 모음이 따르지 않는 비음 앞에 오는
경우에는 ‘?'로 적는다. 다만, 비음 뒤의 l은 모음 앞에 오더라도 ‘ㄹ'로
적는다.
2. 어말 또는 자음 앞의 l은 받침 ‘ㄹ'로 적는다.
"""
self.assert_examples({
'Carlos': '카를루스',
'Amalia': '아말리아',
'Sul': '술',
'Azul': '아줄',
'Gilberto': '질베르투',
'Caracol': '카라콜',
})
def test_6th(self):
"""제6항
m, n은 각각 ㅁ, ㄴ으로 적고, 어말에서는 모두 받침 ‘ㅇ'으로 적는다.
어말 -ns의 n도 받침 ‘ㅇ'으로 적는다.
"""
self.assert_examples({
'Manuel': '마누엘',
'Moniz': '모니스',
'Campos': '캄푸스',
'Vincente': '빈센트',
'Santarem': '산타렝',
'Rondon': '혼동',
'Lins': '링스',
'Rubens': '후벵스',
})
def test_7th(self):
"""제7항
ng, nc, nq 연쇄에서 ‘g, c, q'가 ‘ㄱ'이나 ‘ㅋ'으로 표기되면 ‘n'은
받침 ‘ㅇ'으로 적는다.
"""
self.assert_examples({
'Angola': '앙골라',
'Angelo': '안젤루',
'Branco': '브랑쿠',
'Francisco': '프란시스쿠',
'Conquista': '콩키스타',
'Junqueiro': '중케이루',
})
def test_8th(self):
"""제8항
r는 어두나 n, l, s 뒤에 오는 경우에는 ‘ㅎ'으로 적고, 그 밖의 경우에는
‘ㄹ, 르'로 적는다.
"""
self.assert_examples({
'Ribeiro': '히베이루',
'Henrique': '엔히크',
'Bandeira': '반데이라',
'Salazar': '살라자르',
})
def test_9th(self):
"""제9항: s
1. 어두나 모음 앞에서는 ‘ㅅ'으로 적고, 모음 사이에서는 ‘ㅈ'으로 적는다.
2. 무성 자음 앞이나 어말에서는 ‘스'로 적고, 유성 자음 앞에서는 ‘즈'로
적는다.
"""
self.assert_examples({
'Salazar': '살라자르',
'Afonso': '아폰수',
'Barroso': '바호주',
'Gervasio': '제르바지우',
})
def test_10th(self):
"""제10항: sc, sç, xc
sc와 xc는 e, i 앞에서 ‘ㅅ'으로 적는다. sç는 항상 ‘ㅅ'으로 적는다.
"""
self.assert_examples({
'Nascimento': '나시멘투',
'piscina': '피시나',
'excelente': '이셀렌트',
'cresça': '크레사',
})
def test_11st(self):
"""제11항
x는 ‘시'로 적되, 어두 e와 모음 사이에 오는 경우에는 ‘ㅈ'으로 적는다.
"""
self.assert_examples({
'Teixeira': '테이셰이라',
'lixo': '리슈',
'exame': '이자므',
'exemplo': '이젬플루',
})
def test_12nd(self):
"""제12항
같은 자음이 겹치는 경우에는 겹치지 않은 경우와 같이 적는다. 다만, rr는
‘ㅎ, 흐'로, ss는 ‘ㅅ, 스'로 적는다.
"""
self.assert_examples({
'Garrett': '가헤트',
'Barroso': '바호주',
'Mattoso': '마토주',
'Toress': '토레스',
})
def test_13rd(self):
"""제13항
o는 ‘오'로 적되, 어말이나 -os의 o는 ‘우'로 적는다.
"""
self.assert_examples({
'Nobre': '노브르',
'Antonio': '안토니우',
'Melo': '멜루',
'Saramago': '사라마구',
'Passos': '파수스',
'Lagos': '라구스',
})
def test_14th(self):
"""제14항
e는 ‘에'로 적되, 어두 무강세 음절에서는 ‘이'로 적는다. 어말에서는
‘으'로 적는다.
"""
self.assert_examples({
'Montemayor': '몬테마요르',
'Estremoz': '이스트레모스',
'Chifre': '시프르',
'de': '드',
})
def test_15th(self):
"""제15항: -es
1. p, b, m, f, v 다음에 오는 어말 -es는 ‘-에스'로 적는다.
2. 그 밖의 어말 -es는 ‘-으스'로 적는다.
"""
self.assert_examples({
'Lopes': '로페스',
'Gomes': '고메스',
'Neves': '네베스',
'Chaves': '샤베스',
'Soares': '소아르스',
'Pires': '피르스',
}) | bsd-3-clause | 2,189,456,993,221,242,600 | 23.818182 | 69 | 0.392428 | false |
LittleBirdLiu/Flask_WebDev | config.py | 1 | 1391 | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
MAIL_SERVER = 'smtp.126.com'
MAIL_PORT = 25
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME') or '[email protected]'
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD') or '19910820'
FLASKY_MAIL_SUBJECT_PREFIX = '[Flasky]'
FLASKY_MAIL_SENDER = 'Flasky Admin <[email protected]>'
FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN') or '[email protected]'
FLASKY_POSTS_PER_PAGE = os.environ.get('FLASKY_POSTS_PER_PAGE') or 20
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
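# Minimal usage sketch (hypothetical app factory; not part of this file):
#   from flask import Flask
#   app = Flask(__name__)
#   cfg = config.get(os.environ.get('FLASK_CONFIG'), config['default'])
#   app.config.from_object(cfg)
#   cfg.init_app(app)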
| mit | 1,774,406,381,036,137,500 | 31.348837 | 73 | 0.667865 | false |
PKRoma/httpie | tests/test_compress.py | 1 | 3882 | """
We test against httpbin which doesn't return the request data in a
consistent way:
1. Non-form requests: the `data` field contains base64 encoded version of
our zlib-encoded request data.
2. Form requests: `form` contains a messed up version of the data.
"""
import base64
import zlib
from .fixtures import FILE_PATH, FILE_CONTENT
from httpie.status import ExitStatus
from .utils import StdinBytesIO, http, HTTP_OK, MockEnvironment
def assert_decompressed_equal(base64_compressed_data, expected_str):
compressed_data = base64.b64decode(
base64_compressed_data.split(',', 1)[1])
data = zlib.decompress(compressed_data)
actual_str = data.decode()
# FIXME: contains a trailing linebreak with an uploaded file
actual_str = actual_str.rstrip()
assert actual_str == expected_str
def test_cannot_combine_compress_with_chunked(httpbin):
r = http('--compress', '--chunked', httpbin.url + '/get',
tolerate_error_exit_status=True)
assert r.exit_status == ExitStatus.ERROR
assert 'cannot combine --compress and --chunked' in r.stderr
def test_cannot_combine_compress_with_multipart(httpbin):
r = http('--compress', '--multipart', httpbin.url + '/get',
tolerate_error_exit_status=True)
assert r.exit_status == ExitStatus.ERROR
assert 'cannot combine --compress and --multipart' in r.stderr
def test_compress_skip_negative_ratio(httpbin_both):
r = http(
'--compress',
httpbin_both + '/post',
'foo=bar',
)
assert HTTP_OK in r
assert 'Content-Encoding' not in r.json['headers']
assert r.json['json'] == {'foo': 'bar'}
def test_compress_force_with_negative_ratio(httpbin_both):
r = http(
'--compress',
'--compress',
httpbin_both + '/post',
'foo=bar',
)
assert HTTP_OK in r
assert r.json['headers']['Content-Encoding'] == 'deflate'
assert_decompressed_equal(r.json['data'], '{"foo": "bar"}')
def test_compress_json(httpbin_both):
r = http(
'--compress',
'--compress',
httpbin_both + '/post',
'foo=bar',
)
assert HTTP_OK in r
assert r.json['headers']['Content-Encoding'] == 'deflate'
assert_decompressed_equal(r.json['data'], '{"foo": "bar"}')
assert r.json['json'] is None
def test_compress_form(httpbin_both):
r = http(
'--form',
'--compress',
'--compress',
httpbin_both + '/post',
'foo=bar',
)
assert HTTP_OK in r
assert r.json['headers']['Content-Encoding'] == 'deflate'
assert r.json['data'] == ""
assert '"foo": "bar"' not in r
def test_compress_raw(httpbin_both):
r = http(
'--raw',
FILE_CONTENT,
'--compress',
'--compress',
httpbin_both + '/post',
)
assert HTTP_OK in r
assert r.json['headers']['Content-Encoding'] == 'deflate'
assert_decompressed_equal(r.json['data'], FILE_CONTENT.strip())
def test_compress_stdin(httpbin_both):
env = MockEnvironment(
stdin=StdinBytesIO(FILE_PATH.read_bytes()),
stdin_isatty=False,
)
r = http(
'--compress',
'--compress',
'PATCH',
httpbin_both + '/patch',
env=env,
)
assert HTTP_OK in r
assert r.json['headers']['Content-Encoding'] == 'deflate'
assert_decompressed_equal(r.json['data'], FILE_CONTENT.strip())
assert not r.json['json']
def test_compress_file(httpbin_both):
r = http(
'--form',
'--compress',
'--compress',
'PUT',
httpbin_both + '/put',
f'file@{FILE_PATH}',
)
assert HTTP_OK in r
assert r.json['headers']['Content-Encoding'] == 'deflate'
assert r.json['headers']['Content-Type'].startswith(
'multipart/form-data; boundary=')
assert r.json['files'] == {}
assert FILE_CONTENT not in r
| bsd-3-clause | 5,335,729,547,268,975,000 | 26.728571 | 73 | 0.607161 | false |
alirizakeles/tendenci | tendenci/apps/accountings/management/commands/correct_membership_acct_number.py | 1 | 2067 | from django.core.management.base import BaseCommand
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
"""
    Correct the account_number of AcctTran records for memberships
    that were wrongly assigned the event's account_number.
Usage: python manage.py correct_membership_acct_number
"""
def handle(self, *args, **options):
from tendenci.apps.memberships.models import MembershipDefault
from tendenci.apps.invoices.models import Invoice
from tendenci.apps.accountings.models import Acct, AcctEntry
account_number = MembershipDefault().get_acct_number()
acct = Acct.objects.get(account_number=account_number)
accts_ignore = Acct.objects.filter(
account_number__in=['220000',
'120000',
'106000']
)
num_trans_updated = 0
[content_type] = ContentType.objects.filter(
app_label='memberships',
model='membershipdefault'
)[:1] or [None]
if content_type:
membership_invoices = Invoice.objects.filter(
object_type=content_type
)
for invoice in membership_invoices:
acct_entries = AcctEntry.objects.filter(
source='invoice',
object_id=invoice.id)
for ae in acct_entries:
acct_trans = ae.trans.exclude(
account=acct).exclude(
account__in=accts_ignore)
if acct_trans.exists():
num_trans_updated += acct_trans.count()
acct_trans.update(account=acct)
print '# acct_tran updated ', num_trans_updated
| gpl-3.0 | 4,600,237,004,960,355,300 | 42.0625 | 72 | 0.50895 | false |
schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Plugins/Extensions/MediaPortal/additions/mediatheken/youtube.py | 1 | 66866 | # -*- coding: utf-8 -*-
import json
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
from Plugins.Extensions.MediaPortal.resources.choiceboxext import ChoiceBoxExt
from Plugins.Extensions.MediaPortal.resources.keyboardext import VirtualKeyBoardExt
from Plugins.Extensions.MediaPortal.resources.youtubeplayer import YoutubePlayer
from Plugins.Extensions.MediaPortal.resources.menuhelper import MenuHelper
from Plugins.Extensions.MediaPortal.resources.twagenthelper import twAgentGetPage
config.mediaportal.yt_param_regionid_idx = ConfigInteger(default = 2)
config.mediaportal.yt_param_time_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_meta_idx = ConfigInteger(default = 1)
config.mediaportal.yt_paramListIdx = ConfigInteger(default = 0)
config.mediaportal.yt_param_3d_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_duration_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_video_definition_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_event_types_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_video_type_idx = ConfigInteger(default = 0)
config.mediaportal.yt_refresh_token = ConfigText(default="")
APIKEYV3 = mp_globals.yt_a
param_hl = ('&hl=en-GB', '&hl=de-DE', '&hl=fr-FR', '&hl=it-IT', '')
param_ajax_hl = ('en', 'de', 'fr', 'it', '')
picker_lang = ''
param_ajax_gl = ('us','gb','de','fr','it')
agent = getUserAgent()
std_headers = {
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
}
default_cover = "file://%s/youtube.png" % (config.mediaportal.iconcachepath.value + "logos")
class youtubeGenreScreen(MenuHelper):
def __init__(self, session):
global yt_oauth2
self.param_qr = ""
self.param_author = ""
self.old_mainidx = -1
self.param_safesearch = ['&safeSearch=none']
self.param_format = '&format=5'
self.subCat = []
self.subCat_L2 = []
self.param_time = [
(_("Date"), "&order=date"),
(_("Rating"), "&order=rating"),
(_("Relevance"), "&order=relevance"),
(_("Title"), "&order=title"),
(_("Video count"), "&order=videoCount"),
(_("View count"), "&order=viewCount")
]
self.param_metalang = [
(_('English'), '&relevanceLanguage=en'),
(_('German'), '&relevanceLanguage=de'),
(_('French'), '&relevanceLanguage=fr'),
(_('Italian'), '&relevanceLanguage=it'),
(_('Any'), '')
]
self.param_regionid = [
(_('Whole world'), '®ionCode=US'),
(_('England'), '®ionCode=GB'),
(_('Germany'), '®ionCode=DE'),
(_('France'), '®ionCode=FR'),
(_('Italy'), '®ionCode=IT')
]
self.param_duration = [
(_('Any'), ''),
('< 4 Min', '&videoDuration=short'),
('4..20 Min', '&videoDuration=medium'),
('> 20 Min', '&videoDuration=long')
]
self.param_3d = [
(_('Any'), ''),
(_('2D'), '&videoDimension=2d'),
(_('3D'), '&videoDimension=3d')
]
self.param_video_definition = [
(_('Any'), ''),
(_('High'), '&videoDefinition=high'),
(_('Low'), '&videoDefinition=standard')
]
self.param_event_types = [
(_('None'), ''),
(_('Completed'), '&eventType=completed'),
(_('Live'), '&eventType=live'),
(_('Upcoming'), '&eventType=upcoming')
]
self.param_video_type = [
(_('Any'), ''),
(_('Episode'), '&videoType=episode'),
(_('Movie'), '&videoType=movie')
]
self.paramList = [
(_('Search request'), (self.paraQuery, None), (0,1,2,)),
(_('Event type'), (self.param_event_types, config.mediaportal.yt_param_event_types_idx), (0,)),
(_('Sort by'), (self.param_time, config.mediaportal.yt_param_time_idx), (0,1,2,)),
(_('Language'), (self.param_metalang, config.mediaportal.yt_param_meta_idx), (0,1,2,3,7,9,10,11,12,13,14)),
(_('Search region'), (self.param_regionid, config.mediaportal.yt_param_regionid_idx), (0,1,2,3,7,9,10,11,12,13,14)),
(_('User name'), (self.paraAuthor, None), (0,1,2,)),
(_('3D Search'), (self.param_3d, config.mediaportal.yt_param_3d_idx), (0,)),
(_('Runtime'), (self.param_duration, config.mediaportal.yt_param_duration_idx), (0,)),
(_('Video definition'), (self.param_video_definition, config.mediaportal.yt_param_video_definition_idx), (0,)),
(_('Video type'), (self.param_video_type, config.mediaportal.yt_param_video_type_idx), (0,))
]
self.subCatUserChannel = [
(_('Featured'), '/featured?'),
(_('Videos'), '/videos?'),
(_('Playlists'), '/playlists?'),
(_('Channels'), '/channels?')
]
self.subCatUserChannelPlaylist = [
(_('Videos'), '/videos?')
]
self.subCatUserChannelPopularWorldwide = [
(_('Featured'), '/featured?'),
]
self.subCatUserChannelPopular = [
(_('Featured'), '/featured?'),
(_('Videos'), '/videos?'),
(_('Playlists'), '/playlists?')
]
self.subCatYourChannel = [
(_('Playlists'), 'https://www.googleapis.com/youtube/v3/playlists?part=snippet%2Cid&mine=true&access_token=%ACCESSTOKEN%'),
(_('Uploads'), 'https://www.googleapis.com/youtube/v3/channels?part=contentDetails&mine=true&access_token=%ACCESSTOKEN%%playlistId=uploads%'),
(_('Likes'), 'https://www.googleapis.com/youtube/v3/channels?part=contentDetails&mine=true&access_token=%ACCESSTOKEN%%playlistId=likes%'),
(_('Subscriptions'), 'https://www.googleapis.com/youtube/v3/subscriptions?part=snippet&mine=true&access_token=%ACCESSTOKEN%'),
]
self.mainGenres = [
(_('Video search'), 'https://www.googleapis.com/youtube/v3/search?part=snippet&q=%QR%&type=video&key=%KEY%'),
(_('Playlist search'), 'https://www.googleapis.com/youtube/v3/search?part=snippet&q=%QR%&type=playlist&key=%KEY%'),
(_('Channel search'), 'https://www.googleapis.com/youtube/v3/search?part=snippet&q=%QR%&type=channel&key=%KEY%'),
#(_('Categories'), 'https://www.googleapis.com/youtube/v3/guideCategories?part=snippet&key=%KEY%'),
(400 * "—", ''),
(_('My channel'), ''),
(_('Favorites'), ''),
(_('User Channels'), ''),
(400 * "—", ''),
(_('YouTube Channels'), ''),
(_('Selected Channels'), ''),
(_('Music Channels'), ''),
(_('Gaming Channels'), ''),
(_('Car & Vehicle Channels'), ''),
(_('Radio Play Channels'), ''),
]
self.YTChannels = [
(_('Popular on YouTube') + " - " + _('Worldwide'), 'http://www.youtube.com/channel/UCgGzSIa8zIsJHbSs0bLplag'),
(_('Popular on YouTube') + " - " + _('Germany'), 'http://www.youtube.com/channel/UCK274iXLZhs8MFGLsncOyZQ'),
(_('Popular on YouTube') + " - " + _('USA'), 'http://www.youtube.com/channel/UCF0pVplsI8R5kcAqgtoRqoA'),
(_('News'), 'https://www.youtube.com/channel/UCYfdidRxbB8Qhf0Nx7ioOYw'),
(_('Music'), 'https://www.youtube.com/channel/UC-9-kyTW8ZkZNDHQJ6FgpwQ'),
(_('Gaming'), 'https://www.youtube.com/channel/UCOpNcN46UbXVtpKMrmU4Abg'),
(_('Sports'), 'https://www.youtube.com/channel/UCEgdi0XIXXZ-qJOFPf4JSKw'),
(_('Live'), 'https://www.youtube.com/channel/UC4R8DWoMoI7CAwX8_LjQHig'),
(_('Education'), 'https://www.youtube.com/channel/UC3yA8nDwraeOfnYfBWun83g'),
('YouTube Spotlight', 'https://www.youtube.com/channel/UCBR8-60-B28hp2BmDPdntcQ'),
('YouTube Trends', 'https://www.youtube.com/channel/UCeNZlh03MyUkjRlLFpVQxsg'),
('YouTube Creators', 'https://www.youtube.com/channel/UCUZHFZ9jIKrLroW8LcyJEQQ'),
('YouTube Nation', 'https://www.youtube.com/channel/UCUD4yDVyM54QpfqGJX4S7ng'),
('YouTube Rewind', 'https://www.youtube.com/channel/UCnHXLLNHjNAnDQ50JANLG1g')
]
self.HoerspielChannels = [
('Audible Hörbücher', 'https://www.youtube.com/user/audibletrailer'),
('Björns Hörspiel-TV', 'https://www.youtube.com/user/BjoernsHoerspielTV'),
('Edgar Allan Poe´s Kaminzimmer', 'https://www.youtube.com/user/EAPoeProductions'),
('felix auris', 'https://www.youtube.com/user/mercuriius'),
('FRUITY - SOUND - DISASTER', 'https://www.youtube.com/user/MrFruitylooper'),
('Hein Bloed', 'https://www.youtube.com/user/Heinbloedful'),
('Hörbücher, Hörspiele und mehr', 'https://www.youtube.com/user/BestSound1000'),
('Hörspiele und Klassik', 'https://www.youtube.com/user/scyliorhinus'),
('LAUSCH - Phantastische Hörspiele', 'https://www.youtube.com/user/merlausch'),
('Lauschgoldladen', 'https://www.youtube.com/user/Lauschgoldladen'),
('Multipolizei2', 'https://www.youtube.com/user/Multipolizei2'),
('Multipolizei3', 'https://www.youtube.com/user/Multipolizei3'),
('Soundtales Productions', 'https://www.youtube.com/user/SoundtalesProduction'),
]
self.HoerspielChannels.sort(key=lambda t : t[0].lower())
self.subCatHoerspielChannels = []
for item in self.HoerspielChannels:
self.subCatHoerspielChannels.append(self.subCatUserChannel)
self.CarChannels = [
('Alfa Romeo Deutschland', 'https://www.youtube.com/user/AlfaRomeoDE'),
('Audi Deutschland', 'https://www.youtube.com/user/Audi'),
('BMW Deutschland', 'https://www.youtube.com/user/BMWDeutschland'),
('BMW Motorrad', 'https://www.youtube.com/user/bmwmotorrad'),
('CITROËN Deutschland', 'https://www.youtube.com/user/CitroenDeutschland'),
('Ducati Motor Official Channel', 'https://www.youtube.com/user/DucatiMotorHolding'),
('Fiat Deutschland', 'https://www.youtube.com/user/FiatDeutschland'),
('Ford Deutschland', 'https://www.youtube.com/user/fordindeutschland'),
('Harley-Davidson Europe', 'https://www.youtube.com/user/HarleyDavidsonEurope'),
('Honda Deutschland', 'https://www.youtube.com/user/HondaDeutschlandGmbH'),
('Kawasaki Motors Europe', 'https://www.youtube.com/user/Kawasakimotors'),
('Land Rover Deutschland', 'https://www.youtube.com/user/experiencegermany'),
('Mazda Deutschland', 'https://www.youtube.com/user/MazdaDeutschland'),
('Mercedes-Benz', 'https://www.youtube.com/user/mercedesbenz'),
('MITSUBISHI MOTORS Deutschland', 'https://www.youtube.com/user/MitsubishiMotorsDE'),
('Moto Guzzi', 'https://www.youtube.com/user/motoguzziofficial'),
('Nissan Deutschland', 'https://www.youtube.com/user/NissanDeutsch'),
('Porsche Channel', 'https://www.youtube.com/user/Porsche'),
('SEAT Deutschland', 'https://www.youtube.com/user/SEATde'),
('ŠKODA AUTO Deutschland', 'https://www.youtube.com/user/skodade'),
('WAYOFLIFE SUZUKI', 'https://www.youtube.com/user/GlobalSuzukiChannel'),
('Toyota Deutschland', 'https://www.youtube.com/user/toyota'),
('Official Triumph Motorcycles', 'https://www.youtube.com/user/OfficialTriumph'),
('Volkswagen', 'https://www.youtube.com/user/myvolkswagen'),
('Yamaha Motor Europe', 'https://www.youtube.com/user/YamahaMotorEurope'),
('AUTO BILD TV', 'https://www.youtube.com/user/Autobild'),
('autotouring-TV', 'https://www.youtube.com/user/autotouring'),
('ADAC e.V.', 'https://www.youtube.com/user/adac'),
('MOTORVISION BIKE', 'https://www.youtube.com/user/motorvisionbike'),
('www.MOTORRADonline.de', 'https://www.youtube.com/user/motorrad'),
('TOURENFAHRER', 'https://www.youtube.com/user/Tourenfahrer'),
('DEKRA Automobil GmbH', 'https://www.youtube.com/user/DEKRAAutomobil'),
('Motorvision', 'https://www.youtube.com/user/MOTORVISIONcom'),
('Auto Motor & Sport', 'https://www.youtube.com/user/automotorundsport'),
('1000PS Motorradvideos', 'https://www.youtube.com/user/1000ps'),
('Motorrad Online', 'https://www.youtube.com/user/motorrad'),
('DMAX MOTOR', 'https://www.youtube.com/user/DMAX'),
]
self.CarChannels.sort(key=lambda t : t[0].lower())
self.subCatCarChannels = []
for item in self.CarChannels:
self.subCatCarChannels.append(self.subCatUserChannel)
self.GamingChannels = [
('THCsGameChannel', 'https://www.youtube.com/user/THCsGameChannel'),
('Game Tube', 'https://www.youtube.com/user/GameTube'),
('Electronic Arts GmbH', 'https://www.youtube.com/user/ElectronicArtsDE'),
('Ubisoft', 'https://www.youtube.com/user/ubisoft'),
('PlayStation', 'https://www.youtube.com/user/PlayStation'),
('Game Star', 'https://www.youtube.com/user/GameStarDE'),
('Assassins Creed DE', 'https://www.youtube.com/user/AssassinsCreedDE'),
('XboxDE\'s channel', 'https://www.youtube.com/user/XboxDE'),
('Disney Deutschland', 'https://www.youtube.com/user/WaltDisneyStudiosDE'),
('GIGA', 'https://www.youtube.com/user/giga'),
('Gronkh', 'https://www.youtube.com/user/Gronkh'),
('Sarazar', 'https://www.youtube.com/user/SarazarLP'),
('RANDOM ENCOUNTER', 'https://www.youtube.com/user/thegeekmythology'),
('gameinside tv', 'https://www.youtube.com/user/gameinsideshow'),
('Comedy Gaming mit Pink Panter', 'https://www.youtube.com/user/WartimeDignity'),
('CommanderKrieger - Baff Disch', 'https://www.youtube.com/user/CommanderKrieger'),
('Danny Burnage - Darauf ein Snickers-Eis!', 'https://www.youtube.com/user/TheDannyBurnage'),
('m4xFPS - Keks mit ♥', 'https://www.youtube.com/user/m4xFPS'),
('Kanal von xTheSolution', 'https://www.youtube.com/user/xTheSolution'),
('TheDoctorKaboom', 'https://www.youtube.com/user/TheDoctorKaboom'),
]
self.GamingChannels.sort(key=lambda t : t[0].lower())
self.subCatGamingChannels = []
for item in self.GamingChannels:
self.subCatGamingChannels.append(self.subCatUserChannel)
self.MusicChannels = [
('Ultra Music', 'https://www.youtube.com/user/UltraRecords'),
('ArmadaMusic.TV', 'https://www.youtube.com/user/armadamusic'),
('YOU LOVE DANCE.TV', 'https://www.youtube.com/user/Planetpunkmusic'),
('Classical Music Only Channel', 'https://www.youtube.com/user/ClassicalMusicOnly'),
('Music Channel Romania', 'https://www.youtube.com/user/1musicchannel'),
('50 Cent Music', 'https://www.youtube.com/user/50CentMusic'),
('GMC Schlager', 'https://www.youtube.com/user/BlueSilverstar'),
('Classical Music Channel / Klassische', 'https://www.youtube.com/user/BPanther'),
('EMI Music Germany', 'https://www.youtube.com/user/EMIMusicGermany'),
('Sony Music Germany', 'https://www.youtube.com/user/SMECatalogGermany'),
('Kanal von MyWorldCharts', 'https://www.youtube.com/user/MyWorldCharts'),
('CaptainCharts', 'https://www.youtube.com/user/CaptainCharts'),
('PowerCharts', 'https://www.youtube.com/user/PowerCharts'),
('Kontor.TV', 'https://www.youtube.com/user/kontor'),
('Scooter Official', 'https://www.youtube.com/user/scooter'),
('ATZEN MUSIK TV', 'https://www.youtube.com/user/atzenmusiktv'),
('BigCityBeats', 'https://www.youtube.com/user/HammerDontHurtEm'),
('The Best Of', 'https://www.youtube.com/user/alltimebestofmusic'),
('Tomorrowland', 'https://www.youtube.com/user/TomorrowlandChannel'),
('DrDoubleT', 'https://www.youtube.com/user/DrDoubleT'),
('►Techno, HandsUp & Dance◄', 'https://www.youtube.com/user/DJFlyBeatMusic'),
('Zooland Records', 'https://www.youtube.com/user/zoolandMusicGmbH'),
('Bazooka Records', 'https://www.youtube.com/user/bazookalabel'),
('Crystal Lake Music', 'https://www.youtube.com/user/CrystaLakeTV'),
('SKRILLEX', 'https://www.youtube.com/user/TheOfficialSkrillex'),
('AggroTV', 'https://www.youtube.com/user/aggroTV'),
('Bands & ART-Ellie Goulding', 'https://www.youtube.com/user/EllieGouldingEmpire'),
('Bands & ART-Psyche', 'https://www.youtube.com/user/thandewye'),
('Bands & ART-Joint Venture', 'https://www.youtube.com/user/srudlak'),
('Bands & ART-Madonna', 'https://www.youtube.com/user/madonna'),
('BB Sound Production', 'https://www.youtube.com/user/b0ssy007'),
('Chill-out,Lounge,Jazz,Electronic,Psy,Piano,Trance', 'https://www.youtube.com/user/aliasmike2002'),
('Gothic1', 'https://www.youtube.com/user/AiratzuMusic'),
('Gothic2', 'https://www.youtube.com/user/INM0R4L'),
('Gothic-Industrial Mix', 'https://www.youtube.com/user/noetek'),
('Wave & Gothic', 'https://www.youtube.com/user/MrBelorix'),
('Indie', 'https://www.youtube.com/user/curie78'),
('Planetpunkmusic TV', 'https://www.youtube.com/user/Planetpunkmusic'),
('Selfmade Records', 'https://www.youtube.com/user/SelfmadeRecords'),
('UKF-DrumandBass', 'https://www.youtube.com/user/UKFDrumandBass'),
('UKF-Dubstep', 'https://www.youtube.com/user/UKFDubstep'),
('UKF-Music', 'https://www.youtube.com/user/UKFMusic'),
('UKF-Mixes', 'https://www.youtube.com/user/UKFMixes'),
('UKF-Live', 'https://www.youtube.com/user/UKFLive'),
('Smarty Music', 'https://www.youtube.com/user/smartymcfly'),
('MoMMusic Network', 'https://www.youtube.com/user/MrMoMMusic'),
('Schlager Affe', 'https://www.youtube.com/user/schlageraffe2011'),
('Elvis Presley', 'https://www.youtube.com/user/elvis'),
('Dj3P51LON', 'https://www.youtube.com/user/Dj3P51LON'),
('HeadhunterzMedia', 'https://www.youtube.com/user/HeadhunterzMedia'),
('GMC Volkstümlicher Schlager', 'https://www.youtube.com/user/gusbara'),
('GMC HQ Volkstümlicher Schlager', 'https://www.youtube.com/user/GMChq'),
]
self.MusicChannels.sort(key=lambda t : t[0].lower())
        self.subCatMusicChannels = [self.subCatUserChannel] * len(self.MusicChannels)
self.SelectedChannels = [
('VEVO Music', 'https://www.youtube.com/user/VEVO'),
('KinoCheck', 'https://www.youtube.com/user/KinoCheck'),
('Rocket Beans TV', 'https://www.youtube.com/user/ROCKETBEANSTV'),
('Daheimkino', 'https://www.youtube.com/user/Daheimkino'),
('E2WORLD', 'https://www.youtube.com/channel/UC95hFgcA4hzKcOQHiEFX3UA'),
('The HDR Channel', 'https://www.youtube.com/channel/UCve7_yAZHFNipzeAGBI5t9g'),
('4K Relaxation Channel', 'https://www.youtube.com/channel/UCg72Hd6UZAgPBAUZplnmPMQ'),
]
self.SelectedChannels.sort(key=lambda t : t[0].lower())
        self.subCatSelectedChannels = [self.subCatUserChannel] * len(self.SelectedChannels)
try:
fname = mp_globals.pluginPath + "/userfiles/userchan.xml"
self.user_path = config.mediaportal.watchlistpath.value + "mp_userchan.xml"
from os.path import exists
if not exists(self.user_path):
shutil.copyfile(fname, self.user_path)
fp = open(self.user_path)
data = fp.read()
fp.close()
except IOError, e:
self.UserChannels = []
self.UserChannels.append((_('No channels found!'), ''))
else:
list = re.findall('<name>(.*?)</name>.*?<user>(.*?)</user>', data, re.S)
self.UserChannels = []
if list:
for (name, user) in list:
if user.strip().startswith('UC'):
self.UserChannels.append((name.strip(), 'https://www.youtube.com/channel/'+user.strip()))
elif user.strip().startswith('PL'):
self.UserChannels.append((name.strip(), 'gdata.youtube.com/feeds/api/users/'+user.strip()+'/uploads?'))
else:
self.UserChannels.append((name.strip(), 'https://www.youtube.com/user/'+user.strip()))
self.keyLocked = False
else:
self.UserChannels.append((_('No channels found!'), ''))
self.subCatUserChannels = []
for item in self.UserChannels:
if item[1].replace('gdata.youtube.com/feeds/api/users/', '').startswith('PL'):
self.subCatUserChannels.append(self.subCatUserChannelPlaylist)
elif item[1] != "":
self.subCatUserChannels.append(self.subCatUserChannel)
else:
self.subCatUserChannels.append(None)
MenuHelper.__init__(self, session, 2, None, "", "", self._defaultlistcenter, 'MP_YouTube')
self["yt_actions"] = ActionMap(["MP_Actions"], {
"yellow": self.keyYellow,
"blue": self.login
}, -1)
self['title'] = Label("YouTube")
self['ContentTitle'] = Label(_("VIDEOSEARCH"))
self['Query'] = Label(_("Search request"))
self['query'] = Label()
self['Time'] = Label(_("Sort by"))
self['time'] = Label()
self['Metalang'] = Label(_("Language"))
self['metalang'] = Label()
self['Regionid'] = Label(_("Search region"))
self['regionid'] = Label()
self['Author'] = Label(_("User name"))
self['author'] = Label()
self['Keywords'] = Label(_("Event type"))
self['keywords'] = Label()
self['Parameter'] = Label(_("Parameter"))
self['3D'] = Label(_("3D Search"))
self['3d'] = Label()
self['Duration'] = Label(_("Runtime"))
self['duration'] = Label()
self['Reserve1'] = Label(_("Video definition"))
self['reserve1'] = Label()
self['Reserve2'] = Label(_("Video type"))
self['reserve2'] = Label()
self['coverArt'] = Pixmap()
self['F3'] = Label(_("Edit Parameter"))
self['F4'] = Label(_("Request YT-Token"))
self.onLayoutFinish.append(self.initSubCat)
self.mh_On_setGenreStrTitle.append((self.keyYellow, [0]))
self.onClose.append(self.saveIdx)
self.channelId = None
def initSubCat(self):
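        # Fetch the localized list of assignable video categories from the Data API v3.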
CoverHelper(self['coverArt']).getCover(default_cover)
hl = param_hl[config.mediaportal.yt_param_meta_idx.value]
rc = self.param_regionid[config.mediaportal.yt_param_regionid_idx.value][1].split('=')[-1]
if not rc:
rc = 'US'
        url = 'https://www.googleapis.com/youtube/v3/videoCategories?part=snippet%s&regionCode=%s&key=%s' % (hl, rc, APIKEYV3)
twAgentGetPage(url, agent=agent, headers=std_headers).addCallback(self.parseCats)
def parseCats(self, data):
data = json.loads(data)
for item in data.get('items', {}):
if item['snippet']['assignable']:
self.subCat.append((str(item['snippet']['title'].encode('utf-8')), '&videoCategoryId=%s' % str(item['id'])))
self.subCat_L2.append(None)
self.subCat.sort(key=lambda t : t[0].lower())
self.subCat.insert(0, ((_('No Category'), '')))
self.subCat_L2.insert(0, (None))
self.mh_genreMenu = [
self.mainGenres,
[
self.subCat,
None,
None,
#None,
None,
self.subCatYourChannel,
None,
self.UserChannels,
None,
self.YTChannels,
self.SelectedChannels,
self.MusicChannels,
self.GamingChannels,
self.CarChannels,
self.HoerspielChannels,
],
[
self.subCat_L2,
None,
None,
#None,
None,
[None, None, None, None],
None,
self.subCatUserChannels,
None,
[self.subCatUserChannelPopularWorldwide, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannelPopular],
self.subCatSelectedChannels,
self.subCatMusicChannels,
self.subCatGamingChannels,
self.subCatCarChannels,
self.subCatHoerspielChannels,
]
]
self.mh_loadMenu()
def paraQuery(self):
self.session.openWithCallback(self.cb_paraQuery, VirtualKeyBoardExt, title = (_("Enter search criteria")), text = self.param_qr, is_dialog=True, auto_text_init=True, suggest_func=self.getSuggestions)
def cb_paraQuery(self, callback = None, entry = None):
if callback != None:
self.param_qr = callback.strip()
self.showParams()
def paraAuthor(self):
self.session.openWithCallback(self.cb_paraAuthor, VirtualKeyBoardExt, title = (_("Author")), text = self.param_author, is_dialog=True)
def cb_paraAuthor(self, callback = None, entry = None):
if callback != None:
self.param_author = callback.strip()
self.channelId = None
self.showParams()
def showParams(self):
try:
self['query'].setText(self.param_qr)
self['time'].setText(self.param_time[config.mediaportal.yt_param_time_idx.value][0])
self['reserve1'].setText(self.param_video_definition[config.mediaportal.yt_param_video_definition_idx.value][0])
self['reserve2'].setText(self.param_video_type[config.mediaportal.yt_param_video_type_idx.value][0])
self['metalang'].setText(self.param_metalang[config.mediaportal.yt_param_meta_idx.value][0])
self['regionid'].setText(self.param_regionid[config.mediaportal.yt_param_regionid_idx.value][0])
self['3d'].setText(self.param_3d[config.mediaportal.yt_param_3d_idx.value][0])
self['duration'].setText(self.param_duration[config.mediaportal.yt_param_duration_idx.value][0])
self['author'].setText(self.param_author)
self['keywords'].setText(self.param_event_types[config.mediaportal.yt_param_event_types_idx.value][0])
except:
pass
self.paramShowHide()
    def paramShowHide(self):
        if self.old_mainidx == self.mh_menuIdx[0]:
            return
        self.old_mainidx = self.mh_menuIdx[0]
        showCtr = 0
        self['Parameter'].hide()
        # Widget/label pairs in the same order as the corresponding paramList entries.
        widgets = ('query', 'keywords', 'time', 'metalang', 'regionid',
                   'author', '3d', 'duration', 'reserve1', 'reserve2')
        labels = ('Query', 'Keywords', 'Time', 'Metalang', 'Regionid',
                  'Author', '3D', 'Duration', 'Reserve1', 'Reserve2')
        for i in range(len(widgets)):
            if self.mh_menuIdx[0] in self.paramList[i][2]:
                self[widgets[i]].show()
                self[labels[i]].show()
                if i in (0, 3):
                    # The search query and language rows also expose the generic parameter label.
                    self['Parameter'].show()
                showCtr = 1
            else:
                self[widgets[i]].hide()
                self[labels[i]].hide()
        if showCtr:
            self['F3'].show()
        else:
            self['F3'].hide()
def mh_loadMenu(self):
self.showParams()
self.mh_setMenu(0, True)
self.mh_keyLocked = False
def keyYellow(self, edit=1):
c = len(self.paramList)
list = []
if config.mediaportal.yt_paramListIdx.value not in range(0, c):
config.mediaportal.yt_paramListIdx.value = 0
old_idx = config.mediaportal.yt_paramListIdx.value
for i in range(c):
if self.mh_menuIdx[0] in self.paramList[i][2]:
list.append((self.paramList[i][0], i))
if list and edit:
self.session.openWithCallback(self.cb_handlekeyYellow, ChoiceBoxExt, title=_("Edit Parameter"), list = list, selection=old_idx)
else:
self.showParams()
def cb_handlekeyYellow(self, answer):
pidx = answer and answer[1]
if pidx != None:
config.mediaportal.yt_paramListIdx.value = pidx
if type(self.paramList[pidx][1][0]) == list:
self.changeListParam(self.paramList[pidx][0], *self.paramList[pidx][1])
else:
self.paramList[pidx][1][0]()
self.showParams()
def changeListParam(self, nm, l, idx):
if idx.value not in range(0, len(l)):
idx.value = 0
list = []
for i in range(len(l)):
list.append((l[i][0], (i, idx)))
if list:
self.session.openWithCallback(self.cb_handleListParam, ChoiceBoxExt, title=_("Edit Parameter") + " '%s'" % nm, list = list, selection=idx.value)
def cb_handleListParam(self, answer):
p = answer and answer[1]
if p != None:
p[1].value = p[0]
self.showParams()
def getUserChannelId(self, usernm, callback):
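        # Resolve a legacy YouTube user name to its channel id, then re-invoke the callback.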
url = 'https://www.googleapis.com/youtube/v3/channels?part=id&forUsername=%s&key=%s' % (usernm, APIKEYV3)
twAgentGetPage(url, agent=agent, headers=std_headers).addCallback(self.parseChannelId).addCallback(lambda x: callback()).addErrback(self.parseChannelId, True)
def parseChannelId(self, data, err=False):
try:
data = json.loads(data)
self.channelId = str(data['items'][0]['id'])
except:
printl('No CID found.',self,'E')
self.channelId = 'none'
def openListScreen(self):
tm = self.param_time[config.mediaportal.yt_param_time_idx.value][1]
lr = self.param_metalang[config.mediaportal.yt_param_meta_idx.value][1]
regionid = self.param_regionid[config.mediaportal.yt_param_regionid_idx.value][1]
_3d = self.param_3d[config.mediaportal.yt_param_3d_idx.value][1]
dura = self.param_duration[config.mediaportal.yt_param_duration_idx.value][1]
vid_def = self.param_video_definition[config.mediaportal.yt_param_video_definition_idx.value][1]
event_type = self.param_event_types[config.mediaportal.yt_param_event_types_idx.value][1]
genreurl = self.mh_genreUrl[0] + self.mh_genreUrl[1]
if 'googleapis.com' in genreurl:
if '/guideCategories' in genreurl or '/playlists' in genreurl:
lr = param_hl[config.mediaportal.yt_param_meta_idx.value]
if not '%ACCESSTOKEN%' in genreurl:
if self.param_author:
if not self.channelId:
return self.getUserChannelId(self.param_author, self.openListScreen)
else:
channel_id = '&channelId=%s' % self.channelId
else: channel_id = ''
genreurl = genreurl.replace('%QR%', urllib.quote_plus(self.param_qr))
genreurl += regionid + lr + tm + channel_id + self.param_safesearch[0]
if 'type=video' in genreurl:
vid_type = self.param_video_type[config.mediaportal.yt_param_video_type_idx.value][1]
genreurl += _3d + dura + vid_def + event_type + vid_type
elif _('Favorites') in self.mh_genreTitle:
genreurl = ''
else:
genreurl = self.mh_genreUrl[0] + self.mh_genreUrl[1] + self.mh_genreUrl[2]
if self.mh_genreTitle != (400 * "—"):
self.session.open(YT_ListScreen, genreurl, self.mh_genreTitle)
def mh_callGenreListScreen(self):
global picker_lang
picker_lang = ''
if _('My channel') in self.mh_genreTitle:
if not config.mediaportal.yt_refresh_token.value:
self.session.open(MessageBoxExt, _("You need to request a token to allow access to your YouTube account."), MessageBoxExt.TYPE_INFO)
return
self.openListScreen()
def login(self):
if not config.mediaportal.yt_refresh_token.value:
yt_oauth2.requestDevCode(self.session)
else:
self.session.openWithCallback(self.cb_login, MessageBoxExt, _("Did you revoke the access?"), type=MessageBoxExt.TYPE_YESNO, default=False)
def cb_login(self, answer):
if answer is True:
yt_oauth2.requestDevCode(self.session)
def saveIdx(self):
config.mediaportal.yt_param_meta_idx.save()
yt_oauth2._tokenExpired()
def getSuggestions(self, text, max_res):
hl = param_ajax_hl[config.mediaportal.yt_param_meta_idx.value]
gl = param_ajax_gl[config.mediaportal.yt_param_regionid_idx.value]
url = "https://clients1.google.com/complete/search?client=youtube&hl=%s&gl=%s&ds=yt&q=%s" % (hl, gl, urllib.quote_plus(text))
d = twAgentGetPage(url, agent=agent, headers=std_headers, timeout=5)
d.addCallback(self.gotSuggestions, max_res)
d.addErrback(self.gotSuggestions, max_res, True)
return d
def gotSuggestions(self, suggestions, max_res, err=False):
list = []
if not err and suggestions:
i=suggestions.find(',[["')
if i > 0:
for m in re.finditer('"(.+?)",0', suggestions[i:]):
list.append(decodeHtml(m.group(1)))
max_res -= 1
if not max_res: break
elif err:
printl(str(suggestions),self,'E')
return list
class YT_ListScreen(MPScreen, ThumbsHelper):
    param_regionid = (
        '&gl=US',
        '&gl=GB',
        '&gl=DE',
        '&gl=FR',
        '&gl=IT'
    )
def __init__(self, session, stvLink, stvGenre, title="YouTube"):
self.stvLink = stvLink
self.genreName = stvGenre
self.headers = std_headers
MPScreen.__init__(self, session, skin='MP_PluginDescr')
ThumbsHelper.__init__(self)
self.favoGenre = self.genreName.startswith(_('Favorites'))
self.apiUrl = 'gdata.youtube.com' in self.stvLink
self.apiUrlv3 = 'googleapis.com' in self.stvLink
self.ajaxUrl = '/c4_browse_ajax' in self.stvLink
self.c4_browse_ajax = ''
self.url_c4_browse_ajax_list = ['']
self["actions"] = ActionMap(["MP_Actions2", "MP_Actions"], {
"ok" : self.keyOK,
"red" : self.keyRed,
"cancel" : self.keyCancel,
"5" : self.keyShowThumb,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft,
"upUp" : self.key_repeatedUp,
"rightUp" : self.key_repeatedUp,
"leftUp" : self.key_repeatedUp,
"downUp" : self.key_repeatedUp,
"upRepeated" : self.keyUpRepeated,
"downRepeated" : self.keyDownRepeated,
"rightRepeated" : self.keyRightRepeated,
"leftRepeated" : self.keyLeftRepeated,
"nextBouquet" : self.keyPageUpFast,
"prevBouquet" : self.keyPageDownFast,
"green" : self.keyGreen,
"0" : self.closeAll,
"1" : self.key_1,
"3" : self.key_3,
"4" : self.key_4,
"6" : self.key_6,
"7" : self.key_7,
"9" : self.key_9
}, -1)
self['title'] = Label(title)
self['ContentTitle'] = Label(self.genreName)
if not self.favoGenre:
self['F2'] = Label(_("Favorite"))
else:
self['F2'] = Label(_("Delete"))
if ('order=' in self.stvLink) and ('type=video' in self.stvLink) or (self.apiUrl and '/uploads' in self.stvLink):
self['F1'] = Label(_("Sort by"))
self.key_sort = True
else:
self['F1'] = Label(_("Exit"))
self.key_sort = False
self['Page'] = Label(_("Page:"))
self['coverArt'].hide()
self.coverHelper = CoverHelper(self['coverArt'])
self.propertyImageUrl = None
self.keyLocked = True
self.baseUrl = "https://www.youtube.com"
self.lastUrl = None
self.setVideoPrio()
self.favo_path = config.mediaportal.watchlistpath.value + "mp_yt_favorites.xml"
self.keckse = CookieJar()
self.filmliste = []
self.start_idx = 1
self.max_res = int(config.mediaportal.youtube_max_items_pp.value)
self.max_pages = 1000 / self.max_res
self.total_res = 0
self.pages = 0
self.page = 0
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.load_more_href = None
self.onClose.append(self.youtubeExit)
self.modeShowThumb = 1
self.playAll = True
self.showCover = False
self.lastCover = ""
self.actType = None
if not self.apiUrl:
self.onLayoutFinish.append(self.loadPageData)
else:
self.onLayoutFinish.append(self.checkAPICallv2)
def checkAPICallv2(self):
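        # Rewrite legacy gdata (API v2) user/playlist URLs to their Data API v3 equivalents.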
m = re.search('/api/users/(.*?)/uploads\?', self.stvLink, re.S)
if m:
if m.group(1).startswith('PL'):
self.stvLink = "https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&order=date&playlistId=%s&key=%s" % (m.group(1), APIKEYV3)
self.apiUrl = False
self.apiUrlv3 = True
elif not m.group(1).startswith('UC'):
url = 'https://www.googleapis.com/youtube/v3/channels?part=contentDetails&forUsername=%s&key=%s' % (m.group(1), APIKEYV3)
return twAgentGetPage(url, agent=agent, headers=self.headers).addCallback(self.parsePlaylistId).addErrback(self.dataError)
else:
self.apiUrl = False
self.apiUrlv3 = True
self.stvLink = 'https://www.googleapis.com/youtube/v3/search?part=snippet&order=date&channelId=%s&key=%s' % (m.group(1), APIKEYV3)
reactor.callLater(0, self.loadPageData)
def parsePlaylistId(self, data):
data = json.loads(data)
try:
plid = data['items'][0]['contentDetails']['relatedPlaylists']['uploads']
except:
printl('No PLID found.',self,'E')
else:
self.stvLink = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&order=date&playlistId=%s&key=%s' % (str(plid), APIKEYV3)
self.apiUrl = False
self.apiUrlv3 = True
reactor.callLater(0, self.loadPageData)
def loadPageData(self):
if _('No channels found!') in self.genreName:
self.close()
return
self.keyLocked = True
self.ml.setList(map(self.YT_ListEntry, [(_('Please wait...'),'','','','','','')]))
hl = param_ajax_hl[config.mediaportal.yt_param_meta_idx.value]
if hl != picker_lang:
self.setLang("https://www.youtube.com", hl)
return
if self.favoGenre:
self.getFavos()
else:
url = self.stvLink
if self.apiUrlv3:
url = url.replace('%KEY%', APIKEYV3)
url += "&maxResults=%d" % (self.max_res,)
if self.c4_browse_ajax:
url += '&pageToken=' + self.c4_browse_ajax
elif self.ajaxUrl:
if not 'paging=' in url:
url += '&paging=%d' % max(1, self.page)
url = '%s%s' % (self.baseUrl, url)
elif self.c4_browse_ajax:
url = '%s%s' % (self.baseUrl, self.c4_browse_ajax)
else:
                if url[-1] == '?' or url[-1] == '&':
                    url = '%sflow=list' % url
                else:
                    url = '%s&flow=list' % url
if not '&gl=' in url:
url += self.param_regionid[config.mediaportal.yt_param_regionid_idx.value]
self.lastUrl = url
if self.apiUrlv3 and '%ACT-' in url:
self.actType = re.search('(%ACT-.*?%)', url).group(1)
url = url.replace(self.actType, '', 1)
self.actType = unicode(re.search('%ACT-(.*?)%', self.actType).group(1))
if '%ACCESSTOKEN%' in url:
token = yt_oauth2.getAccessToken()
if not token:
yt_oauth2.refreshToken(self.session).addCallback(self.getData, url).addErrback(self.dataError)
else:
self.getData(token, url)
else:
self.getData(None, url)
def setLang(self, url, hl):
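        # Switch scraped pages to the configured language by submitting YouTube's language picker form.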
picker_url = "https://www.youtube.com/picker_ajax?action_language=1&base_url=" + urllib.quote(url)
twAgentGetPage(picker_url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.gotPickerData, hl).addErrback(self.dataError)
def gotPickerData(self, data, hl):
global picker_lang
try:
data = json.loads(data)["html"].encode('utf-8')
m = re.search('<form(.*?)</form>', data, re.S)
            action_url = self.baseUrl + re.search('action="(.*?)"', m.group(1)).group(1).replace('&amp;', '&')
            base_url = re.search('<input.*?name="base_url" value="(.*?)"', m.group(1)).group(1).replace('&amp;', '&')
session_token = re.search('<input.*?name="session_token" value="(.*?)"', m.group(1)).group(1)
except:
print 'html:',data
else:
picker_lang = hl
postdata = urllib.urlencode({
'base_url': base_url,
'session_token': session_token,
'hl': hl})
headers = self.headers.copy()
headers['Content-Type'] = 'application/x-www-form-urlencoded'
twAgentGetPage(action_url, method='POST', cookieJar=self.keckse, agent=agent, headers=headers, postdata=postdata).addCallback(lambda _: self.loadPageData()).addErrback(self.pickerError)
def pickerError(self, err):
printl('pickerError:%s' % err,self,'E')
def getData(self, token, url):
if token:
url = url.replace('%ACCESSTOKEN%', token, 1)
if '%playlistId=' in url:
return self.getRelatedUserPL(url, token)
twAgentGetPage(url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.genreData).addErrback(self.dataError)
def getRelatedUserPL(self, url, token):
pl = re.search('%playlistId=(.*?)%', url).group(1)
yt_url = re.sub('%playlistId=.*?%', '', url, 1)
twAgentGetPage(yt_url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.parseRelatedPL, token, pl).addErrback(self.dataError)
def parseRelatedPL(self, data, token, pl):
try:
data = json.loads(data)
except:
pass
else:
for item in data.get('items', {}):
playlist = item['contentDetails']['relatedPlaylists']
if pl in playlist:
yt_url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&access_token=%s&order=date' % (str(playlist[pl]), token)
return twAgentGetPage(yt_url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.genreData).addErrback(self.dataError)
            reactor.callLater(0, self.genreData, '')
def parsePagingUrl(self, data):
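        # Extract the AJAX "load more" continuation URL from the scraped HTML, if any.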
regex = re.compile('data-uix-load-more-href="(.*?)"')
m = regex.search(data)
if m:
if not self.page:
self.page = 1
            self.c4_browse_ajax = m.group(1).replace('&amp;', '&')
else:
if not 'load-more-text' in data:
self.c4_browse_ajax = ''
self.pages = self.page
def parsePagingUrlv3(self, jdata):
if not self.page:
self.page = 1
self.c4_browse_ajax = str(jdata.get('nextPageToken', ''))
def genreData(self, data):
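        # Parse a result page: Data API v3 JSON, legacy gdata XML, or scraped HTML listings.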
if self.apiUrlv3:
data = json.loads(data)
self.parsePagingUrlv3(data)
elif not self.apiUrl:
try:
if "load_more_widget_html" in data:
data = json.loads(data)
self.parsePagingUrl(data["load_more_widget_html"].replace("\\n","").replace("\\","").encode('utf-8'))
data = data["content_html"].replace("\\n","").replace("\\","").encode('utf-8')
else:
data = json.loads(data)["content_html"].replace("\\n","").replace("\\","").encode('utf-8')
self.parsePagingUrl(data)
except:
self.parsePagingUrl(data)
elif not self.pages:
m = re.search('totalResults>(.*?)</', data)
if m:
a = int(m.group(1))
self.pages = a // self.max_res
if a % self.max_res:
self.pages += 1
if self.pages > self.max_pages:
self.pages = self.max_pages
self.page = 1
self.filmliste = []
if self.apiUrlv3:
def getThumbnail(thumbnails):
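                # Prefer the highest-resolution thumbnail variant that is available.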
if 'standard' in thumbnails:
return str(thumbnails['standard']['url'])
elif 'high' in thumbnails:
return str(thumbnails['high']['url'])
elif 'medium' in thumbnails:
return str(thumbnails['medium']['url'])
else:
return str(thumbnails['default']['url'])
listType = re.search('ItemList|subscriptionList|activityList|playlistList|CategoryList|channelList', data.get('kind', '')) != None
for item in data.get('items', []):
if not listType:
kind = item['id'].get('kind')
else:
kind = item.get('kind')
if kind != None:
if item.has_key('snippet'):
localized = item['snippet'].has_key('localized')
if not localized:
title = str(item['snippet'].get('title', ''))
desc = str(item['snippet'].get('description', ''))
else:
loca = item['snippet']['localized']
title = str(loca.get('title', ''))
desc = str(loca.get('description', ''))
if kind.endswith('#video'):
try:
url = str(item['id']['videoId'])
img = getThumbnail(item['snippet']['thumbnails'])
except:
pass
else:
self.filmliste.append(('', title, url, img, desc, '', ''))
elif kind.endswith('#playlistItem'):
try:
url = str(item['snippet']['resourceId']['videoId'])
img = getThumbnail(item['snippet']['thumbnails'])
except:
pass
else:
self.filmliste.append(('', title, url, img, desc, '', ''))
elif kind.endswith('channel'):
if listType:
id = str(item['id'])
url = '/channel/%s/featured' % id
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, '', ''))
else:
url = str(item['id']['channelId'])
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, 'CV3', ''))
elif kind.endswith('#playlist'):
if not listType:
url = str(item['id']['playlistId'])
else:
url = str(item['id'])
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, 'PV3', ''))
elif kind.endswith('#subscription'):
url = str(item['snippet']['resourceId']['channelId'])
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, 'CV3', ''))
elif kind.endswith('#guideCategory'):
url = str(item['id'])
img = ''
self.filmliste.append(('', title, url, img, desc, 'GV3', ''))
elif kind.endswith('#activity'):
desc = str(item['snippet'].get('description', ''))
if item['snippet'].get('type') == self.actType:
try:
if self.actType == u'upload':
url = str(item['contentDetails'][self.actType]['videoId'])
else:
url = str(item['contentDetails'][self.actType]['resourceId']['videoId'])
img = getThumbnail(item['snippet']['thumbnails'])
except:
pass
else:
self.filmliste.append(('', title, url, img, desc, '', ''))
elif 'contentDetails' in item:
details = item['contentDetails']
if kind.endswith('#channel'):
if 'relatedPlaylists' in details:
                                for k, v in details['relatedPlaylists'].iteritems():
url = str(v)
img = ''
desc = ''
self.filmliste.append(('', str(k).title(), url, img, desc, 'PV3', ''))
else:
data = data.replace('\n', '')
entrys = None
list_item_cont = branded_item = shelf_item = yt_pl_thumb = list_item = pl_video_yt_uix_tile = yt_lockup_video = False
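            # Detect which HTML list layout the page uses and split it into per-item chunks.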
if self.genreName.endswith("Channels") and "branded-page-related-channels-item" in data:
branded_item = True
entrys = data.split("branded-page-related-channels-item")
elif "channels-browse-content-list-item" in data:
list_item = True
entrys = data.split("channels-browse-content-list-item")
elif "browse-list-item-container" in data:
list_item_cont = True
entrys = data.split("browse-list-item-container")
elif re.search('[" ]+shelf-item[" ]+', data):
shelf_item = True
entrys = data.split("shelf-item ")
elif "yt-pl-thumb " in data:
yt_pl_thumb = True
entrys = data.split("yt-pl-thumb ")
elif "pl-video yt-uix-tile " in data:
pl_video_yt_uix_tile = True
entrys = data.split("pl-video yt-uix-tile ")
elif "yt-lockup-video " in data:
yt_lockup_video = True
entrys = data.split("yt-lockup-video ")
if entrys and not self.propertyImageUrl:
m = re.search('"appbar-nav-avatar" src="(.*?)"', entrys[0])
property_img = m and m.group(1)
if property_img:
if property_img.startswith('//'):
property_img = 'http:' + property_img
self.propertyImageUrl = property_img
if list_item_cont or branded_item or shelf_item or list_item or yt_pl_thumb or pl_video_yt_uix_tile or yt_lockup_video:
for entry in entrys[1:]:
if 'data-item-type="V"' in entry:
vidcnt = '[Paid Content] '
elif 'data-title="[Private' in entry:
vidcnt = '[private Video] '
else:
vidcnt = ''
gid = 'S'
m = re.search('href="(.*?)" class=', entry)
                    vid = m and m.group(1).replace('&amp;','&')
if not vid:
continue
if branded_item and not '/SB' in vid:
continue
img = title = ''
if '<span class="" ' in entry:
m = re.search('<span class="" .*?>(.*?)</span>', entry)
if m:
title += decodeHtml(m.group(1))
elif 'dir="ltr" title="' in entry:
m = re.search('dir="ltr" title="(.+?)"', entry, re.DOTALL)
if m:
title += decodeHtml(m.group(1).strip())
m = re.search('data-thumb="(.*?)"', entry)
img = m and m.group(1)
else:
m = re.search('dir="ltr".*?">(.+?)</a>', entry, re.DOTALL)
if m:
title += decodeHtml(m.group(1).strip())
m = re.search('data-thumb="(.*?)"', entry)
img = m and m.group(1)
if not img:
img = self.propertyImageUrl
                    if img:
                        if img.startswith('//'):
                            img = 'http:' + img
                        img = img.replace('&amp;', '&')
desc = ''
if not vidcnt and 'list=' in vid and not '/videos?' in self.stvLink:
m = re.search('formatted-video-count-label">\s+<b>(.*?)</b>', entry)
if m:
vidcnt = '[%s Videos] ' % m.group(1)
elif vid.startswith('/watch?'):
if not vidcnt:
vid = re.search('v=(.+)', vid).group(1)
gid = ''
m = re.search('video-time">(.+?)<', entry)
if m:
dura = m.group(1)
if len(dura)==4:
vtim = '0:0%s' % dura
elif len(dura)==5:
vtim = '0:%s' % dura
else:
vtim = dura
vidcnt = '[%s] ' % vtim
m = re.search('data-name=.*?>(.*?)</.*?<li>(.*?)</li>\s+</ul>', entry)
if m:
desc += 'von ' + decodeHtml(m.group(1)) + ' · ' + m.group(2).replace('</li>', ' ').replace('<li>', '· ') + '\n'
m = re.search('dir="ltr">(.+?)</div>', entry)
if (shelf_item or list_item_cont) and not desc and not m:
m = re.search('shelf-description.*?">(.+?)</div>', entry)
if m:
desc += decodeHtml(m.group(1).strip())
splits = desc.split('<br />')
desc = ''
for split in splits:
if not '<a href="' in split:
desc += split + '\n'
if list_item and not vidcnt:
m = re.search('yt-lockup-meta-info"><li>(.*?)</ul>', entry)
if m:
vidcnt = re.sub('<.*?>', '', m.group(1))
vidcnt = '[%s] ' % vidcnt
self.filmliste.append((vidcnt, str(title), vid, img, desc, gid, ''))
reactor.callLater(0, self.checkListe)
def checkListe(self):
if len(self.filmliste) == 0:
self.filmliste.append(('',_('No contents / results found!'),'','','','',''))
self.keyLocked = True
if self.page <= 1:
self.page = 0
self.pages = self.page
self.c4_browse_ajax = ''
else:
if not self.page:
self.page = self.pages = 1
menu_len = len(self.filmliste)
self.keyLocked = False
self.ml.setList(map(self.YT_ListEntry, self.filmliste))
self.th_ThumbsQuery(self.filmliste, 1, 2, 3, None, None, self.page, self.pages, mode=self.modeShowThumb)
self.showInfos()
def dataError(self, error):
self.ml.setList(map(self.YT_ListEntry, [('',_('No contents / results found!'),'','','','','')]))
self['handlung'].setText("")
def showInfos(self):
if (self.c4_browse_ajax and not self.pages) and self.page:
self['page'].setText("%d" % self.page)
else:
self['page'].setText("%d / %d" % (self.page,max(self.page, self.pages)))
stvTitle = self['liste'].getCurrent()[0][1]
stvImage = self['liste'].getCurrent()[0][3]
desc = self['liste'].getCurrent()[0][4]
self['name'].setText(stvTitle)
self['handlung'].setText(desc)
if self.lastCover != stvImage:
self.lastCover = stvImage
self.coverHelper.getCover(stvImage)
def youtubeErr(self, error):
self['handlung'].setText(_("Unfortunately, this video can not be played!\n")+str(error))
def setVideoPrio(self):
self.videoPrio = int(config.mediaportal.youtubeprio.value)
def delFavo(self):
i = self['liste'].getSelectedIndex()
c = j = 0
l = len(self.filmliste)
try:
f1 = open(self.favo_path, 'w')
while j < l:
if j != i:
c += 1
dura = self.filmliste[j][0]
dhTitle = self.filmliste[j][1]
dhVideoId = self.filmliste[j][2]
dhImg = self.filmliste[j][3]
desc = urllib.quote(self.filmliste[j][4])
gid = self.filmliste[j][5]
wdat = '<i>%d</i><n>%s</n><v>%s</v><im>%s</im><d>%s</d><g>%s</g><desc>%s</desc>\n' % (c, dhTitle, dhVideoId, dhImg, dura, gid, desc)
f1.write(wdat)
j += 1
f1.close()
self.getFavos()
except IOError, e:
print "Fehler:\n",e
print "eCode: ",e
self['handlung'].setText(_("Error!\n")+str(e))
f1.close()
def addFavo(self):
dhTitle = self['liste'].getCurrent()[0][1]
dura = self['liste'].getCurrent()[0][0]
dhImg = self['liste'].getCurrent()[0][3]
gid = self['liste'].getCurrent()[0][5]
desc = urllib.quote(self['liste'].getCurrent()[0][4])
dhVideoId = self['liste'].getCurrent()[0][2]
if not self.favoGenre and gid in ('S','P','C'):
dura = ''
dhTitle = self.genreName + ':' + dhTitle
try:
if not fileExists(self.favo_path):
f1 = open(self.favo_path, 'w')
f_new = True
else:
f_new = False
f1 = open(self.favo_path, 'a+')
max_i = 0
if not f_new:
data = f1.read()
for m in re.finditer('<i>(\d*?)</i>.*?<v>(.*?)</v>', data):
v_found = False
i, v = m.groups()
ix = int(i)
if ix > max_i:
max_i = ix
if v == dhVideoId:
v_found = True
if v_found:
f1.close()
self.session.open(MessageBoxExt, _("Favorite already exists"), MessageBoxExt.TYPE_INFO, timeout=5)
return
wdat = '<i>%d</i><n>%s</n><v>%s</v><im>%s</im><d>%s</d><g>%s</g><desc>%s</desc>\n' % (max_i + 1, dhTitle, dhVideoId, dhImg, dura, gid, desc)
f1.write(wdat)
f1.close()
self.session.open(MessageBoxExt, _("Favorite added"), MessageBoxExt.TYPE_INFO, timeout=5)
except IOError, e:
print "Fehler:\n",e
print "eCode: ",e
self['handlung'].setText(_("Error!\n")+str(e))
f1.close()
def getFavos(self):
self.filmliste = []
try:
if not fileExists(self.favo_path):
f_new = True
else:
f_new = False
f1 = open(self.favo_path, 'r')
if not f_new:
data = f1.read()
f1.close()
for m in re.finditer('<n>(.*?)</n><v>(.*?)</v><im>(.*?)</im><d>(.*?)</d><g>(.*?)</g><desc>(.*?)</desc>', data):
n, v, img, dura, gid, desc = m.groups()
if dura and not dura.startswith('['):
dura = '[%s] ' % dura.rstrip()
self.filmliste.append((dura, n, v, img, urllib.unquote(desc), gid, ''))
if len(self.filmliste) == 0:
self.pages = self.page = 0
                self.filmliste.append(('', _('No videos found!'), '', '', '', '', ''))
self.keyLocked = True
if not f_new and len(data) > 0:
os.remove(self.favo_path)
else:
self.pages = self.page = 1
self.keyLocked = False
self.ml.setList(map(self.YT_ListEntry, self.filmliste))
self.showInfos()
except IOError, e:
print "Fehler:\n",e
print "eCode: ",e
self['handlung'].setText(_("Error!\n")+str(e))
f1.close()
def changeSort(self):
list = (
(_("Date"), ("order=date", 0)),
(_("Rating"), ("order=rating", 1)),
(_("Relevance"), ("order=relevance", 2)),
(_("Title"), ("order=title", 3)),
(_("Video count"), ("order=videoCount", 4)),
(_("View count"), ("order=viewCount", 5))
)
self.session.openWithCallback(self.cb_handleSortParam, ChoiceBoxExt, title=_("Sort by"), list = list, selection=config.mediaportal.yt_param_time_idx.value)
def cb_handleSortParam(self, answer):
p = answer and answer[1]
if p != None:
config.mediaportal.yt_param_time_idx.value = p[1]
self.stvLink = re.sub('order=([a-zA-Z]+)', p[0], self.stvLink)
self.keckse.clear()
self.c4_browse_ajax = ''
self.url_c4_browse_ajax_list = ['']
self.page = self.pages = 0
self.loadPageData()
def keyRed(self):
if not self.key_sort:
self.keyCancel()
elif not self.keyLocked:
self.changeSort()
def keyUpRepeated(self):
if self.keyLocked:
return
self['liste'].up()
def keyDownRepeated(self):
if self.keyLocked:
return
self['liste'].down()
def key_repeatedUp(self):
if self.keyLocked:
return
self.showInfos()
def keyLeftRepeated(self):
if self.keyLocked:
return
self['liste'].pageUp()
def keyRightRepeated(self):
if self.keyLocked:
return
self['liste'].pageDown()
def keyUp(self):
if self.keyLocked:
return
i = self['liste'].getSelectedIndex()
if not i:
self.keyPageDownFast()
self['liste'].up()
self.showInfos()
def keyDown(self):
if self.keyLocked:
return
i = self['liste'].getSelectedIndex()
l = len(self.filmliste) - 1
if l == i:
self.keyPageUpFast()
self['liste'].down()
self.showInfos()
def keyTxtPageUp(self):
if self.keyLocked:
return
self['handlung'].pageUp()
def keyTxtPageDown(self):
if self.keyLocked:
return
self['handlung'].pageDown()
def keyPageUpFast(self,step=1):
if self.keyLocked:
return
oldpage = self.page
if not self.c4_browse_ajax and not self.apiUrlv3:
if not self.page or not self.pages:
return
if (self.page + step) <= self.pages:
self.page += step
self.start_idx += self.max_res * step
else:
self.page = 1
self.start_idx = 1
elif self.c4_browse_ajax:
self.url_c4_browse_ajax_list.append(self.c4_browse_ajax)
self.page += 1
else:
return
if oldpage != self.page:
self.loadPageData()
def keyPageDownFast(self,step=1):
if self.keyLocked:
return
oldpage = self.page
if not self.c4_browse_ajax and not self.apiUrlv3:
if not self.page or not self.pages:
return
if (self.page - step) >= 1:
self.page -= step
self.start_idx -= self.max_res * step
else:
self.page = self.pages
self.start_idx = self.max_res * (self.pages - 1) + 1
else:
if self.page <= 1:
return
self.url_c4_browse_ajax_list.pop()
self.c4_browse_ajax = self.url_c4_browse_ajax_list[-1]
self.page -= 1
if oldpage != self.page:
self.loadPageData()
def key_1(self):
self.keyPageDownFast(2)
def keyGreen(self):
if self.keyLocked:
return
if self.favoGenre:
self.delFavo()
else:
self.addFavo()
def key_4(self):
self.keyPageDownFast(5)
def key_7(self):
self.keyPageDownFast(10)
def key_3(self):
self.keyPageUpFast(2)
def key_6(self):
self.keyPageUpFast(5)
def key_9(self):
self.keyPageUpFast(10)
def keyOK(self):
if self.keyLocked:
return
url = self['liste'].getCurrent()[0][2]
gid = self['liste'].getCurrent()[0][5]
if gid == 'P' or gid == 'C':
dhTitle = 'Videos: ' + self['liste'].getCurrent()[0][1]
genreurl = self['liste'].getCurrent()[0][2]
if genreurl.startswith('http'):
genreurl = genreurl.replace('v=2', '')
else:
genreurl = 'gdata.youtube.com/feeds/api/playlists/'+self['liste'].getCurrent()[0][2]+'?'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif gid == 'CV3':
            dhTitle = 'Results: ' + self['liste'].getCurrent()[0][1]
            genreurl = 'https://www.googleapis.com/youtube/v3/search?part=snippet%2Cid&type=video&order=date&channelId='+self['liste'].getCurrent()[0][2]+'&key=%KEY%'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif gid == 'GV3':
            dhTitle = 'Results: ' + self['liste'].getCurrent()[0][1]
hl = param_hl[config.mediaportal.yt_param_meta_idx.value]
genreurl = 'https://www.googleapis.com/youtube/v3/channels?part=snippet&categoryId='+self['liste'].getCurrent()[0][2]+hl+'&key=%KEY%'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif gid == 'PV3':
dhTitle = 'Videos: ' + self['liste'].getCurrent()[0][1]
genreurl = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&order=date&playlistId='+self['liste'].getCurrent()[0][2]+'&key=%KEY%'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif not self.apiUrl or gid == 'S':
global picker_lang
if url.startswith('/playlist?'):
m = re.search('list=(.+)', url)
if m:
url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&order=date&key=' % m.group(1)
url += '%KEY%'
dhTitle = 'Playlist: ' + self['liste'].getCurrent()[0][1]
self.session.open(YT_ListScreen, url, dhTitle)
elif url.startswith('/user/') or url.startswith('/channel/'):
                url = url.replace('&amp;', '&')
if '?' in url:
url += '&'
else:
url += '?'
url = self.baseUrl + url
dhTitle = self.genreName + ':' + self['liste'].getCurrent()[0][1]
picker_lang = ''
self.session.open(YT_ListScreen, url, dhTitle)
elif url.startswith('/watch?v='):
if not 'list=' in url or '/videos?' in self.stvLink:
url = re.search('v=(.+)', url).group(1)
listitem = self.filmliste[self['liste'].getSelectedIndex()]
liste = [(listitem[0], listitem[1], url, listitem[3], listitem[4], listitem[5], listitem[6])]
self.session.openWithCallback(
self.setVideoPrio,
YoutubePlayer,
liste,
0,
playAll = False,
listTitle = self.genreName,
plType='local',
title_inr=1,
showCover=self.showCover
)
else:
url = re.search('list=(.+)', url).group(1)
url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&order=date&key=' % url
url += '%KEY%'
dhTitle = 'Playlist: ' + self['liste'].getCurrent()[0][1]
self.session.open(YT_ListScreen, url, dhTitle)
else:
self.session.openWithCallback(
self.setVideoPrio,
YoutubePlayer,
self.filmliste,
self['liste'].getSelectedIndex(),
playAll = self.playAll,
listTitle = self.genreName,
plType='local',
title_inr=1,
showCover=self.showCover
)
elif not self['liste'].getCurrent()[0][6]:
self.session.openWithCallback(
self.setVideoPrio,
YoutubePlayer,
self.filmliste,
self['liste'].getSelectedIndex(),
playAll = self.playAll,
listTitle = self.genreName,
plType='local',
title_inr=1,
showCover=self.showCover
)
def youtubeExit(self):
self.keckse.clear()
del self.filmliste[:]
class YT_Oauth2:
OAUTH2_URL = 'https://accounts.google.com/o/oauth2'
CLIENT_ID = mp_globals.yt_i
CLIENT_SECRET = mp_globals.yt_s
SCOPE = '&scope=https://www.googleapis.com/auth/youtube'
GRANT_TYPE = '&grant_type=http://oauth.net/grant_type/device/1.0'
TOKEN_PATH = '/etc/enigma2/mp_yt-access-tokens.json'
accessToken = None
def __init__(self):
import os.path
self._interval = None
self._code = None
self._expiresIn = None
self._refreshTimer = None
self.autoRefresh = False
self.abortPoll = False
self.waitingBox = None
self.session = None
if not config.mediaportal.yt_refresh_token.value:
self._recoverToken()
def _recoverToken(self):
if os.path.isfile(self.TOKEN_PATH):
with open(self.TOKEN_PATH) as data_file:
data = json.load(data_file)
config.mediaportal.yt_refresh_token.value = data['refresh_token'].encode('utf-8')
config.mediaportal.yt_refresh_token.save()
return True
def requestDevCode(self, session):
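        # OAuth2 device flow, step 1: request a device/user code pair from Google.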
self.session = session
postData = self.CLIENT_ID + self.SCOPE
twAgentGetPage(self.OAUTH2_URL+'/device/code', method='POST', postdata=postData, headers={'Content-Type': 'application/x-www-form-urlencoded'}).addCallback(self._cb_requestDevCode, False).addErrback(self._cb_requestDevCode)
def _cb_requestDevCode(self, data, error=True):
if error:
self.session.open(MessageBoxExt, _("Error: Unable to request the Device code"), MessageBoxExt.TYPE_ERROR)
printl(_("Error: Unable to request the Device code"),self,'E')
else:
googleData = json.loads(data)
self._interval = googleData['interval']
self._code = '&code=%s' % googleData['device_code'].encode('utf-8')
self._expiresIn = googleData['expires_in']
            self.session.openWithCallback(self.cb_request, MessageBoxExt, _("You have to visit:\n{url}\nand enter the code: {code}\nCancel action?").format(url=googleData["verification_url"].encode('utf-8'), code=googleData["user_code"].encode('utf-8')), type = MessageBoxExt.TYPE_YESNO, default = False)
def cb_request(self, answer):
if answer is False:
self.waitingBox = self.session.openWithCallback(self.cb_cancelPoll, MessageBoxExt, _("Waiting for response from the server.\nCancel action?"), type = MessageBoxExt.TYPE_YESNO, default = True, timeout = self._expiresIn - 30)
self.abortPoll = False
reactor.callLater(self._interval, self._pollOauth2Server)
def cb_cancelPoll(self, answer):
if answer is True:
self.abortPoll = True
def _pollOauth2Server(self):
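        # OAuth2 device flow, step 2: poll the token endpoint until the user grants access.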
self._tokenExpired()
postData = self.CLIENT_ID + self.CLIENT_SECRET + self._code + self.GRANT_TYPE
twAgentGetPage(self.OAUTH2_URL+'/token', method='POST', postdata=postData, headers={'Content-Type': 'application/x-www-form-urlencoded'}).addCallback(self._cb_poll, False).addErrback(self._cb_poll)
def _cb_poll(self, data, error=True):
if error:
self.waitingBox.cancel()
self.session.open(MessageBoxExt, _('Error: Unable to get tokens!'), MessageBoxExt.TYPE_ERROR)
printl(_('Error: Unable to get tokens!'),self,'E')
else:
try:
tokenData = json.loads(data)
except:
self.waitingBox.cancel()
self.session.open(MessageBoxExt, _('Error: Unable to get tokens!'), MessageBoxExt.TYPE_ERROR)
printl('json data error:%s' % str(data),self,'E')
else:
if not tokenData.get('error',''):
self.accessToken = tokenData['access_token'].encode('utf-8')
                    config.mediaportal.yt_refresh_token.value = tokenData['refresh_token'].encode('utf-8')
config.mediaportal.yt_refresh_token.save()
self._expiresIn = tokenData['expires_in']
self._startRefreshTimer()
f = open(self.TOKEN_PATH, 'w')
f.write(json.dumps(tokenData))
f.close()
self.waitingBox.cancel()
                    self.session.open(MessageBoxExt, _('Access granted :)\nFor safety you should create backups of enigma2 settings and \'/etc/enigma2/mp_yt-access-tokens.json\'.\nThe tokens are valid until they are revoked in your Google Account.'), MessageBoxExt.TYPE_INFO)
elif not self.abortPoll:
print tokenData.get('error','').encode('utf-8')
reactor.callLater(self._interval, self._pollOauth2Server)
def refreshToken(self, session, skip=False):
self.session = session
if not skip:
self._tokenExpired()
if config.mediaportal.yt_refresh_token.value:
postData = self.CLIENT_ID + self.CLIENT_SECRET + '&refresh_token=%s&grant_type=refresh_token' % config.mediaportal.yt_refresh_token.value
d = twAgentGetPage(self.OAUTH2_URL+'/token', method='POST', postdata=postData, headers={'Content-Type': 'application/x-www-form-urlencoded'}).addCallback(self._cb_refresh, False).addErrback(self._cb_refresh)
return d
def _cb_refresh(self, data, error=True):
if error:
printl(_('Error: Unable to refresh token!'),self,'E')
return data
else:
try:
tokenData = json.loads(data)
self.accessToken = tokenData['access_token'].encode('utf-8')
self._expiresIn = tokenData['expires_in']
except:
printl('json data error!',self,'E')
return ""
else:
self._startRefreshTimer()
return self.accessToken
def revokeToken(self):
if config.mediaportal.yt_refresh_token.value:
twAgentGetPage(self.OAUTH2_URL+'/revoke?token=%s' % config.mediaportal.yt_refresh_token.value).addCallback(self._cb_revoke, False).addErrback(self._cb_revoke)
def _cb_revoke(self, data, error=True):
if error:
printl('Error: Unable to revoke!',self,'E')
def _startRefreshTimer(self):
if self._refreshTimer != None and self._refreshTimer.active():
self._refreshTimer.cancel()
self._refreshTimer = reactor.callLater(self._expiresIn - 10, self._tokenExpired)
def _tokenExpired(self):
if self._refreshTimer != None and self._refreshTimer.active():
self._refreshTimer.cancel()
self._expiresIn = 0
self.accessToken = None
def getAccessToken(self):
if self.accessToken == None:
return ""
else:
return self.accessToken
yt_oauth2 = YT_Oauth2() | gpl-2.0 | -8,659,979,360,545,974,000 | 35.764026 | 405 | 0.650682 | false |
south-coast-science/scs_core | src/scs_core/position/nmea/gpgga.py | 1 | 4215 | """
Created on 30 Dec 2016
@author: Bruno Beloff ([email protected])
Global positioning system fix data
$xxGGA,time,lat,NS,long,EW,quality,numSV,HDOP,alt,M,sep,M,diffAge,diffStation*cs
example sentence:
$GPGGA,092725.00,4717.11399,N,00833.91590,E,1,08,1.01,499.6,M,48.0,M,,*5B
example values:
GPGGA:{time:GPTime:{time:141058.00}, loc:GPLoc:{lat:5049.38432, ns:N, lng:00007.37801, ew:W}, quality:2, num_sv:06,
hdop:3.10, alt:37.5, sep:45.4, diff_age:None, diff_station:0000}
GPGGA:{time:GPTime:{time:140047.00}, loc:GPLoc:{lat:None, ns:None, lng:None, ew:None}, quality:0, num_sv:00,
hdop:99.99, alt:None, sep:None, diff_age:None, diff_station:None}
https://www.nmea.org
https://en.wikipedia.org/wiki/NMEA_0183
"""
from scs_core.position.nmea.gploc import GPLoc
from scs_core.position.nmea.gptime import GPTime
from scs_core.position.nmea.nmea_sentence import NMEASentence
# --------------------------------------------------------------------------------------------------------------------
class GPGGA(NMEASentence):
"""
classdocs
"""
MESSAGE_IDS = ("$GNGGA", "$GPGGA")
QUALITY_NO_FIX = 0
QUALITY_AUTONOMOUS_GNSS = 1
QUALITY_DIFFERENTIAL_GNSS = 2
QUALITY_ESTIMATED_FIX = 6
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct(cls, r):
if r.message_id not in cls.MESSAGE_IDS:
raise TypeError("invalid sentence:%s" % r)
time = GPTime(r.str(1))
lat = r.str(2)
ns = r.str(3)
lng = r.str(4)
ew = r.str(5)
loc = GPLoc(lat, ns, lng, ew)
quality = r.int(6)
num_sv = r.int(7)
hdop = r.float(8, 3)
alt = r.float(9, 2)
sep = r.float(11, 2)
diff_age = r.float(13, 3)
diff_station = r.str(14)
return GPGGA(r.message_id, time, loc, quality, num_sv, hdop, alt, sep, diff_age, diff_station)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, message_id, time, loc, quality, num_sv, hdop, alt, sep, diff_age, diff_station):
"""
Constructor
"""
super().__init__(message_id)
self.__time = time # GPTime
self.__loc = loc # GPLoc
self.__quality = quality # int
self.__num_sv = num_sv # int
self.__hdop = hdop # float(2)
self.__alt = alt # float(1) - altitude (metres)
self.__sep = sep # float(1) - geoid separation (metres)
self.__diff_age = diff_age # float(3) - age of differential corrections (seconds)
self.__diff_station = diff_station # string - ID of station providing differential corrections
# ----------------------------------------------------------------------------------------------------------------
@property
def time(self):
return self.__time
@property
def loc(self):
return self.__loc
@property
def quality(self):
return self.__quality
@property
def num_sv(self):
return self.__num_sv
@property
def hdop(self):
return self.__hdop
@property
def alt(self):
return self.__alt
@property
def sep(self):
return self.__sep
@property
def diff_age(self):
return self.__diff_age
@property
def diff_station(self):
return self.__diff_station
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "GPGGA:{source:%s, time:%s, loc:%s, quality:%s, num_sv:%s, hdop:%s, alt:%s, sep:%s, " \
"diff_age:%s, diff_station:%s}" % \
(self.source, self.time, self.loc, self.quality, self.num_sv, self.hdop, self.alt, self.sep,
self.diff_age, self.diff_station)
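# Example usage (illustrative sketch, not part of the original module): construct()
# expects a reader `r` exposing str()/int()/float() field accessors over a sentence
# such as "$GPGGA,092725.00,4717.11399,N,00833.91590,E,1,08,1.01,499.6,M,48.0,M,,*5B";
# the resulting object would report quality == GPGGA.QUALITY_AUTONOMOUS_GNSS (fix type 1).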
| mit | 7,338,479,952,412,405,000 | 27.869863 | 118 | 0.474259 | false |
knights-lab/SHOGUN | shogun/utils/tree.py | 1 | 2735 | from collections import defaultdict
import csv
class Taxonomy:
def __init__(self, filename: str):
self.tax = self.parse_taxonomy(filename)
@classmethod
def parse_taxonomy(cls, filename: str) -> dict:
with open(filename) as inf:
csv_inf = csv.reader(inf, delimiter='\t')
taxa_map = dict(csv_inf)
return taxa_map
def __call__(self, id: str):
return self.tax[id]
class LCATaxonomy:
def __init__(self,
node_id_to_taxa_name: dict,
ref_to_node_id_ix_level: dict,
ref_to_taxa_name: dict,
node_id_to_ancestors
):
self.node_id_to_taxa_name = node_id_to_taxa_name
self.ref_to_node_id_ix_level = ref_to_node_id_ix_level
self.ref_to_taxa_name = ref_to_taxa_name
self.num_nodes = len(self.node_id_to_taxa_name)
self.node_id_to_ancestors = node_id_to_ancestors
TAX_LEVELS = ['k', 'p', 'c', 'o', 'f', 'g', 's', 't']
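# Infinitely nesting defaultdict: every missing key materializes a new subtree node.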
def tree(): return defaultdict(tree)
def add_tree(t, path):
for node in path.split(';'):
t = t[node]
def build_tree_from_tax_file(filename: str) -> LCATaxonomy:
with open(filename) as inf:
csv_inf = csv.reader(inf, delimiter='\t')
ref_to_taxa_name = dict(csv_inf)
taxa_name_to_node_id_ix_level = {"root": (0, 0, 0)}
current_node_id = 1
node_id_to_ancestors = [{0}]
for ix, (ref, taxa_name) in enumerate(ref_to_taxa_name.items()):
split = taxa_name.split(";")
ancestors = [0]
for level in range(len(split)):
taxa_name = ";".join(split[:level+1])
if taxa_name in taxa_name_to_node_id_ix_level:
found_node_id, _, _ = taxa_name_to_node_id_ix_level[taxa_name]
# Check if blank level
if len(split[level]) > 3:
ancestors.append(found_node_id)
else:
taxa_name_to_node_id_ix_level[taxa_name] = (current_node_id, ix, level + 1)
# Check if blank level
if len(split[level]) > 3:
ancestors.append(current_node_id)
current_node_id += 1
node_id_to_ancestors.append(set(ancestors))
ref_to_node_id_ix_level = {ref: taxa_name_to_node_id_ix_level[taxa_name] for ref, taxa_name in ref_to_taxa_name.items()}
node_id_to_taxa_name = {node_id: taxa_name for taxa_name, (node_id, ix, level) in taxa_name_to_node_id_ix_level.items()}
return LCATaxonomy(
node_id_to_taxa_name=node_id_to_taxa_name,
ref_to_node_id_ix_level=ref_to_node_id_ix_level,
ref_to_taxa_name=ref_to_taxa_name,
node_id_to_ancestors=node_id_to_ancestors
)
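# Example (illustrative sketch; the file name is hypothetical): given a two-column TSV
# mapping reference ids to semicolon-delimited lineages such as "k__Bacteria;p__Firmicutes",
#   taxonomy = build_tree_from_tax_file("refs.tax")
#   taxonomy.ref_to_node_id_ix_level["ref_1"]    # -> (node_id, row index, level)
#   taxonomy.node_id_to_ancestors[node_id]       # -> set of ancestor node ids (root is 0)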
| agpl-3.0 | -8,318,682,223,261,966,000 | 34.064103 | 124 | 0.564534 | false |
google-research/task_adaptation | task_adaptation/data/caltech_test.py | 1 | 2376 | # coding=utf-8
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for caltech.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from task_adaptation.data import caltech
from task_adaptation.data import data_testing_lib
import tensorflow.compat.v1 as tf
class Caltech101Test(data_testing_lib.BaseVTABDataTest):
"""See base class for usage and test descriptions."""
def setUp(self):
self.dataset = caltech.Caltech101()
super(Caltech101Test, self).setUp(
data_wrapper=self.dataset,
num_classes=102, # N.b. Caltech101 has 102 classes (1 for background).
expected_num_samples=dict(
train=2754,
val=306,
trainval=2754 + 306, # 3060 (30 images / class).
test=6084,
train800val200=1000,
train800=800,
val200=200,
),
required_tensors_shapes={
"image": (None, None, 3),
"label": (),
})
def test_all_classes_in_train(self):
"""Tests that the train set has at least one element in every class."""
# Runs over the small validation set, rather than the full train set.
# For each class, there should be fewer than 30 items for there to be at
# least one in the training set.
ds = self.dataset.get_tf_data("val", batch_size=1, epochs=1)
ds.repeat(1)
next_element = tf.data.make_one_shot_iterator(ds).get_next()
class_count = collections.defaultdict(int)
with tf.Session() as sess:
while True:
try:
value = sess.run(next_element)
class_count[value["label"][0]] += 1
except tf.errors.OutOfRangeError:
break
self.assertGreater(30, max(class_count.values()))
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 1,945,133,791,800,647,400 | 32.464789 | 79 | 0.659512 | false |
ramovsky/lunch | tests/test_config.py | 1 | 1313 | import unittest
from tempfile import NamedTemporaryFile
from lunch.session import Session, SessionFinished, Config, User
class TestConfig(unittest.TestCase):
def test_add_place(self):
config = Config()
config.add_place('Sisaket')
        self.assertEqual(1, len(config))
        config.add_place('Indian')
        self.assertEqual(2, len(config))
        config.add_place('Sisaket')
        self.assertEqual(2, len(config))
def test_update_place(self):
config = Config()
config.add_place('Sisaket')
self.assertAlmostEqual(.5, config.places['Sisaket'])
config.add_place('Sisaket', .7)
self.assertAlmostEqual(.7, config.places['Sisaket'])
def test_save_load(self):
file = NamedTemporaryFile().name
config = Config(file)
config.add_place('Sisaket')
config.save()
places = config.places
config = Config(file)
config.load()
self.assertEqual(places, config.places)
def test_history_crop(self):
file = NamedTemporaryFile().name
config = Config(file)
for i in range(10):
config.add_winner('Sisaket')
config.save()
config = Config(file)
config.load()
self.assertEqual(7, len(config._history))
| mit | -7,776,214,906,230,288,000 | 29.534884 | 64 | 0.621478 | false |
jpfairbanks/streaming | moving_average.py | 1 | 2010 | from __future__ import print_function
"""
This module produces a stream of random variables from a moving average model.
The first command line argument is the number of samples; a negative value means infinite.
The second argument is the window size. The moving average is uniform over the window.
The third argument is the destination for the data; it should be a filename.
Output for the user goes to stderr and the generated data goes to the file object fp,
which defaults to stdout.
"""
import sys
from time import time
import stream
import random as rand
from stream import chop, repeatcall, item
# handle command line flags
view_len = int(sys.argv[1])
print("num_samples: %d" % view_len, file=sys.stderr)
if view_len < 0:
print("infinite samples", file=sys.stderr)
win_len = int(sys.argv[2])
print("window_length: %d" % win_len, file=sys.stderr)
if len(sys.argv) < 4:
fp = sys.stdout
else:
try:
fp = open(sys.argv[3], 'w')
except IOError:
print("couldn't open file; using stdout")
fp = sys.stdout
print(str(fp), file=sys.stderr)
#define what we need to do moving averages
weights = [1.0/win_len for i in range(win_len)]
def inner(window):
""" Computes the inner product of window and weights.
weights must be defined outside to avoid a useless rezipping
when using this in a stream.
"""
acc = sum((i*w for i,w in zip(window, weights)))
return acc
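# For example, with win_len = 3 the weights are [1/3, 1/3, 1/3], so
# inner([3.0, 6.0, 9.0]) == 6.0, the arithmetic mean of the window.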
#get an infinite stream of uniform random floats
zsource = repeatcall(rand.random)
# WIRING
# make our moving average window
winstream = ( zsource >> chop(win_len) )
# compute the windowed average
xstream = ( winstream >> stream.map(inner) )
# EXECUTING
if view_len > 0:
ts = time()
for i in range(view_len):
fp.write(str(next(xstream.iterator))+'\n')
print("time: %f" % (time()-ts), file=sys.stderr)
print("items_per_sec: %f" % (view_len/(time()-ts)), file=sys.stderr)
if view_len < 0:
while True:
fp.write(str(next(xstream.iterator))+'\n')
| bsd-3-clause | -7,765,423,684,961,808,000 | 29.923077 | 86 | 0.687562 | false |
MarkusHackspacher/unknown-horizons | horizons/world/building/settler.py | 1 | 19857 | # ###################################################
# Copyright (C) 2008-2017 The Unknown Horizons Team
# [email protected]
# This file is part of Unknown Horizons.
#
# Unknown Horizons is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ###################################################
import logging
import math
from horizons.command.building import Build
from horizons.command.production import ToggleActive
from horizons.component.collectingcomponent import CollectingComponent
from horizons.component.storagecomponent import StorageComponent
from horizons.constants import BUILDINGS, GAME, RES, TIER
from horizons.gui.tabs import SettlerOverviewTab
from horizons.messaging import (
AddStatusIcon, RemoveStatusIcon, SettlerInhabitantsChanged, SettlerUpdate,
UpgradePermissionsChanged)
from horizons.scheduler import Scheduler
from horizons.util.pathfinding.pather import StaticPather
from horizons.util.python.callback import Callback
from horizons.world.building.buildable import BuildableRect, BuildableSingle
from horizons.world.building.building import BasicBuilding
from horizons.world.building.buildingresourcehandler import BuildingResourceHandler
from horizons.world.production.producer import Producer
from horizons.world.production.production import SettlerProduction
from horizons.world.status import SettlerNotConnectedStatus, SettlerUnhappyStatus
class SettlerRuin(BasicBuilding, BuildableSingle):
"""Building that appears when a settler got unhappy. The building does nothing.
NOTE: Inheriting from BuildableSingle is necessary, cause it's built via Build Command, which
checks for buildability
"""
buildable_upon = True
walkable = True
class Settler(BuildableRect, BuildingResourceHandler, BasicBuilding):
"""Represents a settlers house, that uses resources and creates inhabitants."""
log = logging.getLogger("world.building.settler")
production_class = SettlerProduction
tabs = (SettlerOverviewTab, )
default_level_on_build = 0
def __init__(self, x, y, owner, instance=None, **kwargs):
kwargs['level'] = self.__class__.default_level_on_build # settlers always start in first level
super().__init__(x=x, y=y, owner=owner, instance=instance, **kwargs)
def __init(self, loading=False, last_tax_payed=0):
self.level_max = TIER.CURRENT_MAX # for now
self._update_level_data(loading=loading, initial=True)
self.last_tax_payed = last_tax_payed
UpgradePermissionsChanged.subscribe(self._on_change_upgrade_permissions, sender=self.settlement)
self._upgrade_production = None # referenced here for quick access
def initialize(self):
super().initialize()
SettlerInhabitantsChanged.broadcast(self, self.inhabitants)
happiness = self.__get_data("happiness_init_value")
if happiness is not None:
self.get_component(StorageComponent).inventory.alter(RES.HAPPINESS, happiness)
if self.has_status_icon:
self.get_component(StorageComponent).inventory.add_change_listener(self._update_status_icon)
# give the user a month (about 30 seconds) to build a main square in range
if self.owner.is_local_player:
Scheduler().add_new_object(self._check_main_square_in_range, self, Scheduler().get_ticks_of_month(), loops=-1)
self.__init()
self.run()
def save(self, db):
super().save(db)
db("INSERT INTO settler(rowid, inhabitants, last_tax_payed) VALUES (?, ?, ?)",
self.worldid, self.inhabitants, self.last_tax_payed)
remaining_ticks = Scheduler().get_remaining_ticks(self, self._tick)
db("INSERT INTO remaining_ticks_of_month(rowid, ticks) VALUES (?, ?)",
self.worldid, remaining_ticks)
def load(self, db, worldid):
super().load(db, worldid)
self.inhabitants, last_tax_payed = \
db("SELECT inhabitants, last_tax_payed FROM settler WHERE rowid=?", worldid)[0]
remaining_ticks = \
db("SELECT ticks FROM remaining_ticks_of_month WHERE rowid=?", worldid)[0][0]
self.__init(loading=True, last_tax_payed=last_tax_payed)
self._load_upgrade_data(db)
SettlerUpdate.broadcast(self, self.level, self.level)
self.run(remaining_ticks)
def _load_upgrade_data(self, db):
"""Load the upgrade production and relevant stored resources"""
upgrade_material_prodline = SettlerUpgradeData.get_production_line_id(self.level + 1)
if not self.get_component(Producer).has_production_line(upgrade_material_prodline):
return
self._upgrade_production = self.get_component(Producer)._get_production(upgrade_material_prodline)
# readd the res we already had, they can't be loaded since storage slot limits for
# the special resources aren't saved
resources = {}
for resource, amount in db.get_storage_rowids_by_ownerid(self.worldid):
resources[resource] = amount
for res, amount in self._upgrade_production.get_consumed_resources().items():
# set limits to what we need
self.get_component(StorageComponent).inventory.add_resource_slot(res, abs(amount))
if res in resources:
self.get_component(StorageComponent).inventory.alter(res, resources[res])
self._upgrade_production.add_production_finished_listener(self.level_up)
self.log.debug("%s: Waiting for material to upgrade from %s", self, self.level)
def _add_upgrade_production_line(self):
"""
Add a production line that gets the necessary upgrade material.
When the production finishes, it calls upgrade_materials_collected.
"""
upgrade_material_prodline = SettlerUpgradeData.get_production_line_id(self.level + 1)
self._upgrade_production = self.get_component(
Producer).add_production_by_id(upgrade_material_prodline)
self._upgrade_production.add_production_finished_listener(self.level_up)
# drive the car out of the garage to make space for the building material
for res, amount in self._upgrade_production.get_consumed_resources().items():
self.get_component(StorageComponent).inventory.add_resource_slot(res, abs(amount))
self.log.debug("%s: Waiting for material to upgrade from %s", self, self.level)
def remove(self):
SettlerInhabitantsChanged.broadcast(self, -self.inhabitants)
UpgradePermissionsChanged.unsubscribe(self._on_change_upgrade_permissions, sender=self.settlement)
super().remove()
@property
def upgrade_allowed(self):
return self.session.world.get_settlement(self.position.origin).upgrade_permissions[self.level]
def _on_change_upgrade_permissions(self, message):
production = self._upgrade_production
if production is not None:
if production.is_paused() == self.upgrade_allowed:
ToggleActive(self.get_component(Producer), production).execute(self.session, True)
@property
def happiness(self):
difficulty = self.owner.difficulty
result = int(round(difficulty.extra_happiness_constant + self.get_component(StorageComponent).inventory[RES.HAPPINESS] * difficulty.happiness_multiplier))
return max(0, min(result, self.get_component(StorageComponent).inventory.get_limit(RES.HAPPINESS)))
@property
def capacity_utilization(self):
# this concept does not make sense here, so spare us the calculations
return 1.0
def _update_level_data(self, loading=False, initial=False):
"""Updates all settler-related data because of a level change or as initialization
@param loading: whether called to set data after loading
@param initial: whether called to set data initially
"""
# taxes, inhabitants
self.tax_base = self.session.db.get_settler_tax_income(self.level)
self.inhabitants_max = self.session.db.get_tier_inhabitants_max(self.level)
self.inhabitants_min = self.session.db.get_tier_inhabitants_min(self.level)
#TODO This crops inhabitants at level down, but when can they exceed the limit?
if self.inhabitants > self.inhabitants_max:
self.inhabitants = self.inhabitants_max
# consumption:
# Settler productions are specified to be disabled by default in the db, so we can enable
# them here per level. Production data is save/loaded, so we don't need to do anything in that case
if not loading:
prod_comp = self.get_component(Producer)
current_lines = prod_comp.get_production_lines()
for prod_line in prod_comp.get_production_lines_by_level(self.level):
if not prod_comp.has_production_line(prod_line):
prod_comp.add_production_by_id(prod_line)
# cross out the new lines from the current lines, so only the old ones remain
if prod_line in current_lines:
current_lines.remove(prod_line)
for line in current_lines[:]: # iterate over copy for safe removal
# all lines, that were added here but are not used due to the current level
# NOTE: this contains the upgrade material production line
prod_comp.remove_production_by_id(line)
# Make sure to set _upgrade_production to None in case we are removing it
if self._upgrade_production is not None and line == self._upgrade_production.get_production_line_id():
self._upgrade_production = None
if not initial:
# update instance graphics
			# only do it when something has actually changed
			# TODO: this probably also isn't necessary on loading, but it's
			# not touched before the release (2012.1)
self.update_action_set_level(self.level)
def run(self, remaining_ticks=None):
"""Start regular tick calls"""
interval = self.session.timer.get_ticks(GAME.INGAME_TICK_INTERVAL)
run_in = remaining_ticks if remaining_ticks is not None else interval
Scheduler().add_new_object(self._tick, self, run_in=run_in, loops=-1, loop_interval=interval)
def _tick(self):
"""Here we collect the functions, that are called regularly (every "month")."""
self.pay_tax()
self.inhabitant_check()
self.level_check()
def pay_tax(self):
"""Pays the tax for this settler"""
# the money comes from nowhere, settlers seem to have an infinite amount of money.
# see http://wiki.unknown-horizons.org/w/Settler_taxing
# calc taxes http://wiki.unknown-horizons.org/w/Settler_taxing#Formulae
happiness_tax_modifier = 0.5 + (float(self.happiness) / 70.0)
inhabitants_tax_modifier = float(self.inhabitants) / self.inhabitants_max
taxes = self.tax_base * self.settlement.tax_settings[self.level] * happiness_tax_modifier * inhabitants_tax_modifier
real_taxes = int(round(taxes * self.owner.difficulty.tax_multiplier))
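		# illustrative numbers (not from the balancing tables): tax_base=12,
		# tax setting 1.0 and happiness 70 give modifier 0.5 + 70/70 = 1.5, and
		# a full house gives modifier 1.0, so taxes = 12 * 1.0 * 1.5 * 1.0 = 18
		# before the difficulty multiplier is applied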
self.settlement.owner.get_component(StorageComponent).inventory.alter(RES.GOLD, real_taxes)
self.last_tax_payed = real_taxes
# decrease happiness http://wiki.unknown-horizons.org/w/Settler_taxing#Formulae
difference = 1.0 - self.settlement.tax_settings[self.level]
happiness_decrease = 10 * difference - 6 * abs(difference)
happiness_decrease = int(round(happiness_decrease))
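		# e.g. a tax setting of 0.5 gives difference 0.5 -> 10*0.5 - 6*0.5 = +2,
		# while 1.5 gives difference -0.5 -> -5 - 3 = -8; despite its name,
		# a positive happiness_decrease raises happiness when passed to alter()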
# NOTE: this formula was actually designed for a different use case, where the happiness
# is calculated from the number of available goods -/+ a certain tax factor.
# to simulate the more dynamic, currently implemented approach (where every event changes
# the happiness), we simulate discontent of taxes by this:
happiness_decrease -= 6
self.get_component(StorageComponent).inventory.alter(RES.HAPPINESS, happiness_decrease)
self._changed()
self.log.debug("%s: pays %s taxes, -happy: %s new happiness: %s", self, real_taxes,
happiness_decrease, self.happiness)
def inhabitant_check(self):
"""Checks whether or not the population of this settler should increase or decrease"""
sad = self.session.db.get_lower_happiness_limit()
happy = self.session.db.get_upper_happiness_limit()
change = 0
if self.happiness > happy and self.inhabitants < self.inhabitants_max:
change = 1
self.log.debug("%s: inhabitants increase to %s", self, self.inhabitants)
elif self.happiness < sad and self.inhabitants > 1:
change = -1
self.log.debug("%s: inhabitants decrease to %s", self, self.inhabitants)
if change != 0:
# see http://wiki.unknown-horizons.org/w/Supply_citizens_with_resources
self.get_component(Producer).alter_production_time(
6.0 / 7.0 * math.log(1.5 * (self.inhabitants + 1.2)))
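			# illustrative: with 2 inhabitants the factor is
			# 6/7 * ln(1.5 * (2 + 1.2)) ~= 1.34; it grows logarithmically
			# with the number of inhabitants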
self.inhabitants += change
SettlerInhabitantsChanged.broadcast(self, change)
self._changed()
def can_level_up(self):
return self.happiness > self.__get_data("happiness_level_up_requirement") and \
self.inhabitants >= self.inhabitants_min and not self._has_disaster()
def level_check(self):
"""Checks whether we should level up or down.
		Ignores buildings with an active disaster. """
if self.can_level_up():
if self.level >= self.level_max:
# max level reached already, can't allow an update
if self.owner.max_tier_notification < self.level_max:
if self.owner.is_local_player:
self.session.ingame_gui.message_widget.add(
point=self.position.center, string_id='MAX_TIER_REACHED')
self.owner.max_tier_notification = self.level_max
return
if self._upgrade_production:
return # already waiting for res
self._add_upgrade_production_line()
if not self.upgrade_allowed:
ToggleActive(self.get_component(Producer), self._upgrade_production).execute(self.session, True)
elif self.happiness < self.__get_data("happiness_level_down_limit") or \
self.inhabitants < self.inhabitants_min:
self.level_down()
self._changed()
def level_up(self, production=None):
"""Actually level up (usually called when the upgrade material has arrived)"""
self._upgrade_production = None
# just level up later that tick, it could disturb other code higher in the call stack
def _do_level_up():
self.level += 1
self.log.debug("%s: Levelling up to %s", self, self.level)
self._update_level_data()
# update the level of our inhabitants so graphics can change
if self.has_component(CollectingComponent):
for collector in self.get_component(CollectingComponent).get_local_collectors():
collector.level_upgrade(self.level)
# Notify the world about the level up
SettlerUpdate.broadcast(self, self.level, 1)
# reset happiness value for new level
new_happiness = self.__get_data("happiness_init_value") - self.happiness
self.get_component(StorageComponent).inventory.alter(RES.HAPPINESS, new_happiness)
self._changed()
Scheduler().add_new_object(_do_level_up, self, run_in=0)
def level_down(self):
if self.level == TIER.LOWEST:
# Can't level down any more.
self.make_ruin()
self.log.debug("%s: Destroyed by lack of happiness", self)
if self.owner.is_local_player:
# check_duplicate: only trigger once for different settlers of a neighborhood
self.session.ingame_gui.message_widget.add(point=self.position.center,
string_id='SETTLERS_MOVED_OUT', check_duplicate=True)
else:
self.level -= 1
self._update_level_data()
# reset happiness value for new level
new_happiness = self.__get_data("happiness_init_value") - self.happiness
self.get_component(StorageComponent).inventory.alter(RES.HAPPINESS, new_happiness)
self.log.debug("%s: Level down to %s", self, self.level)
self._changed()
# update the level of our inhabitants so graphics can change
if self.has_component(CollectingComponent):
for collector in self.get_component(CollectingComponent).get_local_collectors():
collector.level_upgrade(self.level)
# Notify the world about the level down
SettlerUpdate.broadcast(self, self.level, -1)
def make_ruin(self):
""" Replaces itself with a ruin.
"""
command = Build(BUILDINGS.SETTLER_RUIN, self.position.origin.x,
self.position.origin.y, island=self.island, settlement=self.settlement)
# Remove the building and then place the Ruin
Scheduler().add_new_object(Callback.ChainedCallbacks(
self.remove, Callback(command, self.owner)), self, run_in=0)
def _has_disaster(self):
return hasattr(self, "disaster") and self.disaster
def _check_main_square_in_range(self):
"""Notifies the user via a message in case there is no main square in range"""
if not self.owner.is_local_player:
return # only check this for local player
for building in self.get_buildings_in_range():
if building.id == BUILDINGS.MAIN_SQUARE:
if StaticPather.get_path_on_roads(self.island, self, building) is not None:
# a main square is in range
if hasattr(self, "_main_square_status_icon"):
RemoveStatusIcon.broadcast(self, self, SettlerNotConnectedStatus)
del self._main_square_status_icon
return
if not hasattr(self, "_main_square_status_icon"):
self._main_square_status_icon = SettlerNotConnectedStatus(self) # save ref for removal later
AddStatusIcon.broadcast(self, self._main_square_status_icon)
# no main square found
# check_duplicate: only trigger once for different settlers of a neighborhood
self.session.ingame_gui.message_widget.add(point=self.position.origin,
string_id='NO_MAIN_SQUARE_IN_RANGE', check_duplicate=True)
def level_upgrade(self, lvl):
"""Settlers only level up by themselves"""
pass
def _update_status_icon(self):
if self.has_status_icon:
unhappy = self.happiness < self.__get_data("happiness_inhabitants_decrease_limit")
# check for changes
if unhappy and not hasattr(self, "_settler_status_icon"):
self._settler_status_icon = SettlerUnhappyStatus(self) # save ref for removal later
AddStatusIcon.broadcast(self, self._settler_status_icon)
if not unhappy and hasattr(self, "_settler_status_icon"):
RemoveStatusIcon.broadcast(self, self, SettlerUnhappyStatus)
del self._settler_status_icon
def __str__(self):
try:
return "{}(l:{};ihab:{};hap:{})".format(
super().__str__(), self.level,
self.inhabitants, self.happiness)
except AttributeError: # an attribute hasn't been set up
return super().__str__()
#@decorators.cachedmethod TODO: replace this with a version that doesn't leak
def __get_data(self, key):
"""Returns constant settler-related data from the db.
The values are cached by python, so the underlying data must not change."""
return int(
self.session.db("SELECT value FROM balance_values WHERE name = ?", key)[0][0]
)
class SettlerUpgradeData:
"""This is used as glue between the old upgrade system based on sqlite data used in a non-component environment
and the current component version with data in yaml"""
# basically, this is arbitrary as long as it's not the same as any of the regular
# production lines of the settler. We reuse data that has arbitrarily been set earlier
# to preserve savegame compatibility.
production_line_ids = {1: 24, 2: 35, 3: 23451, 4: 34512, 5: 45123}
def __init__(self, producer_component, upgrade_material_data):
self.upgrade_material_data = upgrade_material_data
def get_production_lines(self):
d = {}
for level, prod_line_id in self.__class__.production_line_ids.items():
d[prod_line_id] = self.get_production_line_data(level)
return d
def get_production_line_data(self, level):
"""Returns production line data for the upgrade to this level"""
prod_line_data = {'time': 1,
'changes_animation': 0,
'enabled_by_default': False,
'save_statistics': False,
'consumes': self.upgrade_material_data[level]}
return prod_line_data
@classmethod
def get_production_line_id(cls, level):
"""Returns production line id for the upgrade to this level"""
return cls.production_line_ids[level]
| gpl-2.0 | -3,310,390,831,025,852,400 | 42.737885 | 156 | 0.733646 | false |
petry/django-press | press/migrations/0008_auto__add_field_author_photo.py | 1 | 9023 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Author.photo'
db.add_column(u'press_author', 'photo',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['photologue.Photo'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Author.photo'
db.delete_column(u'press_author', 'photo_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'photologue.photo': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'Photo'},
'caption': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photo_related'", 'null': 'True', 'to': u"orm['photologue.PhotoEffect']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'title_slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'photologue.photoeffect': {
'Meta': {'object_name': 'PhotoEffect'},
'background_color': ('django.db.models.fields.CharField', [], {'default': "'#FFFFFF'", 'max_length': '7'}),
'brightness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'color': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'contrast': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'filters': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'reflection_size': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'reflection_strength': ('django.db.models.fields.FloatField', [], {'default': '0.6'}),
'sharpness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'transpose_method': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'})
},
u'press.article': {
'Meta': {'ordering': "['modified_date']", 'object_name': 'Article'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['press.Author']", 'null': 'True', 'blank': 'True'}),
'body': ('django.db.models.fields.TextField', [], {}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 2, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 2, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'draft'", 'unique': 'True', 'null': 'True', 'to': u"orm['press.Article']"}),
'publish_state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['press.Section']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'press.author': {
'Meta': {'ordering': "['user__first_name', 'user__last_name']", 'object_name': 'Author'},
'about': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'photo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['photologue.Photo']", 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'press.section': {
'Meta': {'object_name': 'Section'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['press'] | bsd-3-clause | 4,100,511,375,027,724,300 | 74.2 | 187 | 0.548155 | false |
wtbarnes/synthesizAR | synthesizAR/analysis/eis.py | 1 | 9639 | """
Helpers for analyzing synthetic EIS data
"""
import os
import numpy as np
from sunpy.util.metadata import MetaDict
from sunpy.map import Map
from sunpy.io.fits import get_header
from sunpy.visualization.colormaps.cm import hinodexrt
import astropy.units as u
import astropy.io.fits
import h5py
__all__ = ['EISCube']
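def get_keys(attrs, possible_keys):
    """
    Return the value for the first of ``possible_keys`` present in ``attrs``.

    NOTE: assumed helper added here so that the HDF5 restore path below is
    self-contained; the original package presumably ships an equivalent
    utility under the same name.
    """
    for key in possible_keys:
        if key in attrs:
            return attrs[key]
    raise KeyError(f'None of {possible_keys} found in attributes')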
class EISCube(object):
"""
Spectral and spatial cube for holding Hinode EIS data
"""
def __init__(self, *args, **kwargs):
if len(args) == 1 and os.path.exists(args[0]):
data, header, wavelength = self._restore_from_file(args[0], **kwargs)
elif all([k in kwargs for k in ['data', 'header', 'wavelength']]):
data = kwargs.get('data')
header = kwargs.get('header')
wavelength = kwargs.get('wavelength')
else:
raise ValueError('''EISCube can only be initialized with a valid FITS file or NumPy
array with an associated wavelength and header.''')
# check dimensions
if data.shape[-1] != wavelength.shape[0]:
raise ValueError('''Third dimension of data cube must have the same length as
wavelength.''')
self.meta = header.copy()
self.wavelength = wavelength
self.data = data
self.cmap = kwargs.get('cmap', hinodexrt)
self._fix_header()
def __repr__(self):
return f'''synthesizAR {type(self).__name__}
-----------------------------------------
Telescope : {self.meta['telescop']}
Instrument : {self.meta['instrume']}
Area : x={self[0].xrange}, y={self[0].yrange}
Dimension : {u.Quantity(self[0].dimensions)}
Scale : {u.Quantity(self[0].scale)}
Wavelength range : {u.Quantity([self.wavelength[0], self.wavelength[-1]])}
Wavelength dimension : {len(self.wavelength)}'''
def __getitem__(self, key):
"""
Overriding indexing. If key is just one index, returns a normal `Map` object. Otherwise,
another `EISCube` object is returned.
"""
        if isinstance(self.wavelength[key].value, np.ndarray) and len(self.wavelength[key].value) > 1:
new_meta = self.meta.copy()
new_meta['wavelnth'] = (self.wavelength[key][0].value+self.wavelength[key][-1].value)/2.
return EISCube(data=self.data[:, :, key], header=new_meta,
wavelength=self.wavelength[key])
else:
meta_map2d = self.meta.copy()
meta_map2d['naxis'] = 2
for k in ['naxis3', 'ctype3', 'cunit3', 'cdelt3']:
del meta_map2d[k]
meta_map2d['wavelnth'] = self.wavelength[key].value
tmp_map = Map(self.data[:, :, key], meta_map2d)
tmp_map.plot_settings.update({'cmap': self.cmap})
return tmp_map
def submap(self, bottom_left_corner, top_right_corner):
"""
Crop to spatial area designated by corners
.. warning:: It is faster to crop in wavelength space first and then crop in
coordinate space.
"""
# call submap on each slice in wavelength
new_data = []
for i in range(self.wavelength.shape[0]):
new_data.append(self[i].submap(bottom_left_corner, top_right_corner).data)
new_data = np.stack(new_data, axis=2)*self.data.unit
# fix metadata
new_meta = self[0].submap(bottom_left_corner, top_right_corner).meta.copy()
for key in ['wavelnth', 'naxis3', 'ctype3', 'cunit3', 'cdelt3']:
new_meta[key] = self.meta[key]
return EISCube(data=new_data, header=new_meta, wavelength=self.wavelength)
def __add__(self, x):
"""
Allow EISCubes to be added together
"""
if isinstance(x, EISCube):
assert np.all(self.wavelength == x.wavelength), 'Wavelength ranges must be equal in order to add EISCubes'
key_checks = ['cdelt1', 'cdelt2', 'crpix1', 'crpix2', 'ctype1', 'ctype2', 'crval1',
'crval2']
for k in key_checks:
assert self.meta[k] == x.meta[k], f'{k} keys in metadata do not match'
data = self.data + x.data
else:
# if x is not an instance of EISCube, let numpy/astropy decide whether it can
# be added to the data attribute, e.g. a scalar or some 3D array with
# appropriate units
data = self.data + x
return EISCube(data=data, header=self.meta.copy(), wavelength=self.wavelength)
def __radd__(self, x):
"""
Define reverse addition in the same way as addition.
"""
return self.__add__(x)
def __mul__(self, x):
"""
Allow for multiplication of data in the cube.
"""
x = u.Quantity(x)
data = self.data*x
header = self.meta.copy()
header['bunit'] = (data.unit).to_string()
return EISCube(data=data, header=header, wavelength=self.wavelength)
def __rmul__(self, x):
"""
Define reverse multiplication in the same way as multiplication.
"""
return self.__mul__(x)
def _fix_header(self):
"""
Set any missing keys, reset any broken ones
"""
# assuming y is rows, x is columns
self.meta['naxis1'] = self.data.shape[1]
self.meta['naxis2'] = self.data.shape[0]
self.meta['naxis3'] = self.wavelength.shape[0]
def save(self, filename, use_fits=False, **kwargs):
"""
Save to FITS or HDF5 file. Default is HDF5 because this is faster and produces smaller
files.
"""
if use_fits:
self._save_to_fits(filename, **kwargs)
else:
# change extension for clarity
filename = '.'.join([os.path.splitext(filename)[0], 'h5'])
self._save_to_hdf5(filename, **kwargs)
def _save_to_hdf5(self, filename, **kwargs):
"""
Save to HDF5 file.
"""
dset_save_kwargs = kwargs.get(
'hdf5_save_params', {'compression': 'gzip', 'dtype': np.float32})
with h5py.File(filename, 'x') as hf:
meta_group = hf.create_group('meta')
for key in self.meta:
meta_group.attrs[key] = self.meta[key]
dset_wvl = hf.create_dataset('wavelength', data=self.wavelength.value)
dset_wvl.attrs['unit'] = self.wavelength.unit.to_string()
dset_intensity = hf.create_dataset('intensity', data=self.data, **dset_save_kwargs)
dset_intensity.attrs['unit'] = self.data.unit.to_string()
def _save_to_fits(self, filename, **kwargs):
"""
Save to FITS file
"""
# sanitize header
header = self.meta.copy()
if 'keycomments' in header:
del header['keycomments']
# create table to hold wavelength array
table_hdu = astropy.io.fits.BinTableHDU.from_columns(
[astropy.io.fits.Column(name='wavelength',
format='D',
unit=self.wavelength.unit.to_string(),
array=self.wavelength.value)])
# create image to hold 3D array
image_hdu = astropy.io.fits.PrimaryHDU(np.swapaxes(self.data.value.T, 1, 2),
header=astropy.io.fits.Header(header))
# write to file
hdulist = astropy.io.fits.HDUList([image_hdu, table_hdu])
hdulist.writeto(filename, output_verify='silentfix')
def _restore_from_file(self, filename, **kwargs):
"""
Load from HDF5 or FITS file
"""
use_fits = kwargs.get('use_fits', os.path.splitext(filename)[-1] == '.fits')
use_hdf5 = kwargs.get('use_hdf5', os.path.splitext(filename)[-1] == '.h5')
if use_fits:
data, header, wavelength = self._restore_from_fits(filename)
elif use_hdf5:
data, header, wavelength = self._restore_from_hdf5(filename)
else:
raise ValueError('Cube can only be initialized with a FITS or HDF5 file.')
return data, header, wavelength
def _restore_from_hdf5(self, filename):
"""
Helper to load cube from HDF5 file
"""
header = MetaDict()
with h5py.File(filename, 'r') as hf:
for key in hf['meta'].attrs:
header[key] = hf['meta'].attrs[key]
wavelength = u.Quantity(hf['wavelength'],
get_keys(hf['wavelength'].attrs, ('unit', 'units')))
data = u.Quantity(hf['intensity'], get_keys(hf['intensity'].attrs, ('unit', 'units')))
return data, header, wavelength
def _restore_from_fits(self, filename):
"""
Helper to load cube from FITS file
"""
tmp = astropy.io.fits.open(filename)
header = MetaDict(get_header(tmp)[0])
data = tmp[0].data*u.Unit(header['bunit'])
wavelength = tmp[1].data.field(0)*u.Unit(tmp[1].header['TUNIT1'])
tmp.close()
return np.swapaxes(data.T, 0, 1), header, wavelength
@property
def integrated_intensity(self):
"""
Map of the intensity integrated over wavelength.
"""
tmp = np.dot(self.data, np.gradient(self.wavelength.value))
tmp_meta = self[0].meta.copy()
tmp_meta['wavelnth'] = self.meta['wavelnth']
tmp_meta['bunit'] = (u.Unit(self.meta['bunit'])*self.wavelength.unit).to_string()
tmp_map = Map(tmp, tmp_meta)
tmp_map.plot_settings.update({'cmap': self.cmap})
return tmp_map
| gpl-3.0 | 2,225,193,771,007,478,800 | 38.504098 | 118 | 0.5651 | false |
sridevikoushik31/nova | nova/tests/test_imagebackend.py | 1 | 18280 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Grid Dynamics
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
from oslo.config import cfg
from nova import exception
from nova.openstack.common import uuidutils
from nova import test
from nova.tests import fake_libvirt_utils
from nova.tests import fake_processutils
from nova.virt.libvirt import imagebackend
CONF = cfg.CONF
class _ImageTestCase(object):
INSTANCES_PATH = '/instances_path'
def mock_create_image(self, image):
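        # Stub out the backend-specific create_image so the cache() tests only
        # exercise the fetch function against the template path.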
def create_image(fn, base, size, *args, **kwargs):
fn(target=base, *args, **kwargs)
image.create_image = create_image
def setUp(self):
super(_ImageTestCase, self).setUp()
self.flags(disable_process_locking=True,
instances_path=self.INSTANCES_PATH)
self.INSTANCE = {'name': 'instance',
'uuid': uuidutils.generate_uuid()}
self.NAME = 'fake.vm'
self.TEMPLATE = 'template'
self.OLD_STYLE_INSTANCE_PATH = \
fake_libvirt_utils.get_instance_path(self.INSTANCE, forceold=True)
self.PATH = os.path.join(
fake_libvirt_utils.get_instance_path(self.INSTANCE), self.NAME)
# TODO(mikal): rename template_dir to base_dir and template_path
# to cached_image_path. This will be less confusing.
self.TEMPLATE_DIR = os.path.join(CONF.instances_path, '_base')
self.TEMPLATE_PATH = os.path.join(self.TEMPLATE_DIR, 'template')
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.imagebackend.libvirt_utils',
fake_libvirt_utils))
def test_cache(self):
self.mox.StubOutWithMock(os.path, 'exists')
if self.OLD_STYLE_INSTANCE_PATH:
os.path.exists(self.OLD_STYLE_INSTANCE_PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_DIR).AndReturn(False)
os.path.exists(self.PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_PATH).AndReturn(False)
fn = self.mox.CreateMockAnything()
fn(target=self.TEMPLATE_PATH)
self.mox.StubOutWithMock(imagebackend.fileutils, 'ensure_tree')
imagebackend.fileutils.ensure_tree(self.TEMPLATE_DIR)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
self.mock_create_image(image)
image.cache(fn, self.TEMPLATE)
self.mox.VerifyAll()
def test_cache_image_exists(self):
self.mox.StubOutWithMock(os.path, 'exists')
if self.OLD_STYLE_INSTANCE_PATH:
os.path.exists(self.OLD_STYLE_INSTANCE_PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_DIR).AndReturn(True)
os.path.exists(self.PATH).AndReturn(True)
os.path.exists(self.TEMPLATE_PATH).AndReturn(True)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.cache(None, self.TEMPLATE)
self.mox.VerifyAll()
def test_cache_base_dir_exists(self):
self.mox.StubOutWithMock(os.path, 'exists')
if self.OLD_STYLE_INSTANCE_PATH:
os.path.exists(self.OLD_STYLE_INSTANCE_PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_DIR).AndReturn(True)
os.path.exists(self.PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_PATH).AndReturn(False)
fn = self.mox.CreateMockAnything()
fn(target=self.TEMPLATE_PATH)
self.mox.StubOutWithMock(imagebackend.fileutils, 'ensure_tree')
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
self.mock_create_image(image)
image.cache(fn, self.TEMPLATE)
self.mox.VerifyAll()
def test_cache_template_exists(self):
self.mox.StubOutWithMock(os.path, 'exists')
if self.OLD_STYLE_INSTANCE_PATH:
os.path.exists(self.OLD_STYLE_INSTANCE_PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_DIR).AndReturn(True)
os.path.exists(self.PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_PATH).AndReturn(True)
fn = self.mox.CreateMockAnything()
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
self.mock_create_image(image)
image.cache(fn, self.TEMPLATE)
self.mox.VerifyAll()
def test_prealloc_image(self):
CONF.set_override('preallocate_images', 'space')
fake_processutils.fake_execute_clear_log()
fake_processutils.stub_out_processutils_execute(self.stubs)
image = self.image_class(self.INSTANCE, self.NAME)
def fake_fetch(target, *args, **kwargs):
return
self.stubs.Set(os.path, 'exists', lambda _: True)
# Call twice to verify testing fallocate is only called once.
image.cache(fake_fetch, self.TEMPLATE_PATH, self.SIZE)
image.cache(fake_fetch, self.TEMPLATE_PATH, self.SIZE)
self.assertEqual(fake_processutils.fake_execute_get_log(),
['fallocate -n -l 1 %s.fallocate_test' % self.PATH,
'fallocate -n -l %s %s' % (self.SIZE, self.PATH),
'fallocate -n -l %s %s' % (self.SIZE, self.PATH)])
class RawTestCase(_ImageTestCase, test.TestCase):
SIZE = 1024
def setUp(self):
self.image_class = imagebackend.Raw
super(RawTestCase, self).setUp()
self.stubs.Set(imagebackend.Raw, 'correct_format', lambda _: None)
def prepare_mocks(self):
fn = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(imagebackend.utils.synchronized,
'__call__')
self.mox.StubOutWithMock(imagebackend.libvirt_utils, 'copy_image')
self.mox.StubOutWithMock(imagebackend.disk, 'extend')
return fn
def test_create_image(self):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH, image_id=None)
imagebackend.libvirt_utils.copy_image(self.TEMPLATE_PATH, self.PATH)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, None, image_id=None)
self.mox.VerifyAll()
def test_create_image_generated(self):
fn = self.prepare_mocks()
fn(target=self.PATH)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, None)
self.mox.VerifyAll()
def test_create_image_extend(self):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH, image_id=None)
imagebackend.libvirt_utils.copy_image(self.TEMPLATE_PATH, self.PATH)
imagebackend.disk.extend(self.PATH, self.SIZE)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, self.SIZE, image_id=None)
self.mox.VerifyAll()
def test_correct_format(self):
info = self.mox.CreateMockAnything()
self.stubs.UnsetAll()
self.mox.StubOutWithMock(os.path, 'exists')
self.mox.StubOutWithMock(imagebackend.images, 'qemu_img_info')
os.path.exists(self.PATH).AndReturn(True)
info = self.mox.CreateMockAnything()
info.file_format = 'foo'
imagebackend.images.qemu_img_info(self.PATH).AndReturn(info)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME, path=self.PATH)
self.assertEqual(image.driver_format, 'foo')
self.mox.VerifyAll()
class Qcow2TestCase(_ImageTestCase, test.TestCase):
SIZE = 1024 * 1024 * 1024
def setUp(self):
self.image_class = imagebackend.Qcow2
super(Qcow2TestCase, self).setUp()
self.QCOW2_BASE = (self.TEMPLATE_PATH +
'_%d' % (self.SIZE / (1024 * 1024 * 1024)))
def prepare_mocks(self):
fn = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(imagebackend.utils.synchronized,
'__call__')
self.mox.StubOutWithMock(imagebackend.libvirt_utils,
'create_cow_image')
self.mox.StubOutWithMock(imagebackend.libvirt_utils, 'copy_image')
self.mox.StubOutWithMock(imagebackend.disk, 'extend')
return fn
def test_create_image(self):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH)
imagebackend.libvirt_utils.create_cow_image(self.TEMPLATE_PATH,
self.PATH)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, None)
self.mox.VerifyAll()
def test_create_image_with_size(self):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH)
self.mox.StubOutWithMock(os.path, 'exists')
self.mox.StubOutWithMock(imagebackend.disk, 'get_disk_size')
if self.OLD_STYLE_INSTANCE_PATH:
os.path.exists(self.OLD_STYLE_INSTANCE_PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_PATH).AndReturn(False)
imagebackend.disk.get_disk_size(self.TEMPLATE_PATH
).AndReturn(self.SIZE)
os.path.exists(self.PATH).AndReturn(False)
imagebackend.libvirt_utils.create_cow_image(self.TEMPLATE_PATH,
self.PATH)
imagebackend.disk.extend(self.PATH, self.SIZE)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, self.SIZE)
self.mox.VerifyAll()
def test_create_image_too_small(self):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH)
self.mox.StubOutWithMock(os.path, 'exists')
self.mox.StubOutWithMock(imagebackend.disk, 'get_disk_size')
if self.OLD_STYLE_INSTANCE_PATH:
os.path.exists(self.OLD_STYLE_INSTANCE_PATH).AndReturn(False)
os.path.exists(self.TEMPLATE_PATH).AndReturn(False)
imagebackend.disk.get_disk_size(self.TEMPLATE_PATH
).AndReturn(self.SIZE)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
self.assertRaises(exception.ImageTooLarge, image.create_image, fn,
self.TEMPLATE_PATH, 1)
self.mox.VerifyAll()
class LvmTestCase(_ImageTestCase, test.TestCase):
VG = 'FakeVG'
TEMPLATE_SIZE = 512
SIZE = 1024
def setUp(self):
self.image_class = imagebackend.Lvm
super(LvmTestCase, self).setUp()
self.flags(libvirt_images_volume_group=self.VG)
self.LV = '%s_%s' % (self.INSTANCE['name'], self.NAME)
self.OLD_STYLE_INSTANCE_PATH = None
self.PATH = os.path.join('/dev', self.VG, self.LV)
self.disk = imagebackend.disk
self.utils = imagebackend.utils
self.libvirt_utils = imagebackend.libvirt_utils
def prepare_mocks(self):
fn = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(self.disk, 'resize2fs')
self.mox.StubOutWithMock(self.libvirt_utils, 'create_lvm_image')
self.mox.StubOutWithMock(self.disk, 'get_disk_size')
self.mox.StubOutWithMock(self.utils, 'execute')
return fn
def _create_image(self, sparse):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH)
self.libvirt_utils.create_lvm_image(self.VG,
self.LV,
self.TEMPLATE_SIZE,
sparse=sparse)
self.disk.get_disk_size(self.TEMPLATE_PATH
).AndReturn(self.TEMPLATE_SIZE)
cmd = ('qemu-img', 'convert', '-O', 'raw', self.TEMPLATE_PATH,
self.PATH)
self.utils.execute(*cmd, run_as_root=True)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, None)
self.mox.VerifyAll()
def _create_image_generated(self, sparse):
fn = self.prepare_mocks()
self.libvirt_utils.create_lvm_image(self.VG, self.LV,
self.SIZE, sparse=sparse)
fn(target=self.PATH, ephemeral_size=None)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH,
self.SIZE, ephemeral_size=None)
self.mox.VerifyAll()
def _create_image_resize(self, sparse):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH)
self.libvirt_utils.create_lvm_image(self.VG, self.LV,
self.SIZE, sparse=sparse)
self.disk.get_disk_size(self.TEMPLATE_PATH
).AndReturn(self.TEMPLATE_SIZE)
cmd = ('qemu-img', 'convert', '-O', 'raw', self.TEMPLATE_PATH,
self.PATH)
self.utils.execute(*cmd, run_as_root=True)
self.disk.resize2fs(self.PATH, run_as_root=True)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
image.create_image(fn, self.TEMPLATE_PATH, self.SIZE)
self.mox.VerifyAll()
def test_create_image(self):
self._create_image(False)
def test_create_image_sparsed(self):
self.flags(libvirt_sparse_logical_volumes=True)
self._create_image(True)
def test_create_image_generated(self):
self._create_image_generated(False)
def test_create_image_generated_sparsed(self):
self.flags(libvirt_sparse_logical_volumes=True)
self._create_image_generated(True)
def test_create_image_resize(self):
self._create_image_resize(False)
def test_create_image_resize_sparsed(self):
self.flags(libvirt_sparse_logical_volumes=True)
self._create_image_resize(True)
def test_create_image_negative(self):
fn = self.prepare_mocks()
fn(target=self.TEMPLATE_PATH)
self.libvirt_utils.create_lvm_image(self.VG,
self.LV,
self.SIZE,
sparse=False
).AndRaise(RuntimeError())
self.disk.get_disk_size(self.TEMPLATE_PATH
).AndReturn(self.TEMPLATE_SIZE)
self.mox.StubOutWithMock(self.libvirt_utils, 'remove_logical_volumes')
self.libvirt_utils.remove_logical_volumes(self.PATH)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
self.assertRaises(RuntimeError, image.create_image, fn,
self.TEMPLATE_PATH, self.SIZE)
self.mox.VerifyAll()
def test_create_image_generated_negative(self):
fn = self.prepare_mocks()
fn(target=self.PATH,
ephemeral_size=None).AndRaise(RuntimeError())
self.libvirt_utils.create_lvm_image(self.VG,
self.LV,
self.SIZE,
sparse=False)
self.mox.StubOutWithMock(self.libvirt_utils, 'remove_logical_volumes')
self.libvirt_utils.remove_logical_volumes(self.PATH)
self.mox.ReplayAll()
image = self.image_class(self.INSTANCE, self.NAME)
self.assertRaises(RuntimeError, image.create_image, fn,
self.TEMPLATE_PATH, self.SIZE,
ephemeral_size=None)
self.mox.VerifyAll()
def test_prealloc_image(self):
CONF.set_override('preallocate_images', 'space')
fake_processutils.fake_execute_clear_log()
fake_processutils.stub_out_processutils_execute(self.stubs)
image = self.image_class(self.INSTANCE, self.NAME)
def fake_fetch(target, *args, **kwargs):
return
self.stubs.Set(os.path, 'exists', lambda _: True)
image.cache(fake_fetch, self.TEMPLATE_PATH, self.SIZE)
self.assertEqual(fake_processutils.fake_execute_get_log(), [])
class BackendTestCase(test.TestCase):
INSTANCE = {'name': 'fake-instance',
'uuid': uuidutils.generate_uuid()}
NAME = 'fake-name.suffix'
def get_image(self, use_cow, image_type):
return imagebackend.Backend(use_cow).image(self.INSTANCE,
self.NAME,
image_type)
def _test_image(self, image_type, image_not_cow, image_cow):
image1 = self.get_image(False, image_type)
image2 = self.get_image(True, image_type)
def assertIsInstance(instance, class_object):
            failure = 'Expected %s, but got %s.' % (
                class_object.__name__, instance.__class__.__name__)
self.assertTrue(isinstance(instance, class_object), failure)
assertIsInstance(image1, image_not_cow)
assertIsInstance(image2, image_cow)
def test_image_raw(self):
self._test_image('raw', imagebackend.Raw, imagebackend.Raw)
def test_image_qcow2(self):
self._test_image('qcow2', imagebackend.Qcow2, imagebackend.Qcow2)
def test_image_lvm(self):
self.flags(libvirt_images_volume_group='FakeVG')
self._test_image('lvm', imagebackend.Lvm, imagebackend.Lvm)
def test_image_default(self):
self._test_image('default', imagebackend.Raw, imagebackend.Qcow2)
| apache-2.0 | -7,723,475,165,103,364,000 | 37.242678 | 78 | 0.611543 | false |
t3dev/odoo | addons/website_slides/tests/test_statistics.py | 1 | 5815 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import math
from odoo.addons.website_slides.tests import common
from odoo.exceptions import AccessError, UserError
from odoo.tests import tagged
from odoo.tests.common import users
from odoo.tools import mute_logger, float_compare
@tagged('functional')
class TestStatistics(common.SlidesCase):
def setUp(self):
super(TestStatistics, self).setUp()
self.slide_2 = self.env['slide.slide'].sudo(self.user_publisher).create({
'name': 'How To Cook For Humans',
'channel_id': self.channel.id,
'slide_type': 'presentation',
'website_published': True,
'completion_time': 3.0,
})
self.slide_3 = self.env['slide.slide'].sudo(self.user_publisher).create({
'name': 'How To Cook Humans For Humans',
'channel_id': self.channel.id,
'slide_type': 'document',
'website_published': True,
'completion_time': 1.5,
})
@mute_logger('odoo.models')
def test_channel_statistics(self):
channel_publisher = self.channel.sudo(self.user_publisher)
# slide type computation
self.assertEqual(channel_publisher.total_slides, len(channel_publisher.slide_ids))
self.assertEqual(channel_publisher.nbr_infographic, len(channel_publisher.slide_ids.filtered(lambda s: s.slide_type == 'infographic')))
self.assertEqual(channel_publisher.nbr_presentation, len(channel_publisher.slide_ids.filtered(lambda s: s.slide_type == 'presentation')))
self.assertEqual(channel_publisher.nbr_document, len(channel_publisher.slide_ids.filtered(lambda s: s.slide_type == 'document')))
self.assertEqual(channel_publisher.nbr_video, len(channel_publisher.slide_ids.filtered(lambda s: s.slide_type == 'video')))
# slide statistics computation
self.assertEqual(float_compare(channel_publisher.total_time, sum(s.completion_time for s in channel_publisher.slide_ids), 3), 0)
# members computation
self.assertEqual(channel_publisher.members_count, 1)
channel_publisher.action_add_member()
self.assertEqual(channel_publisher.members_count, 1)
channel_publisher._action_add_members(self.user_emp.partner_id)
self.assertEqual(channel_publisher.members_count, 2)
self.assertEqual(channel_publisher.partner_ids, self.user_publisher.partner_id | self.user_emp.partner_id)
@mute_logger('odoo.models')
def test_channel_user_statistics(self):
channel_publisher = self.channel.sudo(self.user_publisher)
channel_publisher.write({
'enroll': 'invite',
})
channel_publisher._action_add_members(self.user_emp.partner_id)
channel_emp = self.channel.sudo(self.user_emp)
slides_emp = (self.slide | self.slide_2).sudo(self.user_emp)
slides_emp.action_set_viewed()
self.assertEqual(channel_emp.completion, 0)
slides_emp.action_set_completed()
channel_emp.invalidate_cache()
self.assertEqual(
channel_emp.completion,
math.ceil(100.0 * len(slides_emp) / len(channel_publisher.slide_ids)))
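        # two completed slides out of the channel's three -> ceil(100 * 2 / 3) = 67%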
self.assertFalse(channel_emp.completed)
self.slide_3.sudo(self.user_emp).action_set_completed()
self.assertEqual(channel_emp.completion, 100)
self.assertTrue(channel_emp.completed)
@mute_logger('odoo.models')
def test_channel_user_statistics_complete_check_member(self):
(self.slide | self.slide_2).write({'is_preview': True})
slides_emp = (self.slide | self.slide_2).sudo(self.user_emp)
slides_emp.read(['name'])
with self.assertRaises(UserError):
slides_emp.action_set_completed()
@mute_logger('odoo.models')
def test_channel_user_statistics_view_check_member(self):
(self.slide | self.slide_2).write({'is_preview': True})
slides_emp = (self.slide | self.slide_2).sudo(self.user_emp)
slides_emp.read(['name'])
with self.assertRaises(UserError):
slides_emp.action_set_viewed()
def test_slide_user_statistics(self):
channel_publisher = self.channel.sudo(self.user_publisher)
channel_publisher._action_add_members(self.user_emp.partner_id)
slide_emp = self.slide.sudo(self.user_emp)
self.assertEqual(slide_emp.likes, 0)
self.assertEqual(slide_emp.dislikes, 0)
self.assertEqual(slide_emp.user_vote, 0)
slide_emp.action_like()
self.assertEqual(slide_emp.likes, 1)
self.assertEqual(slide_emp.dislikes, 0)
self.assertEqual(slide_emp.user_vote, 1)
slide_emp.action_dislike()
self.assertEqual(slide_emp.likes, 0)
self.assertEqual(slide_emp.dislikes, 0)
self.assertEqual(slide_emp.user_vote, 0)
slide_emp.action_dislike()
self.assertEqual(slide_emp.likes, 0)
self.assertEqual(slide_emp.dislikes, 1)
self.assertEqual(slide_emp.user_vote, -1)
def test_slide_statistics(self):
channel_publisher = self.channel.sudo(self.user_publisher)
channel_publisher._action_add_members(self.user_emp.partner_id)
self.assertEqual(self.slide.slide_views, 0)
self.assertEqual(self.slide.public_views, 0)
self.slide.write({'public_views': 4})
self.assertEqual(self.slide.slide_views, 0)
self.assertEqual(self.slide.public_views, 4)
self.assertEqual(self.slide.total_views, 4)
slide_emp = self.slide.sudo(self.user_emp)
slide_emp.action_set_viewed()
self.assertEqual(slide_emp.slide_views, 1)
self.assertEqual(slide_emp.public_views, 4)
self.assertEqual(slide_emp.total_views, 5)
| gpl-3.0 | 6,279,237,426,971,799,000 | 42.721805 | 145 | 0.663629 | false |
gonzalolarralde/FakeARMVE | fake_firmware/messages.py | 1 | 8599 | # -*- coding: utf-8 -*-
import platform
import struct
from construct import Container
from construct.core import FieldError
from datetime import datetime
from time import sleep
from zlib import crc32
from constants import *
from structs import *
from helpers import *
MESSAGE_HANDLERS = []
DEBUG = True
def prepare_container(device, command, data="", msg_type=MSG_COMMAND):
return Container(
version=PROTOCOL_VERSION_1,
device=device,
msg_type=msg_type,
size=7 + len(data),
command=command,
data=data)
def debug_msg(*args):
if DEBUG:
print(args)
def message_handler(device, commands):
def message_handler_call(f):
MESSAGE_HANDLERS.append((device, commands, f))
return f
return message_handler_call
def handler_for(device, command):
for handler in MESSAGE_HANDLERS:
valid_commands = handler[1] if isinstance(handler[1], list) else [handler[1]]
if handler[0] == device and command in valid_commands:
return handler[2]
return None
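# Dispatch sketch (illustrative, mirrors the decorated handlers further below):
#   @message_handler(DEV_AGENT, CMD_AGENT_INIT)
#   def _(common_data, client):
#       send_initialize_ok(client)
# process_message() then resolves the registered handler through handler_for().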
def process_message(common_data, client):
if common_data.msg_type != MSG_ERROR:
print "date", datetime.now()
print "msg_type", common_data.msg_type
print "device", common_data.device
print "command", common_data.command
print "data", string_to_array(common_data.data)
possible_handler = handler_for(common_data.device, common_data.command)
if possible_handler is not None:
possible_handler(common_data, client)
else:
print "command name", "UNKNOWN"
print "-------------"
else:
print "err", common_data.command
print "data", string_to_array(common_data.data)
print "-------------"
# --------------------- #
def send_initialize_ok(client):
res = Container(
response_code=INIT_RESPONSE_OK,
protocol_size=1,
protocols=[1],
model=string_to_array("RogelitoEV "),
serial_number=string_to_array("12345678"),
build=string_to_array("123"),
watchdog=0,
free_ram=65535,
free_print_mem=65535,
free_page_mem=65535,
machine_type=1)
cmd = prepare_container(DEV_AGENT, CMD_AGENT_INIT, struct_initialize_ok.build(res))
client.send(cmd)
def send_tags_list(tags, client, as_event = False):
res = Container(
number = len(tags),
serial_number = [string_to_array(x) for x in tags],
reception_level = [[100] for x in tags])
cmd = prepare_container(DEV_RFID, CMD_RFID_GET_TAGS if not as_event else EVT_RFID_NEW_TAG, \
struct_tags_list.build(res), MSG_COMMAND if not as_event else MSG_EV_PUB)
client.send(cmd)
def send_block_data(tag, block_from, block_qty, multi_block, client):
blocks_to_send = [Container(bytes=x) for x in tag["blocks"][block_from:block_qty+1]] # OOPS, for some reason the service expects one additional block to be sent, to compute CRC
if not multi_block:
cmd = prepare_container(DEV_RFID, CMD_RFID_READ_BLOCK, struct_rfid_block.build (blocks_to_send[0]))
else:
cmd = prepare_container(DEV_RFID, CMD_RFID_READ_BLOCKS, struct_rfid_blocks.build (blocks_to_send))
client.send(cmd)
def send_printer_status(paper_out_1, paper_out_2, lever_open, msg_type, command, client):
cmd = prepare_container(DEV_PRINTER, command, \
struct_printer_get_status.build(Container(paper_out_1=paper_out_1, paper_out_2=paper_out_2, lever_open=lever_open)), \
msg_type)
client.send(cmd)
def send_paper_remove(client):
cmd = prepare_container(DEV_PRINTER, CMD_PRINTER_PAPER_REMOVE, "", MSG_EV_PUB)
client.send(cmd)
# --------------------- #
@message_handler(DEV_AGENT, CMD_AGENT_INIT)
def _(common_data, client):
print "command name", "AAAA CMD_AGENT_INIT"
send_initialize_ok(client)
@message_handler(DEV_PRINTER, CMD_PRINTER_GET_STATUS)
def _(common_data, client):
print "command name", "CMD_PRINTER_GET_STATUS"
current_printer_status = client.current_printer_status
send_printer_status(current_printer_status[0], current_printer_status[1], current_printer_status[2], \
MSG_COMMAND, CMD_PRINTER_GET_STATUS, client)
@message_handler(DEV_RFID, CMD_RFID_GET_TAGS)
def _(common_data, client):
print "command name", "CMD_RFID_GET_TAGS"
send_tags_list(client.current_tags, client)
@message_handler(DEV_RFID, CMD_RFID_READ_BLOCK)
def _(common_data, client):
print "command name", "CMD_RFID_READ_BLOCK"
x = struct_read_block.parse(common_data.data)
print "serial_number", x.serial_number
print "block", x.block
send_block_data(client.get_tag(array_to_string(x.serial_number)), x.block, 1, False, client)
@message_handler(DEV_RFID, CMD_RFID_READ_BLOCKS)
def _(common_data, client):
print "command name", "CMD_RFID_READ_BLOCKS"
x = struct_read_blocks.parse(common_data.data)
print "serial_number", x.serial_number
print "block", x.block
print "number", x.number
# ToDo: Fix - For some reason I'm reading a block less than the number sent by the service
send_block_data(client.get_tag(array_to_string(x.serial_number)), x.block, x.number+1, True, client)
@message_handler(DEV_PRINTER, CMD_PRINTER_PAPER_REMOVE)
def _(common_data, client):
print "command name", "CMD_PRINTER_PAPER_REMOVE"
client.current_printer_status = [0,0,0]
send_printer_status(client.current_printer_status[0], client.current_printer_status[1], client.current_printer_status[2], \
MSG_COMMAND, CMD_PRINTER_PAPER_REMOVE, client)
client.printer_ejected()
@message_handler(DEV_RFID, CMD_RFID_WRITE_BLOCK)
def _(common_data, client):
print "command name", "CMD_RFID_WRITE_BLOCK"
x = struct_write_block.parse(common_data.data)
print "serial_number", array_to_string(x.serial_number)
print "block", x.block
print "bytes", x.rfid_block.bytes
client.write_tag(array_to_string(x.serial_number), x.block, [x.rfid_block.bytes])
client.send(prepare_container(common_data.device, common_data.command))
@message_handler(DEV_RFID, CMD_RFID_WRITE_BLOCKS)
def _(common_data, client):
print "command name", "CMD_RFID_WRITE_BLOCKS"
x = struct_write_blocks.parse(common_data.data)
print "serial_number", array_to_string(x.serial_number)
print "block", x.block
print "number", x.number
print "bytes", [i.bytes for i in x.rfid_block]
client.write_tag(array_to_string(x.serial_number), x.block, [i.bytes for i in x.rfid_block])
client.send(prepare_container(common_data.device, common_data.command))
@message_handler(DEV_RFID, CMD_RFID_SET_RO_BLOCK)
def _(common_data, client):
print "command name", "CMD_RFID_SET_RO_BLOCK"
x = struct_read_block.parse(common_data.data)
print "serial_number", array_to_string(x.serial_number)
print "block", x.block
client.mark_tag_ro_blocks(array_to_string(x.serial_number), x.block, 1)
@message_handler(DEV_RFID, CMD_RFID_SET_RO_BLOCKS)
def _(common_data, client):
print "command name", "CMD_RFID_SET_RO_BLOCKS"
x = struct_read_blocks.parse(common_data.data)
print "serial_number", array_to_string(x.serial_number)
print "block", x.block
print "number", x.number
client.mark_tag_ro_blocks(array_to_string(x.serial_number), x.block, x.number)
@message_handler(DEV_RFID, CMD_RFID_IS_READONLY)
def _(common_data, client):
print "command name", "CMD_RFID_IS_READONLY"
x = struct_read_blocks.parse(common_data.data)
print "serial_number", array_to_string(x.serial_number)
print "block", x.block
print "number", x.number
    ro_blocks = client.get_tag(array_to_string(x.serial_number))["ro_blocks"]
    security_data = struct_security_status.build(Container(byte=[1 if i in ro_blocks else 0 for i in range(x.block, x.number)]))
client.send(prepare_container(common_data.device, common_data.command, security_data))
@message_handler(DEV_PRINTER, CMD_PRINTER_CLEAR_BUFFER)
def _(common_data, client):
client.reset_printer_buffer()
@message_handler(DEV_PRINTER, [CMD_PRINTER_LOAD_COMP_BUFFER, CMD_PRINTER_LOAD_BUFFER])
def _(common_data, client):
x = struct_print_buffer.parse(common_data.data)
if x.clear_buffer > 0:
client.reset_printer_buffer()
# print len(data), len(x.stream), size, x.size
stream_data = x.stream
if common_data.command == CMD_PRINTER_LOAD_COMP_BUFFER: # Expand the data if it compressed
stream_data = expand_printer_data(stream_data)
client.add_data_to_printer_buffer(stream_data)
if x.do_print > 0:
client.do_print()
| agpl-3.0 | 1,663,717,867,268,342,800 | 34.979079 | 180 | 0.674148 | false |
justas-/pyledbat | pyledbat/ledbat/baseledbat.py | 1 | 6716 | """
Copyright 2017, J. Poderys, Technical University of Denmark
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
This is a base implementation of LEDBAT following the LEDBAT specification
[RFC6817]. This file is not enough on its own and must be extended to gate
the sending. An example of such an extension is provided by the simpleledbat
implementation and by the test application.
"""
import time
import datetime
import math
import logging
class BaseLedbat(object):
"""Base class with constante defined"""
CURRENT_FILTER = 8 # Number of elements in current delay filter
BASE_HISTORY = 10 # Number of elements in base delay history
INIT_CWND = 2 # Number of MSSes in initial cwnd value
MSS = 1500 # Maximum segment size
TARGET = 50 # Target in milliseconds. Per [RFC6817] must be <= 100ms
GAIN = 1 # Congestion window to delay response rate
ALLOWED_INCREASE = 1
MIN_CWND = 2
def __init__(self, **kwargs):
"""Initialize the instance"""
self._current_delays = BaseLedbat.CURRENT_FILTER * [1000000]
self._base_delays = BaseLedbat.BASE_HISTORY * [float('inf')]
self._flightsize = 0
self._cwnd = BaseLedbat.INIT_CWND * BaseLedbat.MSS # Congestion window
self._last_rollover = time.time() # Time last base-delay rollover occured
self._cto = 1 # Congestion timeout (seconds)
self._queuing_delay = 0
self._rtt = None # Round Trip Time
self._last_data_loss = 0 # When was latest dataloss event observed
self._last_ack_received = None # When was the last ACK received
# Change defaults if given:
for key, value in kwargs.items():
if key == 'set_current_filter':
BaseLedbat.CURRENT_FILTER = value
elif key == 'set_base_history':
BaseLedbat.BASE_HISTORY = value
elif key == 'set_init_cwnd':
BaseLedbat.INIT_CWND = value
elif key == 'set_mss':
BaseLedbat.MSS = value
elif key == 'set_target':
BaseLedbat.TARGET = value
elif key == 'set_gain':
BaseLedbat.GAIN = value
elif key == 'set_allowed_increase':
BaseLedbat.ALLOWED_INCREASE = value
elif key == 'set_min_cwnd':
BaseLedbat.MIN_CWND = value
else:
# Fall through option so logging is not done
continue
logging.info('LEDBAT parameter changed: %s => %s', key, value)
def _ack_received(self, bytes_acked, ow_delays, rtt_delays):
"""Parse the received delay sample(s)
delays is milliseconds, rt_measurements in seconds!
"""
# Update time of last ACK
self._last_ack_received = time.time()
# Process all received delay samples
for delay_sample in ow_delays:
self._update_base_delay(delay_sample)
self._update_current_delay(delay_sample)
# Update values
self._queuing_delay = self._filter_alg(self._current_delays) - min(self._base_delays)
off_target = (BaseLedbat.TARGET - self._queuing_delay) / BaseLedbat.TARGET
self._cwnd += int(BaseLedbat.GAIN * off_target * bytes_acked * BaseLedbat.MSS / self._cwnd)
max_allowed_cwnd = self._flightsize + BaseLedbat.ALLOWED_INCREASE * BaseLedbat.MSS
self._cwnd = min([self._cwnd, max_allowed_cwnd])
self._cwnd = max([self._cwnd, BaseLedbat.MIN_CWND * BaseLedbat.MSS])
self._flightsize = max([0, self._flightsize - bytes_acked])
self._update_cto(rtt_delays)
def data_loss(self, will_retransmit=True, loss_size=None):
"""Reduce cwnd if data loss is experienced"""
# Get the current time
t_now = time.time()
if loss_size is None:
loss_size = BaseLedbat.MSS
# Prevent calling too often
if self._last_data_loss != 0:
            if self._rtt is None or t_now - self._last_data_loss < self._rtt:
                # At most once per RTT (bail out if there is no RTT estimate yet)
                return
# Save time when last dataloss event happened
self._last_data_loss = t_now
# Reduce the congestion window size
self._cwnd = min([
self._cwnd,
int(max([self._cwnd / 2, BaseLedbat.MIN_CWND * BaseLedbat.MSS]))
])
# Account for data in-flight
if not will_retransmit:
self._flightsize = self._flightsize - loss_size
def _no_ack_in_cto(self):
"""Update CWND if no ACK was received in CTO"""
self._cwnd = 1 * BaseLedbat.MSS
self._cto = 2 * self._cto
def _update_cto(self, rtt_values):
"""Calculate congestion timeout (CTO)"""
pass
def _filter_alg(self, filter_data):
"""Implements FILTER() algorithm"""
# Implemented per [RFC6817] MIN filter over a small window
# multiplied by -1 to get latest window_size values
window_size = -1 * math.ceil(self.BASE_HISTORY/4)
return min(filter_data[window_size:])
def _update_base_delay(self, delay):
"""Update value in base_delay tracker list"""
t_now = time.time()
# Implemented per [RFC6817]
minute_now = datetime.datetime.fromtimestamp(t_now).minute
minute_then = datetime.datetime.fromtimestamp(self._last_rollover).minute
if minute_now != minute_then:
# Shift value at next minute
self._last_rollover = t_now
self._base_delays = self._base_delays[1:]
self._base_delays.append(delay)
else:
# For each measurements during the same minute keep minimum value
# at the end of the list
self._base_delays[-1] = min([self._base_delays[-1], delay])
def _update_current_delay(self, delay):
"""Add new value to the current delays list"""
# Implemented per [RFC6817]
self._current_delays = self._current_delays[1:]
self._current_delays.append(delay)
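# Illustrative sketch (not part of the original module): a minimal subclass in
# the spirit of simpleledbat, gating sends on the congestion window. The
# send_allowed()/on_send()/on_ack() names and the CTO heuristic are assumptions
# made for this example, not pyledbat or RFC6817 API.
class _SketchLedbat(BaseLedbat):
    def _update_cto(self, rtt_values):
        """Derive a crude CTO from the mean of the latest RTT samples"""
        if rtt_values:
            self._rtt = sum(rtt_values) / len(rtt_values)
            self._cto = max(1, 4 * self._rtt)
    def send_allowed(self, segment_size=BaseLedbat.MSS):
        """Permit a send only while in-flight data fits within cwnd"""
        return self._flightsize + segment_size <= self._cwnd
    def on_send(self, segment_size=BaseLedbat.MSS):
        """Account for a segment handed to the network"""
        self._flightsize += segment_size
    def on_ack(self, bytes_acked, ow_delays, rtt_delays):
        """Feed ACK information into the LEDBAT controller"""
        self._ack_received(bytes_acked, ow_delays, rtt_delays)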
| apache-2.0 | 1,560,566,274,252,504,300 | 38.274854 | 99 | 0.599315 | false |
xdlinux/xidian-scripts | Python/get_xidian_news.py | 1 | 4476 | # Copyright (C) 2020 by the XiDian Open Source Community.
#
# This file is part of xidian-scripts.
#
# xidian-scripts is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# xidian-scripts is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with xidian-scripts. If not, see <http://www.gnu.org/licenses/>.
'''
Scrape homepage news from the Xidian University news site (news.xidian.edu.cn)
python3.x + requests + pyquery
'''
import requests as rq
from pyquery import PyQuery as pq
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36'}
root_url = 'https://news.xidian.edu.cn/'
news_dic = []
def req(url):
    # fetch a page and return it as a PyQuery document
response = rq.get(url, headers=headers)
response.encoding = 'utf-8'
html = response.text
doc = pq(html)
return doc
def extract(doc):
'''
    Extract article URLs from the homepage.
    doc1 is the banner/headline section; below the banner there are three columns:
    doc2 is the first column (work updates),
    doc3 is the second column (news from across the web),
    and the third column is hot news, fetched through an API, so it has its own function.
    The other pages are not very interesting; parse() is generic, so write your own extractor if you want them.
'''
urls = []
doc1 = doc('.content1_left')
doc1('.content1_left_top tbody tr:first-child').remove()
doc1('.content1_left_bottom_top').remove()
for url in doc1('a').items():
urls.append(url.attr.href)
doc2 = doc('.gzdt_bottom ul')
for url in doc2('li a:last-child').items():
urls.append(url.attr.href)
doc3 = doc('.mtxd_bottom')
for url in doc3('a').items():
if(url.attr.href[0]=='i'):
urls.append(url.attr.href)
dic4 = get_hot_news()
for dic in dic4:
urls.append(dic['linkurl'])
return urls
def parse(url):
    # parse a single article page
doc = req(root_url + url)
doc('#wz_zw img').remove()
doc('#wz_zw span').remove()
tag = doc('.yaowen-a').text()
title = doc('.neirong-bt').text()
    date = doc('#date').text()[5:21]  # page shows "发布时间:2020-12-01 08:52:41"; adjust the slice as needed
source = doc('#from').text()
    author = doc('.editor').text()  # page shows "责任编辑:XXX"; adjust the slice as needed
    content = doc('#wz_zw p').text()  # paragraphs are joined with spaces by default; write your own regex if you need newlines
    # link to the first image, if any
if doc('.img_vsb_content').attr.src:
picurl = root_url[0:-1] + doc('.img_vsb_content').attr.src
else:
picurl = ''
news_dic.append(dict(zip(["tag", "title", "date", "author", "content", "picurl"],
[ tag , title , date , author, content , picurl ])))
def get_hot_news():
    # the hot-news column is fetched through an API call, hence this separate function
data = {
'owner':'1271716923',
'treeid':'1001',
'viewid':'189460',
'mode':'10',
'locale':'zh_CN',
'pageUrl':'%2Findex.htm',
'uniqueId':'u38',
'actionmethod':'getnewslist'
}
json_raw = rq.post('https://news.xidian.edu.cn/system/resource/js/news/hotdynpullnews.jsp',data=data)
return eval(json_raw.text)
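# Note (illustrative): the hot-news endpoint answers with a JSON-like array,
# so json.loads(json_raw.text) would be a safer drop-in for eval() above.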
if __name__ == '__main__':
doc = req(root_url)
urls = extract(doc)
    # be gentle with the school servers; take a slice of urls when testing
for url in urls[25:30]:
parse(url)
print(news_dic)
'''
Sample output (values are the scraped Chinese page content):
[{
'tag': '西电要闻',
'title': '西电举办第五届“三好三有”研究生导学团队评审会',
'date': '2020-11-30 09:38',
'author': ' 责任编辑:冯毓璇',
'content': '西电新闻网讯(通讯员 霍学浩 高宇星)11月27日下午,西安电子科技大学第五届“三好三有”研究 生导学团队评审会在北校区大礼堂举行...',
'picurl': 'https://news.xidian.edu.cn/__local/F/9A/57/DD2D65A251C04AE5C33ADA469B3_E66B88F8_4CA34.jpg'
}, {
'tag': '西电要闻',
'title': '师德标兵|秦枫:知行合一的“大先生”',
'date': '2020-12-01 10:26',
'author': '责任编辑:冯毓璇',
'content': ' ■学生记者 彭怡乐 宫懿伦 赵晨晋 顾启宇 自1992年任教以来,秦枫已在三尺讲台上辛勤耕耘了28年,她精进自身、知行合一、严谨治学...',
'picurl': 'https://news.xidian.edu.cn/__local/E/EC/25/D514D9A10754ADA29CCDB064439_93C52D97_7C020.jpg'
}]
'''
| lgpl-3.0 | -4,134,377,156,185,125,400 | 24.333333 | 141 | 0.669441 | false |
dutradda/myreco | tests/integration/engine_objects/test_engine_objects_integration.py | 1 | 27200 | # MIT License
# Copyright (c) 2016 Diogo Dutra <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import tempfile
from datetime import datetime
from time import sleep
from unittest import mock
from swaggerit.models._base import _all_models
from tests.integration.fixtures import TopSellerArrayTest
import pytest
import ujson
@pytest.fixture
def init_db(models, session, api):
user = {
'name': 'test',
'email': 'test',
'password': 'test',
'admin': True
}
session.loop.run_until_complete(models['users'].insert(session, user))
tmp = tempfile.TemporaryDirectory()
store = {
'name': 'test',
'country': 'test',
'configuration': {}
}
session.loop.run_until_complete(models['stores'].insert(session, store))
item_type = {
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'stores': [{'id': 1}]
}
session.loop.run_until_complete(models['item_types'].insert(session, item_type))
strategy = {
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest'
}
session.loop.run_until_complete(models['engine_strategies'].insert(session, strategy))
engine_object = {
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}
session.loop.run_until_complete(models['engine_objects'].insert(session, engine_object))
yield tmp.name
tmp.cleanup()
_all_models.pop('store_items_products_1', None)
class TestEngineObjectsModelPost(object):
async def test_post_without_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.post('/engine_objects/', headers=headers)
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is missing'}
async def test_post_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.post('/engine_objects/', headers=headers, data='[{}]')
assert resp.status == 400
assert (await resp.json()) == {
'message': "'name' is a required property. "\
"Failed validating instance['0'] for schema['items']['required']",
'schema': {
'type': 'object',
'additionalProperties': False,
'required': ['name', 'type', 'configuration', 'strategy_id', 'item_type_id', 'store_id'],
'properties': {
'name': {'type': 'string'},
'type': {'type': 'string'},
'strategy_id': {'type': 'integer'},
'item_type_id': {'type': 'integer'},
'store_id': {'type': 'integer'},
'configuration': {}
}
}
}
async def test_post(self, init_db, client, headers, headers_without_content_type):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
resp_json = (await resp.json())
body[0]['id'] = 2
body[0]['store'] = resp_json[0]['store']
body[0]['strategy'] = resp_json[0]['strategy']
body[0]['item_type'] = resp_json[0]['item_type']
assert resp.status == 201
assert resp_json == body
async def test_post_with_invalid_grant(self, client):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers={'Authorization': 'invalid'}, data=ujson.dumps(body))
assert resp.status == 401
assert (await resp.json()) == {'message': 'Invalid authorization'}
class TestEngineObjectsModelGet(object):
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get(
'/engine_objects/?store_id=2&item_type_id=1&strategy_id=1',
headers=headers_without_content_type
)
assert resp.status == 404
async def test_get_invalid_with_body(self, init_db, headers, client):
client = await client
resp = await client.get(
'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
headers=headers,
data='{}'
)
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_valid(self, init_db, headers, headers_without_content_type, client):
body = [{
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {"days_interval": 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'id': 1,
'store': {
'id': 1,
'name': 'test',
'country': 'test',
'configuration': {}
},
'item_type': {
'id': 1,
'store_items_class': None,
'stores': [{
'configuration': {},
'country': 'test',
'id': 1,
'name': 'test'
}],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
},
'strategy': {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
}]
client = await client
resp = await client.get(
'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
headers=headers_without_content_type
)
assert resp.status == 200
assert await resp.json() == body
class TestEngineObjectsModelUriTemplatePatch(object):
async def test_patch_without_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.patch('/engine_objects/1/', headers=headers, data='')
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is missing'}
async def test_patch_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.patch('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert (await resp.json()) == {
'message': '{} does not have enough properties. '\
"Failed validating instance for schema['minProperties']",
'schema': {
'type': 'object',
'additionalProperties': False,
'minProperties': 1,
'properties': {
'name': {'type': 'string'},
'configuration': {}
}
}
}
async def test_patch_with_invalid_config(self, init_db, client, headers, headers_without_content_type):
client = await client
body = {
'configuration': {}
}
resp = await client.patch('/engine_objects/1/', headers=headers, data=ujson.dumps(body))
assert resp.status == 400
print(ujson.dumps(await resp.json(), indent=4))
assert (await resp.json()) == {
'message': "'days_interval' is a required property. "\
"Failed validating instance for schema['required']",
'schema': {
'type': 'object',
'required': ['days_interval'],
'additionalProperties': False,
'properties': {
'days_interval': {'type': 'integer'}
}
}
}
async def test_patch_not_found(self, init_db, client, headers, headers_without_content_type):
client = await client
body = {
'name': 'Top Seller Object Test'
}
resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
assert resp.status == 404
async def test_patch(self, init_db, client, headers, headers_without_content_type):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
obj = (await resp.json())[0]
body = {
'name': 'test2'
}
resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
obj['name'] = 'test2'
assert resp.status == 200
assert (await resp.json()) == obj
class TestEngineObjectsModelUriTemplateGet(object):
async def test_get_with_body(self, init_db, headers, client):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get('/engine_objects/2/', headers=headers_without_content_type)
assert resp.status == 404
async def test_get(self, init_db, headers, headers_without_content_type, client):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
body = {
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {"days_interval": 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'id': 1,
'store': {
'id': 1,
'name': 'test',
'country': 'test',
'configuration': {}
},
'item_type': {
'id': 1,
'store_items_class': None,
'stores': [{
'configuration': {},
'country': 'test',
'id': 1,
'name': 'test'
}],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
},
'strategy': {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
}
assert resp.status == 200
assert await resp.json() == body
class TestEngineObjectsModelUriTemplateDelete(object):
async def test_delete_with_body(self, init_db, client, headers):
client = await client
resp = await client.delete('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is not acceptable'}
async def test_delete_valid(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 200
resp = await client.delete('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 204
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 404
def datetime_mock():
mock_ = mock.MagicMock()
mock_.now.return_value = datetime(1900, 1, 1)
return mock_
async def _wait_job_finish(client, headers_without_content_type, job_name='export'):
sleep(0.05)
while True:
resp = await client.get(
'/engine_objects/1/{}?job_hash=6342e10bd7dca3240c698aa79c98362e'.format(job_name),
headers=headers_without_content_type)
if (await resp.json())['status'] != 'running':
break
return resp
def set_patches(monkeypatch):
monkeypatch.setattr('swaggerit.models.orm._jobs_meta.random.getrandbits',
mock.MagicMock(return_value=131940827655846590526331314439483569710))
monkeypatch.setattr('swaggerit.models.orm._jobs_meta.datetime', datetime_mock())
class TestEngineObjectsModelsDataImporter(object):
async def test_importer_post(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
resp = await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
assert resp.status == 201
assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
await _wait_job_finish(client, headers_without_content_type, 'import_data')
async def test_importer_get_running(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await client.get('/engine_objects/1/import_data?job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type, 'import_data')
async def test_importer_get_done(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
assert await resp.json() == {
'status': 'done',
'result': {'lines_count': 3},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_importer_get_with_error(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data',
mock.MagicMock(side_effect=Exception('testing')))
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
assert await resp.json() == {
'status': 'error',
'result': {'message': 'testing', 'name': 'Exception'},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def _post_products(client, headers, headers_without_content_type, products=[{'sku': 'test'}]):
resp = await client.post('/item_types/1/items?store_id=1',
data=ujson.dumps(products), headers=headers)
resp = await client.post('/item_types/1/update_filters?store_id=1',
headers=headers_without_content_type)
sleep(0.05)
while True:
resp = await client.get(
'/item_types/1/update_filters?store_id=1&job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
if (await resp.json())['status'] != 'running':
break
return resp
def set_readers_builders_patch(monkeypatch, values=None):
if values is None:
values = [[ujson.dumps({'value': 1, 'item_key': 'test'}).encode()]]
readers_builder = values
mock_ = mock.MagicMock()
mock_.return_value = readers_builder
monkeypatch.setattr(
'myreco.engine_objects.object_base.EngineObjectBase._build_csv_readers',
mock_
)
class TestEngineObjectsModelsObjectsExporter(object):
async def test_exporter_post(self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
set_readers_builders_patch(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
resp = await client.post('/engine_objects/1/export', headers=headers_without_content_type)
assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_running(self, init_db, headers_without_content_type, headers, client, monkeypatch, loop):
set_patches(monkeypatch)
prods = [ujson.dumps({'value': i, 'item_key': 'test{}'.format(i)}).encode() for i in range(100)]
set_readers_builders_patch(monkeypatch, [[b'\n'.join(prods)]])
client = await client
products = [{'sku': 'test{}'.format(i)} for i in range(10)]
await _post_products(client, headers, headers_without_content_type, products)
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_done(self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type)
assert await resp.json() == {
'status': 'done',
'result': {'length': 1, 'max_sells': 1, 'min_sells': 1},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error(
self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch, [])
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {
'message': "No data found for engine object 'Top Seller Object'",
'name': 'EngineError'
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
def CoroMock():
coro = mock.MagicMock(name="CoroutineResult")
corofunc = mock.MagicMock(name="CoroutineFunction", side_effect=asyncio.coroutine(coro))
corofunc.coro = coro
return corofunc
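# Usage sketch (illustrative): CoroMock lets a MagicMock stand in for an async
# function while keeping call assertions on the inner mock, e.g.:
#   fetch_mock = CoroMock()
#   fetch_mock.coro.return_value = {'ok': True}  # value produced when awaited
#   monkeypatch.setattr('some.module.fetch', fetch_mock)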
def set_data_importer_patch(monkeypatch, mock_=None):
if mock_ is None:
mock_ = mock.MagicMock()
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data', mock_)
return mock_
class TestEngineObjectsModelsObjectsExporterWithImport(object):
async def test_exporter_post_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
get_data_patch = set_data_importer_patch(monkeypatch)
get_data_patch.return_value = {}
resp = await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
hash_ = await resp.json()
await _wait_job_finish(client, headers_without_content_type)
called = bool(TopSellerArrayTest.get_data.called)
TopSellerArrayTest.get_data.reset_mock()
assert hash_ == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
assert called
async def test_exporter_get_running_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
def func(x, y, z):
sleep(1)
return {}
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
set_data_importer_patch(monkeypatch, func)
await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_done_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
assert await resp.json() == {
'status': 'done',
'result': {
'importer': {'lines_count': 3},
'exporter': {
'length': 1,
'max_sells': 1,
'min_sells': 1
}
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error_in_import_with_import(
self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
get_data_patch = set_data_importer_patch(monkeypatch)
get_data_patch.side_effect = Exception('testing')
await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {'message': 'testing', 'name': 'Exception'},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error_in_export_with_import(
self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch, [])
await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {
'message': "No data found for engine object 'Top Seller Object'",
'name': 'EngineError'
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
| mit | 4,970,314,107,579,558,000 | 36.935844 | 127 | 0.575846 | false |
ashleywaite/django-more | django_enum/operations.py | 1 | 17196 |
from enum import Enum
from operator import attrgetter
from django.db import models
from django.db.models import sql
from django.db.models.deletion import Collector
from django.utils import six
from django_types.operations import CustomTypeOperation
from .fields import EnumField
"""
Use a symbol = value style as per Enum expectations.
Where a value is the human readable or sensible value, and the symbol is the
constant or programming flag to use.
For readbility of the database values, the human readable values are used.
"""
class EnumState:
@classmethod
def values(cls):
return [em.value for em in cls]
@classmethod
def values_set(cls):
return set(cls.values())
def enum_state(values, name=None, app_label=None):
""" Create an EnumState representing the values or Enum """
if isinstance(values, type) and issubclass(values, Enum):
if not name:
name = values.__name__
values = (em.value for em in values)
elif not name:
name = 'Unnamed Enum'
e = Enum(name, [(v, v) for v in values], type=EnumState)
e.Meta = type('Meta', (object,), {})
e.Meta.app_label = app_label
return e
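# Usage sketch (illustrative): enum_state() accepts an iterable of values or an
# existing Enum class, e.g.:
#   Mood = enum_state(['happy', 'sad'], name='mood_enum', app_label='myapp')
#   Mood.values()      # -> ['happy', 'sad']
#   Mood.values_set()  # -> {'happy', 'sad'}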
class SQLCollector(Collector):
""" Collector that generates the required deletion SQL instead of performing it """
def as_sql(self):
""" Generate SQL queries that perform related deletion """
# List of (sql, params) tuples to perform deletion
query_list = []
for model, instances in self.data.items():
self.data[model] = sorted(instances, key=attrgetter("pk"))
self.sort()
# Do not send pre_delete signals as in .delete()
# Fast deletes
for qs in self.fast_deletes:
# TODO Check for any potential caveats from complex queries - assume none are generated by Collector
# Clone queryset into DeleteQuery to use .as_sql()
query_list.append(qs.query.clone(klass=sql.DeleteQuery).get_compiler(self.using).as_sql())
# update fields
for model, instances_for_fieldvalues in six.iteritems(self.field_updates):
query = sql.UpdateQuery(model)
for (field, value), instances in six.iteritems(instances_for_fieldvalues):
query.add_update_values({field.name: value})
query.add_q(models.Q(pk__in=[obj.pk for obj in instances]))
query_list.append(query.get_compiler(using=self.using).as_sql())
# reverse instance collections
for instances in six.itervalues(self.data):
instances.reverse()
# delete instances
for model, instances in six.iteritems(self.data):
query = sql.DeleteQuery(model)
pk_list = [obj.pk for obj in instances]
query.where = query.where_class()
query.add_q(models.Q(pk__in=pk_list))
query_list.append(query.get_compiler(using=self.using).as_sql())
# Do not update instances as in .delete()
return query_list
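# Usage sketch (illustrative, mirrors Collector.delete() without executing it):
#   collector = SQLCollector(using='default')
#   collector.collect(list(SomeModel.objects.filter(...)))  # SomeModel is hypothetical
#   for sql, params in collector.as_sql():
#       schema_editor.execute(sql, params)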
class EnumOperation(CustomTypeOperation):
field_type = EnumField
class CreateEnum(EnumOperation):
def __init__(self, db_type, values):
# Values follow Enum functional API options to specify
self.db_type = db_type
self.values = values
def describe(self):
return 'Create enum type {db_type}'.format(db_type=self.db_type)
def state_forwards(self, app_label, state):
enum = enum_state(self.values, name=self.db_type, app_label=app_label)
state.add_type(self.db_type, enum)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.features.requires_enum_declaration:
enum = to_state.db_types[self.db_type]
sql = schema_editor.sql_create_enum % {
'enum_type': self.db_type,
'values': ', '.join(['%s'] * len(enum))}
schema_editor.execute(sql, enum.values())
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.features.requires_enum_declaration:
sql = schema_editor.sql_delete_enum % {
'enum_type': self.db_type}
schema_editor.execute(sql)
class RemoveEnum(EnumOperation):
def __init__(self, db_type):
self.db_type = db_type
def describe(self):
return 'Remove enum type {db_type}'.format(db_type=self.db_type)
def state_forwards(self, app_label, state):
# TODO Add dependency checking and cascades
state.remove_type(self.db_type)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.features.requires_enum_declaration:
sql = schema_editor.sql_delete_enum % {
'enum_type': self.db_type}
schema_editor.execute(sql)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.features.requires_enum_declaration:
enum = to_state.db_types[self.db_type]
sql = schema_editor.sql_create_enum % {
'enum_type': self.db_type,
'values': ', '.join(['%s'] * len(enum))}
schema_editor.execute(sql, enum.values())
class RenameEnum(EnumOperation):
def __init__(self, old_type, new_type):
self.old_db_type = old_type
self.db_type = new_type
def describe(self):
return 'Rename enum type {old} to {new}'.format(
old=self.old_db_type,
new=self.db_type)
def state_forwards(self, app_label, state):
old_enum = state.db_types[self.old_db_type]
enum = enum_state(old_enum, name=self.db_type, app_label=app_label)
state.remove_type(self.old_db_type)
state.add_type(self.db_type, enum)
# Update all fields using this enum
for info in self.get_fields(state, db_type=self.old_db_type):
changed_field = info.field.clone()
changed_field.type_name = self.db_type
info.model_state.fields[info.field_index] = (info.field_name, changed_field)
state.reload_model(info.model_app_label, info.model_name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.features.requires_enum_declaration:
sql = schema_editor.sql_rename_enum % {
'old_type': self.old_db_type,
'enum_type': self.db_type}
schema_editor.execute(sql)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.old_db_type, self.db_type = self.db_type, self.old_db_type
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.old_db_type, self.db_type = self.db_type, self.old_db_type
class AlterEnum(EnumOperation):
temp_db_type = 'django_enum_temp'
transition_db_type = 'django_enum_transition'
def __init__(self, db_type, add_values=None, remove_values=None, on_delete=models.PROTECT):
self.db_type = db_type
self.add_values = set(add_values or ())
self.remove_values = set(remove_values or ())
self.on_delete = on_delete
def describe(self):
return 'Alter enum type {db_type},{added}{removed}'.format(
db_type=self.db_type,
added=' added {} value(s)'.format(len(self.add_values)) if self.add_values else '',
removed=' removed {} value(s)'.format(len(self.remove_values)) if self.remove_values else '')
def state_forwards(self, app_label, state):
from_enum = state.db_types[self.db_type]
to_enum = enum_state((from_enum.values_set() | self.add_values) - self.remove_values, name=self.db_type, app_label=app_label)
state.add_type(self.db_type, to_enum)
# Update all fields using this enum
for info in self.get_fields(state):
changed_field = info.field.clone()
changed_field.type_def = to_enum
info.model_state.fields[info.field_index] = (info.field_name, changed_field)
state.reload_model(info.model_app_label, info.model_name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
# Compare from_state and to_state and generate the appropriate ALTER commands
pre_actions = []
post_actions = []
# Make sure ORM is ready for use
from_state.clear_delayed_apps_cache()
db_alias = schema_editor.connection.alias
# Get field/model list
fields = [
(from_model, to_model, from_field, self.on_delete or from_field.on_delete)
for info in self.get_fields(from_state)
for from_model in [from_state.apps.get_model(info.model_app_label, info.model_name)]
for from_field in [from_model._meta.get_field(info.field_name)]
for to_model in [to_state.apps.get_model(info.model_app_label, info.model_name)]
]
if self.remove_values:
# The first post delete actions are to finalise the field types
if schema_editor.connection.features.has_enum:
if schema_editor.connection.features.requires_enum_declaration:
sql_alter_column_type = getattr(
schema_editor,
'sql_alter_column_type_using',
schema_editor.sql_alter_column_type)
for (from_model, to_model, field, on_delete) in fields:
db_table = schema_editor.quote_name(from_model._meta.db_table)
db_field = schema_editor.quote_name(field.column)
sql = schema_editor.sql_alter_column % {
'table': db_table,
'changes': sql_alter_column_type % {
'column': db_field,
'type': self.temp_db_type,
'old_type': self.db_type}}
post_actions.append((sql, []))
else:
for (from_model, to_model, field, on_delete) in fields:
db_table = schema_editor.quote_name(from_model._meta.db_table)
db_field = schema_editor.quote_name(field.column)
new_field = to_model._meta.get_field(field.name)
db_type, params = new_field.db_type(schema_editor.connection).paramatized
sql = schema_editor.sql_alter_column % {
'table': db_table,
'changes': schema_editor.sql_alter_column_type % {
'column': db_field,
'type': db_type}}
post_actions.append((sql, params))
if self.add_values:
# If there's the possibility of inconsistent actions, use transition type
# ie, ADD VALUE 'new_val' and REMOVE VALUE 'rem_val' ON DELETE SET('new_val')
# On DB's without enum support this isn't necessary as they are always CHAR
transition_fields = [
(from_model, field)
for (from_model, to_model, field, on_delete) in fields
if hasattr(on_delete, 'deconstruct')
or (on_delete == models.SET_DEFAULT and field.get_default() in self.add_values)]
if transition_fields and schema_editor.connection.features.has_enum:
transition_values = to_state.db_types[self.db_type].values_set() | self.remove_values
transition_enum = enum_state(transition_values, 'transitional_enum')
if schema_editor.connection.features.requires_enum_declaration:
# Create transition type
sql = schema_editor.sql_create_enum % {
'enum_type': self.transition_db_type,
                        'values': ', '.join(['%s'] * len(transition_values))}
pre_actions.append((sql, list(transition_values)))
# Drop transition type after done
sql = schema_editor.sql_delete_enum % {
'enum_type': self.transition_db_type}
post_actions.append((sql, []))
# Set fields to transition type
for (model, field) in transition_fields:
db_table = schema_editor.quote_name(model._meta.db_table)
db_field = schema_editor.quote_name(field.column)
field.type_name = self.transition_db_type
field.type_def = transition_enum
db_type, params = field.db_type(schema_editor.connection).paramatized
sql = schema_editor.sql_alter_column % {
'table': db_table,
'changes': schema_editor.sql_alter_column_type % {
'column': db_field,
'type': db_type}}
pre_actions.append((sql, params))
if schema_editor.connection.features.requires_enum_declaration:
# Create new type with temporary name
to_enum = to_state.db_types[self.db_type]
sql = schema_editor.sql_create_enum % {
'enum_type': self.temp_db_type,
'values': ', '.join(['%s'] * len(to_enum))}
pre_actions.append((sql, to_enum.values()))
# Clean up original type and rename new one to replace it
sql = schema_editor.sql_delete_enum % {
'enum_type': self.db_type}
post_actions.append((sql, []))
sql = schema_editor.sql_rename_enum % {
'old_type': self.temp_db_type,
'enum_type': self.db_type}
post_actions.append((sql, []))
elif self.add_values:
# Just adding values? Directly modify types, no hassle!
if schema_editor.connection.features.requires_enum_declaration:
for value in self.add_values:
sql = schema_editor.sql_alter_enum % {
'enum_type': self.db_type,
'value': '%s'}
post_actions.append((sql, [value]))
elif schema_editor.connection.features.has_enum:
for (from_model, to_model, field, on_delete) in fields:
db_table = schema_editor.quote_name(from_model._meta.db_table)
db_field = schema_editor.quote_name(field.column)
new_field = to_model._meta.get_field(field.name)
db_type, params = new_field.db_type(schema_editor.connection).paramatized
                    sql = schema_editor.sql_alter_column % {
                        'table': db_table,
                        'changes': schema_editor.sql_alter_column_type % {
                            'column': db_field,
                            'type': db_type}}
post_actions.append((sql, params))
# Prepare database for data to be migrated
for sql, params in pre_actions:
schema_editor.execute(sql, params)
# Apply all on_delete actions making data consistent with to_state values
if self.remove_values:
# Cheap hack to allow on_delete to work
for (from_model, to_model, field, on_delete) in fields:
field.remote_field = self
# Records affected by on_delete action
on_delete_gen = ((
field,
from_model.objects.using(db_alias).filter(
models.Q(('{}__in'.format(field.name), self.remove_values))
).only('pk'),
on_delete)
for (from_model, to_model, field, on_delete) in fields)
# Validate on_delete constraints
collector = SQLCollector(using=db_alias)
for (field, qs, on_delete) in on_delete_gen:
if qs:
# Trigger the on_delete collection directly
on_delete(collector, field, qs, db_alias)
for sql, params in collector.as_sql():
# Use SQLCollector.as_sql() instead of directly executing
# Such that manage.py sqlmigration correctly reflects all actions
schema_editor.execute(sql, params)
# Apply final changes
for sql, params in post_actions:
schema_editor.execute(sql, params)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.add_values, self.remove_values = self.remove_values, self.add_values
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.add_values, self.remove_values = self.remove_values, self.add_values
| bsd-3-clause | 4,471,117,172,481,644,500 | 44.734043 | 133 | 0.573447 | false |
fukun07/neural-image-captioning | codes/pycoco/bleu/bleu.py | 1 | 1260 | #!/usr/bin/env python
#
# File Name : bleu.py
#
# Description : Wrapper for BLEU scorer.
#
# Creation Date : 06-01-2015
# Last Modified : Thu 19 Mar 2015 09:13:28 PM PDT
# Authors : Hao Fang <[email protected]> and Tsung-Yi Lin <[email protected]>
from bleu_scorer import BleuScorer
class Bleu:
def __init__(self, n=4):
# default compute Blue score up to 4
self._n = n
self._hypo_for_image = {}
self.ref_for_image = {}
def compute_score(self, gts, res):
assert(sorted(gts.keys()) == sorted(res.keys()))
imgIds = gts.keys()
bleu_scorer = BleuScorer(n=self._n)
for id in imgIds:
hypo = res[id]
ref = gts[id]
# Sanity check.
assert(type(hypo) is list)
assert(len(hypo) == 1)
assert(type(ref) is list)
assert(len(ref) > 1)
bleu_scorer += (hypo[0], ref)
#score, scores = bleu_scorer.compute_score(option='shortest')
score, scores = bleu_scorer.compute_score(option='closest', verbose=0)
#score, scores = bleu_scorer.compute_score(option='average', verbose=0)
# return (bleu, bleu_info)
return score, scores
def method(self):
return "Bleu"
| mit | -4,045,538,868,026,867,700 | 26.391304 | 79 | 0.569841 | false |
silly-wacky-3-town-toon/SOURCE-COD | toontown/catalog/CatalogChatItem.py | 1 | 5689 | from panda3d.core import *
from panda3d.direct import *
import CatalogItem
from toontown.toonbase import ToontownGlobals
from otp.otpbase import OTPLocalizer
from toontown.toonbase import TTLocalizer
bannedPhrases = [11009]
class CatalogChatItem(CatalogItem.CatalogItem):
def makeNewItem(self, customIndex):
self.customIndex = customIndex
CatalogItem.CatalogItem.makeNewItem(self)
def getPurchaseLimit(self):
return 1
def reachedPurchaseLimit(self, avatar):
if self in avatar.onOrder or self in avatar.mailboxContents or self in avatar.onGiftOrder or self in avatar.awardMailboxContents or self in avatar.onAwardOrder:
return 1
return avatar.customMessages.count(self.customIndex) != 0
def getTypeName(self):
return TTLocalizer.ChatTypeName
def getName(self):
return TTLocalizer.ChatItemQuotes % OTPLocalizer.CustomSCStrings[self.customIndex]
def getDisplayName(self):
return OTPLocalizer.CustomSCStrings[self.customIndex]
def recordPurchase(self, avatar, optional):
if avatar.customMessages.count(self.customIndex) != 0:
return ToontownGlobals.P_ReachedPurchaseLimit
if len(avatar.customMessages) >= ToontownGlobals.MaxCustomMessages:
if optional >= 0 and optional < len(avatar.customMessages):
del avatar.customMessages[optional]
if len(avatar.customMessages) >= ToontownGlobals.MaxCustomMessages:
return ToontownGlobals.P_NoRoomForItem
avatar.customMessages.append(self.customIndex)
avatar.d_setCustomMessages(avatar.customMessages)
return ToontownGlobals.P_ItemAvailable
def getAcceptItemErrorText(self, retcode):
if retcode == ToontownGlobals.P_ItemAvailable:
return TTLocalizer.CatalogAcceptChat
return CatalogItem.CatalogItem.getAcceptItemErrorText(self, retcode)
def output(self, store = -1):
return 'CatalogChatItem(%s%s)' % (self.customIndex, self.formatOptionalData(store))
def compareTo(self, other):
return self.customIndex - other.customIndex
def getHashContents(self):
return self.customIndex
def getBasePrice(self):
if self.customIndex >= 10000:
return 150
return 100
def decodeDatagram(self, di, versionNumber, store):
CatalogItem.CatalogItem.decodeDatagram(self, di, versionNumber, store)
self.customIndex = di.getUint16()
text = OTPLocalizer.CustomSCStrings[self.customIndex]
def encodeDatagram(self, dg, store):
CatalogItem.CatalogItem.encodeDatagram(self, dg, store)
dg.addUint16(self.customIndex)
def acceptItem(self, mailbox, index, callback):
if len(base.localAvatar.customMessages) < ToontownGlobals.MaxCustomMessages:
mailbox.acceptItem(self, index, callback)
else:
self.showMessagePickerOnAccept(mailbox, index, callback)
def requestPurchase(self, phone, callback):
if len(base.localAvatar.customMessages) < ToontownGlobals.MaxCustomMessages:
CatalogItem.CatalogItem.requestPurchase(self, phone, callback)
else:
self.showMessagePicker(phone, callback)
def showMessagePicker(self, phone, callback):
self.phone = phone
self.callback = callback
import CatalogChatItemPicker
self.messagePicker = CatalogChatItemPicker.CatalogChatItemPicker(self.__handlePickerDone, self.customIndex)
self.messagePicker.show()
def showMessagePickerOnAccept(self, mailbox, index, callback):
self.mailbox = mailbox
self.callback = callback
self.index = index
import CatalogChatItemPicker
self.messagePicker = CatalogChatItemPicker.CatalogChatItemPicker(self.__handlePickerOnAccept, self.customIndex)
self.messagePicker.show()
def __handlePickerOnAccept(self, status, pickedMessage = None):
        print 'Picker Status: %s' % status
if status == 'pick':
self.mailbox.acceptItem(self, self.index, self.callback, pickedMessage)
else:
print 'picker canceled'
self.callback(ToontownGlobals.P_UserCancelled, None, self.index)
self.messagePicker.hide()
self.messagePicker.destroy()
del self.messagePicker
del self.callback
del self.mailbox
return
def __handlePickerDone(self, status, pickedMessage = None):
if status == 'pick':
CatalogItem.CatalogItem.requestPurchase(self, self.phone, self.callback, pickedMessage)
self.messagePicker.hide()
self.messagePicker.destroy()
del self.messagePicker
del self.callback
del self.phone
def getPicture(self, avatar):
chatBalloon = loader.loadModel('phase_3/models/props/chatbox')
chatBalloon.find('**/top').setPos(1, 0, 5)
chatBalloon.find('**/middle').setScale(1, 1, 3)
frame = self.makeFrame()
chatBalloon.reparentTo(frame)
chatBalloon.setPos(-2.19, 0, -1.74)
chatBalloon.setScale(0.4)
self.hasPicture = True
return (frame, None)
def getChatRange(fromIndex, toIndex, *otherRanges):
    # otherRanges is a flat sequence of additional (from, to) index pairs.
    items = []
    froms = [fromIndex]
    tos = [toIndex]
    i = 0
    while i < len(otherRanges):
        froms.append(otherRanges[i])
        tos.append(otherRanges[i + 1])
        i += 2
    for chatId in OTPLocalizer.CustomSCStrings.keys():
        for fromIndex, toIndex in zip(froms, tos):
            if chatId >= fromIndex and chatId <= toIndex and chatId not in bannedPhrases:
                items.append(CatalogChatItem(chatId))
    return items
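# Example (illustrative, not from the original source): getChatRange(0, 100,
# 200, 250) builds CatalogChatItems for every custom phrase id in 0..100 and
# 200..250 that is not listed in bannedPhrases.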
| apache-2.0 | 6,819,379,785,983,695,000 | 37.181208 | 168 | 0.683951 | false |
PierreRaybaut/PythonQwt | qwt/plot_directpainter.py | 1 | 10691 | # -*- coding: utf-8 -*-
#
# Licensed under the terms of the Qwt License
# Copyright (c) 2002 Uwe Rathmann, for the original C++ code
# Copyright (c) 2015 Pierre Raybaut, for the Python translation/optimization
# (see LICENSE file for more details)
"""
QwtPlotDirectPainter
--------------------
.. autoclass:: QwtPlotDirectPainter
:members:
"""
from qtpy.QtGui import QPainter, QRegion
from qtpy.QtCore import QObject, Qt, QEvent
from qtpy import QtCore as QC
QT_MAJOR_VERSION = int(QC.__version__.split(".")[0])
from qwt.plot import QwtPlotItem
from qwt.plot_canvas import QwtPlotCanvas
def qwtRenderItem(painter, canvasRect, seriesItem, from_, to):
# TODO: A minor performance improvement is possible with caching the maps
plot = seriesItem.plot()
xMap = plot.canvasMap(seriesItem.xAxis())
yMap = plot.canvasMap(seriesItem.yAxis())
painter.setRenderHint(
QPainter.Antialiasing, seriesItem.testRenderHint(QwtPlotItem.RenderAntialiased)
)
seriesItem.drawSeries(painter, xMap, yMap, canvasRect, from_, to)
def qwtHasBackingStore(canvas):
return (
canvas.testPaintAttribute(QwtPlotCanvas.BackingStore) and canvas.backingStore()
)
class QwtPlotDirectPainter_PrivateData(object):
def __init__(self):
self.attributes = 0
self.hasClipping = False
self.seriesItem = None # QwtPlotSeriesItem
self.clipRegion = QRegion()
self.painter = QPainter()
self.from_ = None
self.to = None
class QwtPlotDirectPainter(QObject):
"""
Painter object trying to paint incrementally
Often applications want to display samples while they are
collected. When there are too many samples complete replots
will be expensive to be processed in a collection cycle.
`QwtPlotDirectPainter` offers an API to paint
subsets (f.e all additions points) without erasing/repainting
the plot canvas.
On certain environments it might be important to calculate a proper
clip region before painting. F.e. for Qt Embedded only the clipped part
of the backing store will be copied to a (maybe unaccelerated)
frame buffer.
.. warning::
Incremental painting will only help when no replot is triggered
by another operation (like changing scales) and nothing needs
to be erased.
Paint attributes:
* `QwtPlotDirectPainter.AtomicPainter`:
Initializing a `QPainter` is an expensive operation.
When `AtomicPainter` is set each call of `drawSeries()` opens/closes
a temporary `QPainter`. Otherwise `QwtPlotDirectPainter` tries to
use the same `QPainter` as long as possible.
* `QwtPlotDirectPainter.FullRepaint`:
When `FullRepaint` is set the plot canvas is explicitly repainted
after the samples have been rendered.
* `QwtPlotDirectPainter.CopyBackingStore`:
When `QwtPlotCanvas.BackingStore` is enabled the painter
has to paint to the backing store and the widget. In certain
situations/environments it might be faster to paint to
the backing store only and then copy the backing store to the canvas.
      This flag can also be useful for settings, where Qt fills the
      clip region with the widget background.
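    Example (a minimal sketch, not part of the original documentation; it
    assumes ``curve`` is a series item attached to a plot and that ``numNew``
    points were just appended to its data)::
        directPainter = QwtPlotDirectPainter()
        directPainter.setAttribute(QwtPlotDirectPainter.FullRepaint, True)
        directPainter.drawSeries(curve, curve.dataSize() - numNew, -1)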
"""
# enum Attribute
AtomicPainter = 0x01
FullRepaint = 0x02
CopyBackingStore = 0x04
def __init__(self, parent=None):
QObject.__init__(self, parent)
self.__data = QwtPlotDirectPainter_PrivateData()
def setAttribute(self, attribute, on=True):
"""
Change an attribute
:param int attribute: Attribute to change
:param bool on: On/Off
.. seealso::
:py:meth:`testAttribute()`
"""
        # Only act when the attribute state actually changes; the previous
        # logic could set the bit again when asked to clear it.
        if bool(self.testAttribute(attribute)) != on:
            if on:
                self.__data.attributes |= attribute
            else:
                self.__data.attributes &= ~attribute
            if attribute == self.AtomicPainter and on:
                self.reset()
def testAttribute(self, attribute):
"""
:param int attribute: Attribute to be tested
:return: True, when attribute is enabled
.. seealso::
:py:meth:`setAttribute()`
"""
        return bool(self.__data.attributes & attribute)
def setClipping(self, enable):
"""
En/Disables clipping
:param bool enable: Enables clipping is true, disable it otherwise
.. seealso::
:py:meth:`hasClipping()`, :py:meth:`clipRegion()`,
:py:meth:`setClipRegion()`
"""
self.__data.hasClipping = enable
def hasClipping(self):
"""
:return: Return true, when clipping is enabled
.. seealso::
:py:meth:`setClipping()`, :py:meth:`clipRegion()`,
:py:meth:`setClipRegion()`
"""
return self.__data.hasClipping
def setClipRegion(self, region):
"""
Assign a clip region and enable clipping
Depending on the environment setting a proper clip region might
improve the performance heavily. F.e. on Qt embedded only the clipped
part of the backing store will be copied to a (maybe unaccelerated)
frame buffer device.
:param QRegion region: Clip region
.. seealso::
:py:meth:`hasClipping()`, :py:meth:`setClipping()`,
:py:meth:`clipRegion()`
"""
self.__data.clipRegion = region
self.__data.hasClipping = True
def clipRegion(self):
"""
:return: Return Currently set clip region.
.. seealso::
:py:meth:`hasClipping()`, :py:meth:`setClipping()`,
:py:meth:`setClipRegion()`
"""
return self.__data.clipRegion
def drawSeries(self, seriesItem, from_, to):
"""
Draw a set of points of a seriesItem.
When observing a measurement while it is running, new points have
to be added to an existing seriesItem. drawSeries() can be used to
display them avoiding a complete redraw of the canvas.
Setting `plot().canvas().setAttribute(Qt.WA_PaintOutsidePaintEvent, True)`
will result in faster painting, if the paint engine of the canvas widget
supports this feature.
:param qwt.plot_series.QwtPlotSeriesItem seriesItem: Item to be painted
:param int from_: Index of the first point to be painted
:param int to: Index of the last point to be painted. If to < 0 the series will be painted to its last point.
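        Example (illustrative): ``drawSeries(curve, n, -1)`` paints every
        sample from index ``n`` through the current last point of ``curve``.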
"""
if seriesItem is None or seriesItem.plot() is None:
return
canvas = seriesItem.plot().canvas()
canvasRect = canvas.contentsRect()
plotCanvas = canvas # XXX: cast to QwtPlotCanvas
if plotCanvas and qwtHasBackingStore(plotCanvas):
painter = QPainter(
plotCanvas.backingStore()
) # XXX: cast plotCanvas.backingStore() to QPixmap
if self.__data.hasClipping:
painter.setClipRegion(self.__data.clipRegion)
qwtRenderItem(painter, canvasRect, seriesItem, from_, to)
painter.end()
if self.testAttribute(self.FullRepaint):
plotCanvas.repaint()
return
immediatePaint = True
if not canvas.testAttribute(Qt.WA_WState_InPaintEvent):
if QT_MAJOR_VERSION >= 5 or not canvas.testAttribute(
Qt.WA_PaintOutsidePaintEvent
):
immediatePaint = False
if immediatePaint:
if not self.__data.painter.isActive():
self.reset()
self.__data.painter.begin(canvas)
canvas.installEventFilter(self)
if self.__data.hasClipping:
self.__data.painter.setClipRegion(
QRegion(canvasRect) & self.__data.clipRegion
)
elif not self.__data.painter.hasClipping():
self.__data.painter.setClipRect(canvasRect)
qwtRenderItem(self.__data.painter, canvasRect, seriesItem, from_, to)
if self.__data.attributes & self.AtomicPainter:
self.reset()
elif self.__data.hasClipping:
self.__data.painter.setClipping(False)
else:
self.reset()
self.__data.seriesItem = seriesItem
self.__data.from_ = from_
self.__data.to = to
clipRegion = QRegion(canvasRect)
if self.__data.hasClipping:
clipRegion &= self.__data.clipRegion
canvas.installEventFilter(self)
canvas.repaint(clipRegion)
canvas.removeEventFilter(self)
self.__data.seriesItem = None
def reset(self):
"""Close the internal QPainter"""
if self.__data.painter.isActive():
w = self.__data.painter.device() # XXX: cast to QWidget
if w:
w.removeEventFilter(self)
self.__data.painter.end()
def eventFilter(self, obj_, event):
if event.type() == QEvent.Paint:
self.reset()
if self.__data.seriesItem:
pe = event # XXX: cast to QPaintEvent
canvas = self.__data.seriesItem.plot().canvas()
painter = QPainter(canvas)
painter.setClipRegion(pe.region())
doCopyCache = self.testAttribute(self.CopyBackingStore)
if doCopyCache:
plotCanvas = canvas # XXX: cast to QwtPlotCanvas
if plotCanvas:
doCopyCache = qwtHasBackingStore(plotCanvas)
if doCopyCache:
painter.drawPixmap(
plotCanvas.rect().topLeft(), plotCanvas.backingStore()
)
if not doCopyCache:
qwtRenderItem(
painter,
canvas.contentsRect(),
self.__data.seriesItem,
self.__data.from_,
self.__data.to,
)
return True
return False
| lgpl-2.1 | 2,105,900,071,676,100,600 | 34.363946 | 117 | 0.575437 | false |
Audaces/ml | src/po2ml/__main__.py | 1 | 3322 | #!/usr/bin/env python3
'''{executable_name}: Importer for .po files to Audaces ML new translation format.
Usage:
    {executable_name} [--mark <mark>] [<file>]
This program imports .po (gettext translation) files to Audaces ML .tra format.
If a custom mark is set through --mark, it will be placed on both ends of the
identifier as the default translation string for untranslated strings in the
.po file.
If no file is passed as an argument, the program reads from stdin.
Output is written to stdout.
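Example (illustrative; the "@@" mark and file name are placeholders):
    {executable_name} --mark @@ messages.po > messages.tra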
'''
import sys
import re
from ml import tra_file
# Rebind stdout so output is always UTF-8 with Unix newlines, independent of
# the locale settings.
sys.stdout = open(1, 'w', encoding='utf-8', newline='\n', closefd=False)
def strip_uninteresting(file_handler):
'''Removes irrelevant lines, or features the importer can't handle.
    This function takes a (presumably text, we hope) file and returns a list
    of the lines deemed 'interesting': those starting with "msgid", "msgstr"
    or "msgstr[0]", plus '"' continuation lines, in THE SAME ORDER as they
    appear in the file (the pairing done later relies on this order).
'''
return [line.strip() for line in file_handler if line.startswith("msgid ")
or line.startswith("msgstr ")
or line.startswith("msgstr[0] ")
or line.startswith('"')]
def concatenate_strings(text):
    '''Concatenates every string in an input describing strings.
    This function takes a string containing a sequence of double-quoted
    strings and returns their concatenation. Escaped quotes (\\") survive
    as-is; no unescaping is performed.
    '''
    strings = re.findall(r'"((?:\\"|.)*?)"', text)
    return ''.join(strings)
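# Example (illustrative): concatenate_strings('msgid "Hello, " "world"')
# returns 'Hello, world' -- only the quoted fragments are kept.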
def make_tuples(lines):
'''Actual parsing of the po file.
This function takes a list of lines in the format returned by
strip_uninteresting (check its docstring if needed) and pairs them up in
a (msgid, msgstr) manner. This creates an output similar to the one used
in the ml module.
The input to the function is assumed to be correct already, as in no
unpaired or out of order items are given.
'''
joined = ' '.join(lines)
pieces = re.split(r'\s*msg(?:id|str)\s*', joined.strip())
strings = [concatenate_strings(string) for string in pieces if string]
result = []
while strings:
msgid, msgstr, *strings = strings
if msgid:
result.append((msgid, msgstr))
return result
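# Example (illustrative): make_tuples(['msgid "Hi"', 'msgstr "Oi"']) returns
# [('Hi', 'Oi')]; pairs whose msgid is empty (such as the po header) are
# dropped.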
def parse_file(file_handler, mark=None):
    '''Combines removal of uninteresting lines and the actual parsing.
    This function applies make_tuples() to the interesting lines of the given
    file. When a mark is given, untranslated entries (those with an empty
    msgstr) get a default translation built by placing the mark on both ends
    of the msgid, as described in the module docstring.
    '''
    pairs = make_tuples(strip_uninteresting(file_handler))
    if mark is None:
        return pairs
    return [(msgid, msgstr if msgstr else mark + msgid + mark)
            for msgid, msgstr in pairs]
def main():
    '''Main logic for the importer.
    Main function for this program. It parses arguments looking for the
    definition of a custom mark, and applies parse_file() to the given input
    (file or stdin).
    '''
    args = sys.argv[1:]
    mark = None
    if args and args[0] == '--mark':
        if len(args) < 2:
            print(__doc__.format(executable_name=sys.argv[0]))
            sys.exit(-1)
        mark = args[1]
        args = args[2:]
    if len(args) > 1:
        print(__doc__.format(executable_name=sys.argv[0]))
        sys.exit(-1)
    if args:
        filename = args[0]
    else:
        filename = 0
    try:
        with open(filename, encoding='utf-8') as file_handler:
            print(tra_file(parse_file(file_handler, mark)))
    except FileNotFoundError:
        print(filename, 'is not a valid file.')
if __name__ == '__main__':
main()
| mit | 3,981,279,307,848,930,000 | 29.759259 | 82 | 0.664359 | false |