| repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses, 19 values) | size (stringlengths 4-7) | content (stringlengths 721-1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64 6.51-99.9) | line_max (int64 15-997) | alpha_frac (float64 0.25-0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
probardjango/Comercio-Digital | src/comerciodigital/settings.py | 1 | 2734 |
"""
Django settings for comerciodigital project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'iqqvh9ulex94*fx(cl7c$#_a-39ru7ek-0f7f4h(jgtp874hgj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
    # our apps
'productos',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'comerciodigital.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'comerciodigital.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| mit | 1,172,620,309,974,841,600 | 25.288462 | 71 | 0.695684 | false |
beeftornado/sentry | tests/sentry/event_manager/interfaces/test_http.py | 1 | 3923 |
from __future__ import absolute_import
import pytest
from sentry import eventstore
from sentry.event_manager import EventManager
@pytest.fixture
def make_http_snapshot(insta_snapshot):
def inner(data):
mgr = EventManager(data={"request": data})
mgr.normalize()
evt = eventstore.create_event(data=mgr.get_data())
interface = evt.interfaces.get("request")
insta_snapshot({"errors": evt.data.get("errors"), "to_json": interface.to_json()})
return inner
def test_basic(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com"))
def test_full(make_http_snapshot):
make_http_snapshot(
dict(
method="GET",
url="http://example.com",
query_string="foo=bar",
fragment="foobar",
headers={"x-foo-bar": "baz"},
cookies={"foo": "bar"},
env={"bing": "bong"},
data="hello world",
)
)
def test_query_string_as_dict(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", query_string={"foo": "bar"}))
def test_query_string_as_pairlist(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", query_string=[["foo", "bar"]]))
def test_data_as_dict(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", data={"foo": "bar"}))
def test_urlencoded_data(make_http_snapshot):
make_http_snapshot(
dict(
url="http://example.com",
headers={"Content-Type": "application/x-www-form-urlencoded"},
data="foo=bar",
)
)
def test_infer_urlencoded_content_type(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", data="foo=bar"))
def test_json_data(make_http_snapshot):
make_http_snapshot(
dict(
url="http://example.com",
headers={"Content-Type": "application/json"},
data='{"foo":"bar"}',
)
)
def test_infer_json_content_type(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", data='{"foo":"bar"}'))
def test_cookies_as_string(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", cookies="a=b;c=d"))
make_http_snapshot(dict(url="http://example.com", cookies="a=b;c=d"))
def test_cookies_in_header(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", headers={"Cookie": "a=b;c=d"}))
def test_cookies_in_header2(make_http_snapshot):
make_http_snapshot(
dict(url="http://example.com", headers={"Cookie": "a=b;c=d"}, cookies={"foo": "bar"})
)
def test_query_string_and_fragment_as_params(make_http_snapshot):
make_http_snapshot(
dict(url="http://example.com", query_string=u"foo\ufffd=bar\u2026", fragment="fragment")
)
def test_query_string_and_fragment_in_url(make_http_snapshot):
make_http_snapshot(dict(url=u"http://example.com?foo\ufffd=bar#fragment\u2026"))
def test_header_value_list(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", headers={"Foo": ["1", "2"]}))
def test_header_value_str(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", headers={"Foo": 1}))
def test_invalid_method(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", method="1234"))
def test_invalid_method2(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", method="A" * 33))
def test_invalid_method3(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", method="A"))
def test_unknown_method(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", method="TEST"))
def test_unknown_method2(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", method="FOO-BAR"))
def test_unknown_method3(make_http_snapshot):
make_http_snapshot(dict(url="http://example.com", method="FOO_BAR"))
| bsd-3-clause | -7,135,125,457,729,543,000 | 27.845588 | 96 | 0.642366 | false |
YorkUIRLab/eosdb | topic_coherence.py | 1 | 2591 |
# coding: utf-8
# In[4]:
import glob
from datetime import datetime
import logging as log
import gensim
import matplotlib.pyplot as plt
import pyLDAvis
import pyLDAvis.gensim
from gensim.models import CoherenceModel
from sklearn.externals import joblib
import gzip
from multiprocessing import Pool
get_ipython().magic(u'matplotlib notebook')
# In[1]:
class ModelSimilarity:
# Uses a model (e.g. Word2Vec model) to calculate the similarity between two terms.
def __init__(self, model):
self.model = model
def similarity(self, ranking_i, ranking_j):
sim = 0.0
pairs = 0
for term_i in ranking_i:
for term_j in ranking_j:
try:
sim += self.model.similarity(term_i, term_j)
pairs += 1
except:
# print "Failed pair (%s,%s)" % (term_i,term_j)
pass
if pairs == 0:
return 0.0
return sim / pairs
# In[2]:
class WithinTopicMeasure:
# Measures within-topic coherence for a topic model, based on a set of term rankings.
def __init__(self, metric):
self.metric = metric
def evaluate_ranking(self, term_ranking):
return self.metric.similarity(term_ranking, term_ranking)
def evaluate_rankings(self, term_rankings):
scores = []
overall = 0.0
for topic_index in range(len(term_rankings)):
score = self.evaluate_ranking(term_rankings[topic_index])
scores.append(score)
overall += score
overall /= len(term_rankings)
return overall
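# In[ ]:
# Quick sanity check for the classes above (illustrative only, not from the
# original notebook): with a stub model whose similarity is 1.0 for equal
# terms and 0.0 otherwise, ['alpha', 'alpha'] scores 1.0 and
# ['alpha', 'beta'] scores 0.5, so the overall mean printed below is 0.75.
class _StubModel(object):
    def similarity(self, term_i, term_j):
        return 1.0 if term_i == term_j else 0.0
_stub_measure = WithinTopicMeasure(ModelSimilarity(_StubModel()))
print(_stub_measure.evaluate_rankings([['alpha', 'alpha'], ['alpha', 'beta']]))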
# In[13]:
# To get the topic words from the model
def get_topics(ldamodel, num_topics, num_words):
topics = []
for topic_id, topic in ldamodel.show_topics(num_topics=num_topics, num_words=num_words, formatted=False):
topic = [word for word, _ in topic]
topics.append(topic)
return topics
ldamodel = joblib.load('data/eos/lda/28_LDAmodel_EOS.pkl')
print(ldamodel)
print(get_topics(ldamodel, 28, 10))
# In[18]:
model_path = 'data/eos/word2vec_model_all.model'
log.info("Loading Word2Vec model from %s ..." % model_path)
model = gensim.models.Word2Vec.load(model_path)
metric = ModelSimilarity(model)
validation_measure = WithinTopicMeasure(metric)
topic_num = 28
truncated_term_rankings = get_topics(ldamodel, topic_num, 10)
coherence_scores = validation_measure.evaluate_rankings(truncated_term_rankings)
log.info("Model coherence (k=%d) = %.4f" % (topic_num, coherence_scores))
print(coherence_scores)
# In[ ]:
| lgpl-3.0 | -2,668,634,247,923,791,000 | 24.15534 | 109 | 0.643381 | false |
gallupliu/QA | data/models.py | 1 | 2787 |
# encoding: utf-8
"""
@author: gallupliu
@contact: [email protected]
@version: 1.0
@license: Apache Licence
@file: models.py
@time: 2018/1/12 23:19
"""
from data.util import unique_items
class MetadataItem(object):
def __init__(self):
self.metadata = dict()
class Token(MetadataItem):
def __init__(self, text):
"""
:type text: str
"""
super(Token, self).__init__()
self.text = text
class Sentence(MetadataItem):
def __init__(self, text, tokens):
"""
:type text: str
:type tokens: list[Token]
"""
super(Sentence, self).__init__()
self.text = text
self.tokens = tokens
@property
def vocab(self):
vocab = []
for token in self.tokens:
vocab.append(token.text)
return unique_items(vocab)
class TextItem(MetadataItem):
def __init__(self, text, sentences):
"""
:type text: str
:type sentences: list[Sentence]
"""
super(TextItem, self).__init__()
self.text = text
self.sentences = sentences
@property
def vocab(self):
vocab = []
for sentence in self.sentences:
vocab += sentence.vocab
return unique_items(vocab)
class QAPool(object):
def __init__(self, question, pooled_answers, ground_truth):
"""
:type question: TextItem
:type pooled_answers: list[TextItem]
:type ground_truth: list[TextItem]
"""
self.question = question
self.pooled_answers = pooled_answers
self.ground_truth = ground_truth
class Data(object):
def __init__(self, split_name, qa, answers):
"""
:type split_name: str
:type qa: list[QAPool]
:type answers: list[TextItem]
"""
self.split_name = split_name
self.qa = qa
self.answers = answers
class Archive(object):
def __init__(self, train, valid, test, questions, answers):
"""
:type train: Data
:type valid: Data
:type test: list[Data]
        :type questions: list[TextItem]
        :type answers: list[TextItem]
"""
self.train = train
self.valid = valid
self.test = test
self.questions = questions
self.answers = answers
self._vocab = None # lazily created
@property
def vocab(self):
"""
:rtype: set
"""
if self._vocab is None:
self._vocab = []
for question in self.questions:
self._vocab += question.vocab
for answer in self.answers:
self._vocab += answer.vocab
self._vocab = unique_items(self._vocab)
return self._vocab
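if __name__ == '__main__':
    # Minimal smoke test (illustrative only; assumes the sibling data.util
    # module that provides unique_items is importable):
    tokens = [Token(u'what'), Token(u'is'), Token(u'this')]
    sentence = Sentence(u'what is this', tokens)
    question = TextItem(u'what is this', [sentence])
    print(question.vocab)  # expected: [u'what', u'is', u'this']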
| apache-2.0 | -6,584,500,800,930,892,000 | 21.119048 | 63 | 0.541442 | false |
tchellomello/home-assistant | homeassistant/components/nexia/__init__.py | 1 | 3636 |
"""Support for Nexia / Trane XL Thermostats."""
import asyncio
from datetime import timedelta
from functools import partial
import logging
from nexia.home import NexiaHome
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, NEXIA_DEVICE, PLATFORMS, UPDATE_COORDINATOR
from .util import is_invalid_auth_code
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
},
extra=vol.ALLOW_EXTRA,
)
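# A configuration.yaml entry accepted by CONFIG_SCHEMA above would look like
# this sketch (illustrative credentials; CONF_USERNAME and CONF_PASSWORD
# resolve to 'username' and 'password'):
#
# nexia:
#   username: [email protected]
#   password: !secret nexia_password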
DEFAULT_UPDATE_RATE = 120
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the nexia component from YAML."""
conf = config.get(DOMAIN)
hass.data.setdefault(DOMAIN, {})
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Configure the base Nexia device for Home Assistant."""
conf = entry.data
username = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
state_file = hass.config.path(f"nexia_config_{username}.conf")
try:
nexia_home = await hass.async_add_executor_job(
partial(
NexiaHome,
username=username,
password=password,
device_name=hass.config.location_name,
state_file=state_file,
)
)
except ConnectTimeout as ex:
_LOGGER.error("Unable to connect to Nexia service: %s", ex)
raise ConfigEntryNotReady from ex
except HTTPError as http_ex:
if is_invalid_auth_code(http_ex.response.status_code):
_LOGGER.error(
"Access error from Nexia service, please check credentials: %s", http_ex
)
return False
_LOGGER.error("HTTP error from Nexia service: %s", http_ex)
raise ConfigEntryNotReady from http_ex
async def _async_update_data():
"""Fetch data from API endpoint."""
return await hass.async_add_job(nexia_home.update)
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="Nexia update",
update_method=_async_update_data,
update_interval=timedelta(seconds=DEFAULT_UPDATE_RATE),
)
hass.data[DOMAIN][entry.entry_id] = {
NEXIA_DEVICE: nexia_home,
UPDATE_COORDINATOR: coordinator,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
| apache-2.0 | -5,522,290,898,928,607,000 | 27.857143 | 88 | 0.641364 | false |
comsaint/legco-watch | app/raw/tests/test_agenda.py | 1 | 7371 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tests for CouncilAgenda object
from django.test import TestCase
import logging
from raw.docs import agenda
# We use fixtures which are raw HTML versions of the agendas to test the parser
# Each test case works with one source.
logging.disable(logging.CRITICAL)
class Agenda20140709TestCase(TestCase):
def setUp(self):
with open('raw/tests/fixtures/council_agenda-20140709-e.html', 'rb') as f:
self.src = f.read().decode('utf-8')
self.parser = agenda.CouncilAgenda('council_agenda-20140709-e', self.src)
def test_tabled_papers_count(self):
# 8 subsidiary legislation and 29 other papers
self.assertEqual(len(self.parser.tabled_papers), 37)
def test_tabled_papers_class(self):
for p in self.parser.tabled_papers[0:8]:
self.assertTrue(isinstance(p, agenda.TabledLegislation))
for p in self.parser.tabled_papers[8:37]:
self.assertTrue(isinstance(p, agenda.OtherTabledPaper))
def test_spot_check_tabled_papers(self):
foo = self.parser.tabled_papers[3]
self.assertEqual(foo.title, u'Timber Stores (Amendment) Regulation 2014')
self.assertEqual(foo.number, u'106/2014')
foo = self.parser.tabled_papers[9]
self.assertEqual(foo.title, u'No. 120 - Sir Robert Black Trust Fund Report of the Trustee on the Administration of the Fund for the year ended 31 March 2014')
self.assertEqual(foo.presenter, u'Secretary for Home Affairs')
foo = self.parser.tabled_papers[27]
self.assertEqual(foo.title, u'Report of the Panel on Food Safety and Environmental Hygiene 2013-2014')
self.assertEqual(foo.presenter, u'Dr Hon Helena WONG')
def test_questions_count(self):
self.assertEqual(len(self.parser.questions), 22)
def test_spot_check_questions(self):
foo = self.parser.questions[8]
self.assertEqual(foo.asker, u'Hon WONG Yuk-man')
self.assertEqual(foo.replier, u'Secretary for Security')
self.assertEqual(foo.type, agenda.AgendaQuestion.QTYPE_WRITTEN)
def test_bills_count(self):
self.assertEqual(len(self.parser.bills), 9)
def test_spot_check_bills(self):
foo = self.parser.bills[1]
self.assertEqual(foo.reading, agenda.BillReading.FIRST)
self.assertEqual(foo.title, u'Land (Miscellaneous Provisions) (Amendment) Bill 2014')
self.assertEqual(foo.attendees, [])
foo = self.parser.bills[3]
self.assertEqual(foo.reading, agenda.BillReading.SECOND)
self.assertEqual(foo.title, u'Land (Miscellaneous Provisions) (Amendment) Bill 2014')
self.assertEqual(foo.attendees, [u'Secretary for Development'])
foo = self.parser.bills[7]
self.assertEqual(foo.reading, agenda.BillReading.SECOND_THIRD)
self.assertEqual(foo.title, u'Stamp Duty (Amendment) Bill 2013')
self.assertEqual(len(foo.attendees), 2, foo.attendees)
self.assertEqual(set(foo.attendees), {u'Secretary for Financial Services and the Treasury',
u'Under Secretary for Financial Services and the Treasury'})
self.assertEqual(len(foo.amendments), 3)
class Agenda20130508TestCase(TestCase):
def setUp(self):
with open('raw/tests/fixtures/council_agenda-20130508-e.html', 'rb') as f:
self.src = f.read().decode('utf-8')
self.parser = agenda.CouncilAgenda('council_agenda-20130508-e', self.src)
def test_count_tabled_papers(self):
self.assertEqual(len(self.parser.tabled_papers), 9)
def test_tabled_papers_type(self):
for p in self.parser.tabled_papers[0:8]:
self.assertTrue(isinstance(p, agenda.TabledLegislation))
self.assertTrue(isinstance(self.parser.tabled_papers[8], agenda.OtherTabledPaper))
def test_spot_check_tabled_papers(self):
foo = self.parser.tabled_papers[2]
self.assertEqual(foo.title, u'Trade Marks Ordinance (Amendment of Schedule 1) Regulation 2013')
self.assertEqual(foo.number, u'64/2013')
foo = self.parser.tabled_papers[8]
self.assertEqual(foo.title, u'No. 92 - Financial statements for the year ended 31 August 2012')
self.assertEqual(foo.presenter, u'Secretary for Education')
def test_questions_count(self):
self.assertEqual(len(self.parser.questions), 22)
def test_spot_check_questions(self):
foo = self.parser.questions[21]
self.assertEqual(foo.asker, u'Emily LAU')
self.assertEqual(foo.replier, u'Secretary for Financial Services and the Treasury')
self.assertEqual(foo.type, agenda.AgendaQuestion.QTYPE_WRITTEN)
def test_bills_count(self):
self.assertEqual(len(self.parser.bills), 8)
def test_spot_check_bills(self):
foo = self.parser.bills[0]
self.assertEqual(foo.title, u'Hong Kong Arts Development Council (Amendment) Bill 2013')
self.assertEqual(foo.reading, agenda.BillReading.FIRST)
self.assertEqual(foo.amendments, [])
self.assertEqual(foo.attendees, [])
foo = self.parser.bills[6]
self.assertEqual(foo.title, u'Appropriation Bill 2013')
self.assertEqual(foo.reading, agenda.BillReading.SECOND_THIRD)
self.assertEqual(len(foo.amendments), 1)
# Attendees on these appropriations bills are always tricky
foo = self.parser.bills[7]
self.assertEqual(foo.title, u'Pilotage (Amendment) Bill 2013')
self.assertEqual(foo.reading, agenda.BillReading.SECOND_THIRD)
self.assertEqual(foo.attendees, [u'Secretary for Transport and Housing'])
self.assertEqual(foo.amendments, [])
class Agenda20140430TestCase(TestCase):
def setUp(self):
with open('raw/tests/fixtures/council_agenda-20140430-c.html', 'rb') as f:
self.src = f.read().decode('utf-8')
self.parser = agenda.CouncilAgenda('council_agenda-20130430-c', self.src)
def test_count_tabled_papers(self):
self.assertEqual(len(self.parser.tabled_papers), 7)
def test_tabled_papers_type(self):
for p in self.parser.tabled_papers[0:4]:
self.assertTrue(isinstance(p, agenda.TabledLegislation))
for p in self.parser.tabled_papers[4:7]:
self.assertTrue(isinstance(p, agenda.OtherTabledPaper))
def test_spot_check_papers(self):
pass
def test_questions_count(self):
self.assertEqual(len(self.parser.questions), 18)
def test_questions_spot_check(self):
foo = self.parser.questions[7]
self.assertEqual(foo.asker, u'張超雄')
self.assertEqual(foo.replier, u'發展局局長')
self.assertEqual(foo.type, agenda.AgendaQuestion.QTYPE_ORAL)
def test_bills_count(self):
self.assertEqual(len(self.parser.bills), 9)
def test_bills_spot_check(self):
foo = self.parser.bills[2]
self.assertEqual(foo.title, u'《電子健康紀錄互通系統條例草案》')
self.assertEqual(foo.attendees, [])
self.assertEqual(foo.reading, agenda.BillReading.FIRST)
foo = self.parser.bills[8]
self.assertEqual(foo.title, u'《2014年撥款條例草案》')
self.assertEqual(set(foo.attendees), {u'財政司司長'})
self.assertEqual(foo.reading, agenda.BillReading.THIRD)
| mit | -6,964,750,088,137,517,000 | 41.16763 | 166 | 0.677039 | false |
ajaygarg84/sugar | src/jarabe/view/keyhandler.py | 1 | 7320 |
# Copyright (C) 2006-2007, Red Hat, Inc.
# Copyright (C) 2009 Simon Schampijer
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import logging
from gi.repository import Gdk
from gi.repository import SugarExt
from jarabe.model import sound
from jarabe.model import shell
from jarabe.model import session
from jarabe.view.tabbinghandler import TabbingHandler
from jarabe.model.shell import ShellModel
from jarabe import config
from jarabe.journal import journalactivity
_VOLUME_STEP = sound.VOLUME_STEP
_VOLUME_MAX = 100
_TABBING_MODIFIER = Gdk.ModifierType.MOD1_MASK
_actions_table = {
'F1': 'zoom_mesh',
'F2': 'zoom_group',
'F3': 'zoom_home',
'F4': 'zoom_activity',
'F5': 'open_search',
'F6': 'frame',
'XF86AudioMute': 'volume_mute',
'F11': 'volume_down',
'XF86AudioLowerVolume': 'volume_down',
'F12': 'volume_up',
'XF86AudioRaiseVolume': 'volume_up',
'<alt>F11': 'volume_min',
'<alt>F12': 'volume_max',
'XF86MenuKB': 'frame',
'<alt>Tab': 'next_window',
'<alt><shift>Tab': 'previous_window',
'<alt>Escape': 'close_window',
'XF86WebCam': 'open_search',
# the following are intended for emulator users
'<alt><shift>f': 'frame',
'<alt><shift>q': 'quit_emulator',
'XF86Search': 'open_search',
'<alt><shift>o': 'open_search'
}
_instance = None
class KeyHandler(object):
def __init__(self, frame):
self._frame = frame
self._key_pressed = None
self._keycode_pressed = 0
self._keystate_pressed = 0
self._key_grabber = SugarExt.KeyGrabber()
self._key_grabber.connect('key-pressed',
self._key_pressed_cb)
self._key_grabber.connect('key-released',
self._key_released_cb)
self._tabbing_handler = TabbingHandler(self._frame, _TABBING_MODIFIER)
for f in os.listdir(os.path.join(config.ext_path, 'globalkey')):
if f.endswith('.py') and not f.startswith('__'):
module_name = f[:-3]
try:
logging.debug('Loading module %r', module_name)
module = __import__('globalkey.' + module_name, globals(),
locals(), [module_name])
for key in module.BOUND_KEYS:
if key in _actions_table:
raise ValueError('Key %r is already bound' % key)
_actions_table[key] = module
except Exception:
logging.exception('Exception while loading extension:')
self._key_grabber.grab_keys(_actions_table.keys())
def _change_volume(self, step=None, value=None):
if step is not None:
volume = sound.get_volume() + step
elif value is not None:
volume = value
volume = min(max(0, volume), _VOLUME_MAX)
sound.set_volume(volume)
sound.set_muted(volume == 0)
def handle_previous_window(self, event_time):
self._tabbing_handler.previous_activity(event_time)
def handle_next_window(self, event_time):
self._tabbing_handler.next_activity(event_time)
def handle_close_window(self, event_time):
active_activity = shell.get_model().get_active_activity()
if active_activity.is_journal():
return
active_activity.get_window().close()
def handle_zoom_mesh(self, event_time):
shell.get_model().set_zoom_level(ShellModel.ZOOM_MESH, event_time)
def handle_zoom_group(self, event_time):
shell.get_model().set_zoom_level(ShellModel.ZOOM_GROUP, event_time)
def handle_zoom_home(self, event_time):
shell.get_model().set_zoom_level(ShellModel.ZOOM_HOME, event_time)
def handle_zoom_activity(self, event_time):
shell.get_model().set_zoom_level(ShellModel.ZOOM_ACTIVITY, event_time)
def handle_volume_max(self, event_time):
self._change_volume(value=_VOLUME_MAX)
def handle_volume_min(self, event_time):
self._change_volume(value=0)
def handle_volume_mute(self, event_time):
if sound.get_muted() is True:
sound.set_muted(False)
else:
sound.set_muted(True)
def handle_volume_up(self, event_time):
self._change_volume(step=_VOLUME_STEP)
def handle_volume_down(self, event_time):
self._change_volume(step=-_VOLUME_STEP)
def handle_frame(self, event_time):
self._frame.notify_key_press()
def handle_quit_emulator(self, event_time):
session.get_session_manager().shutdown()
def handle_open_search(self, event_time):
journalactivity.get_journal().show_journal()
def _key_pressed_cb(self, grabber, keycode, state, event_time):
key = grabber.get_key(keycode, state)
logging.debug('_key_pressed_cb: %i %i %s', keycode, state, key)
if key is not None:
self._key_pressed = key
self._keycode_pressed = keycode
self._keystate_pressed = state
action = _actions_table[key]
if self._tabbing_handler.is_tabbing():
# Only accept window tabbing events, everything else
# cancels the tabbing operation.
if not action in ['next_window', 'previous_window']:
self._tabbing_handler.stop(event_time)
return True
if hasattr(action, 'handle_key_press'):
action.handle_key_press(key)
elif isinstance(action, basestring):
method = getattr(self, 'handle_' + action)
method(event_time)
else:
raise TypeError('Invalid action %r' % action)
return True
else:
# If this is not a registered key, then cancel tabbing.
if self._tabbing_handler.is_tabbing():
if not grabber.is_modifier(keycode):
self._tabbing_handler.stop(event_time)
return True
return False
def _key_released_cb(self, grabber, keycode, state, event_time):
logging.debug('_key_released_cb: %i %i', keycode, state)
if self._tabbing_handler.is_tabbing():
# We stop tabbing and switch to the new window as soon as the
# modifier key is raised again.
if grabber.is_modifier(keycode, mask=_TABBING_MODIFIER):
self._tabbing_handler.stop(event_time)
return True
return False
def setup(frame):
global _instance
if _instance:
del _instance
_instance = KeyHandler(frame)
| gpl-2.0 | -8,039,884,357,420,951,000 | 33.046512 | 78 | 0.609426 | false |
8l/beri | cheritest/trunk/tests/cp0/test_tne_eq.py | 2 | 1330 |
#-
# Copyright (c) 2011 Robert N. M. Watson
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
class test_tne_eq(BaseBERITestCase):
def test_tne_handled(self):
self.assertRegisterEqual(self.MIPS.a2, 0, "tne trapped when equal")
| apache-2.0 | 7,479,388,609,859,914,000 | 40.5625 | 77 | 0.76015 | false |
tboyce1/home-assistant | homeassistant/components/ihc/__init__.py | 2 | 7715 |
"""IHC component.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/ihc/
"""
import logging
import os.path
import xml.etree.ElementTree
import voluptuous as vol
from homeassistant.components.ihc.const import (
ATTR_IHC_ID, ATTR_VALUE, CONF_INFO, CONF_AUTOSETUP,
CONF_BINARY_SENSOR, CONF_LIGHT, CONF_SENSOR, CONF_SWITCH,
CONF_XPATH, CONF_NODE, CONF_DIMMABLE, CONF_INVERTING,
SERVICE_SET_RUNTIME_VALUE_BOOL, SERVICE_SET_RUNTIME_VALUE_INT,
SERVICE_SET_RUNTIME_VALUE_FLOAT)
from homeassistant.config import load_yaml_config_file
from homeassistant.const import (
CONF_URL, CONF_USERNAME, CONF_PASSWORD, CONF_ID, CONF_NAME,
CONF_UNIT_OF_MEASUREMENT, CONF_TYPE, TEMP_CELSIUS)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
REQUIREMENTS = ['ihcsdk==2.1.1']
DOMAIN = 'ihc'
IHC_DATA = 'ihc'
IHC_CONTROLLER = 'controller'
IHC_INFO = 'info'
AUTO_SETUP_YAML = 'ihc_auto_setup.yaml'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_URL): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_AUTOSETUP, default=True): cv.boolean,
vol.Optional(CONF_INFO, default=True): cv.boolean
}),
}, extra=vol.ALLOW_EXTRA)
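# A matching configuration.yaml entry would look like this sketch
# (illustrative values; CONF_URL, CONF_USERNAME and CONF_PASSWORD resolve to
# 'url', 'username' and 'password'):
#
# ihc:
#   url: http://192.168.1.3
#   username: myaccount
#   password: mypassword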
AUTO_SETUP_SCHEMA = vol.Schema({
vol.Optional(CONF_BINARY_SENSOR, default=[]):
vol.All(cv.ensure_list, [
vol.All({
vol.Required(CONF_XPATH): cv.string,
vol.Required(CONF_NODE): cv.string,
vol.Optional(CONF_TYPE, default=None): cv.string,
vol.Optional(CONF_INVERTING, default=False): cv.boolean,
})
]),
vol.Optional(CONF_LIGHT, default=[]):
vol.All(cv.ensure_list, [
vol.All({
vol.Required(CONF_XPATH): cv.string,
vol.Required(CONF_NODE): cv.string,
vol.Optional(CONF_DIMMABLE, default=False): cv.boolean,
})
]),
vol.Optional(CONF_SENSOR, default=[]):
vol.All(cv.ensure_list, [
vol.All({
vol.Required(CONF_XPATH): cv.string,
vol.Required(CONF_NODE): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT,
default=TEMP_CELSIUS): cv.string,
})
]),
vol.Optional(CONF_SWITCH, default=[]):
vol.All(cv.ensure_list, [
vol.All({
vol.Required(CONF_XPATH): cv.string,
vol.Required(CONF_NODE): cv.string,
})
]),
})
SET_RUNTIME_VALUE_BOOL_SCHEMA = vol.Schema({
vol.Required(ATTR_IHC_ID): cv.positive_int,
vol.Required(ATTR_VALUE): cv.boolean
})
SET_RUNTIME_VALUE_INT_SCHEMA = vol.Schema({
vol.Required(ATTR_IHC_ID): cv.positive_int,
vol.Required(ATTR_VALUE): int
})
SET_RUNTIME_VALUE_FLOAT_SCHEMA = vol.Schema({
vol.Required(ATTR_IHC_ID): cv.positive_int,
vol.Required(ATTR_VALUE): vol.Coerce(float)
})
_LOGGER = logging.getLogger(__name__)
IHC_PLATFORMS = ('binary_sensor', 'light', 'sensor', 'switch')
def setup(hass, config):
"""Setup the IHC component."""
from ihcsdk.ihccontroller import IHCController
conf = config[DOMAIN]
url = conf[CONF_URL]
username = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
ihc_controller = IHCController(url, username, password)
if not ihc_controller.authenticate():
_LOGGER.error("Unable to authenticate on ihc controller.")
return False
if (conf[CONF_AUTOSETUP] and
not autosetup_ihc_products(hass, config, ihc_controller)):
return False
hass.data[IHC_DATA] = {
IHC_CONTROLLER: ihc_controller,
IHC_INFO: conf[CONF_INFO]}
setup_service_functions(hass, ihc_controller)
return True
def autosetup_ihc_products(hass: HomeAssistantType, config, ihc_controller):
"""Auto setup of IHC products from the ihc project file."""
project_xml = ihc_controller.get_project()
if not project_xml:
_LOGGER.error("Unable to read project from ihc controller.")
return False
project = xml.etree.ElementTree.fromstring(project_xml)
# if an auto setup file exist in the configuration it will override
yaml_path = hass.config.path(AUTO_SETUP_YAML)
if not os.path.isfile(yaml_path):
yaml_path = os.path.join(os.path.dirname(__file__), AUTO_SETUP_YAML)
yaml = load_yaml_config_file(yaml_path)
try:
auto_setup_conf = AUTO_SETUP_SCHEMA(yaml)
except vol.Invalid as exception:
_LOGGER.error("Invalid IHC auto setup data: %s", exception)
return False
groups = project.findall('.//group')
for component in IHC_PLATFORMS:
component_setup = auto_setup_conf[component]
discovery_info = get_discovery_info(component_setup, groups)
if discovery_info:
discovery.load_platform(hass, component, DOMAIN, discovery_info,
config)
return True
def get_discovery_info(component_setup, groups):
"""Get discovery info for specified component."""
discovery_data = {}
for group in groups:
groupname = group.attrib['name']
for product_cfg in component_setup:
products = group.findall(product_cfg[CONF_XPATH])
for product in products:
nodes = product.findall(product_cfg[CONF_NODE])
for node in nodes:
if ('setting' in node.attrib
and node.attrib['setting'] == 'yes'):
continue
ihc_id = int(node.attrib['id'].strip('_'), 0)
name = '{}_{}'.format(groupname, ihc_id)
device = {
'ihc_id': ihc_id,
'product': product,
'product_cfg': product_cfg}
discovery_data[name] = device
return discovery_data
def setup_service_functions(hass: HomeAssistantType, ihc_controller):
"""Setup the ihc service functions."""
def set_runtime_value_bool(call):
"""Set a IHC runtime bool value service function."""
ihc_id = call.data[ATTR_IHC_ID]
value = call.data[ATTR_VALUE]
ihc_controller.set_runtime_value_bool(ihc_id, value)
def set_runtime_value_int(call):
"""Set a IHC runtime integer value service function."""
ihc_id = call.data[ATTR_IHC_ID]
value = call.data[ATTR_VALUE]
ihc_controller.set_runtime_value_int(ihc_id, value)
def set_runtime_value_float(call):
"""Set a IHC runtime float value service function."""
ihc_id = call.data[ATTR_IHC_ID]
value = call.data[ATTR_VALUE]
ihc_controller.set_runtime_value_float(ihc_id, value)
hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_BOOL,
set_runtime_value_bool,
schema=SET_RUNTIME_VALUE_BOOL_SCHEMA)
hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_INT,
set_runtime_value_int,
schema=SET_RUNTIME_VALUE_INT_SCHEMA)
hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_FLOAT,
set_runtime_value_float,
schema=SET_RUNTIME_VALUE_FLOAT_SCHEMA)
def validate_name(config):
"""Validate device name."""
if CONF_NAME in config:
return config
ihcid = config[CONF_ID]
name = 'ihc_{}'.format(ihcid)
config[CONF_NAME] = name
return config
| apache-2.0 | -6,294,268,968,842,588,000 | 35.220657 | 76 | 0.615813 | false |
ODM2/ODM2YODAParser | yodatools/dataloader/view/clsDBConfig.py | 2 | 6433 |
# -*- coding: utf-8 -*-
###########################################################################
# Python code generated with wxFormBuilder (version Jun 5 2014)
# http://www.wxformbuilder.org/
#
# TODO: (Is this something we should follow?) PLEASE DO 'NOT' EDIT THIS FILE!
###########################################################################
import wx
###########################################################################
# Class clsDBConfiguration
###########################################################################
class clsDBConfiguration(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent, id=wx.ID_ANY,
pos=wx.DefaultPosition,
size=wx.Size(500, 291),
style=wx.SIMPLE_BORDER | wx.TAB_TRAVERSAL)
self.SetMinSize(wx.Size(442, 291))
self.SetMaxSize(wx.Size(627, 291))
formSizer = wx.BoxSizer(wx.VERTICAL)
sbSizer = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, 'Database Connection'), wx.VERTICAL) # noqa
connectionSizer = wx.FlexGridSizer(0, 2, 0, 15)
connectionSizer.AddGrowableCol(1)
connectionSizer.SetFlexibleDirection(wx.VERTICAL)
connectionSizer.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_ALL)
self.stVersion = wx.StaticText(self, wx.ID_ANY, 'DB Version:', wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT) # noqa
self.stVersion.Wrap(-1)
connectionSizer.Add(self.stVersion, 0, wx.ALL | wx.ALIGN_RIGHT | wx.EXPAND, 5) # noqa
cbDatabaseType1Choices = ['2.0']#, '1.1.1']
self.cbDatabaseType1 = wx.ComboBox(self, wx.ID_ANY, '2.0', wx.DefaultPosition, wx.DefaultSize, cbDatabaseType1Choices, wx.CB_READONLY ) # noqa
self.cbDatabaseType1.SetSelection(1)
connectionSizer.Add(self.cbDatabaseType1, 1, wx.ALL | wx.EXPAND, 5)
self.stConnType = wx.StaticText(self, wx.ID_ANY, 'Connection Type:', wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT) # noqa
self.stConnType.Wrap(-1)
connectionSizer.Add(self.stConnType, 0, wx.ALL | wx.EXPAND | wx.ALIGN_RIGHT, 5) # noqa
cbDatabaseTypeChoices = []
self.cbDatabaseType = wx.ComboBox(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, cbDatabaseTypeChoices, wx.CB_READONLY) # noqa
connectionSizer.Add(self.cbDatabaseType, 1, wx.ALL | wx.EXPAND, 5)
self.stServer = wx.StaticText(self, wx.ID_ANY, 'Server:', wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT) # noqa
self.stServer.Wrap(-1)
connectionSizer.Add(self.stServer, 0, wx.ALL | wx.EXPAND | wx.ALIGN_RIGHT, 5) # noqa
self.txtServer = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 | wx.FULL_REPAINT_ON_RESIZE | wx.SIMPLE_BORDER) # noqa
connectionSizer.Add(self.txtServer, 1, wx.ALL | wx.EXPAND, 5)
self.stDBName = wx.StaticText(self, wx.ID_ANY, 'Database:', wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT) # noqa
self.stDBName.Wrap(-1)
self.stDBName.SetFont(wx.Font(wx.NORMAL_FONT.GetPointSize(), 70, 90, 90, False, wx.EmptyString)) # noqa
connectionSizer.Add(self.stDBName, 0, wx.ALL | wx.EXPAND | wx.ALIGN_RIGHT, 5) # noqa
self.txtDBName = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 | wx.SIMPLE_BORDER) # noqa
connectionSizer.Add(self.txtDBName, 1, wx.ALL | wx.EXPAND, 5)
self.stUser = wx.StaticText(self, wx.ID_ANY, 'User:', wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT) # noqa
self.stUser.Wrap(-1)
self.stUser.SetFont(wx.Font(wx.NORMAL_FONT.GetPointSize(), 70, 90, 90, False, wx.EmptyString)) # noqa
connectionSizer.Add(self.stUser, 0, wx.ALL | wx.EXPAND | wx.ALIGN_RIGHT, 5) # noqa
self.txtUser = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 | wx.SIMPLE_BORDER) # noqa
connectionSizer.Add(self.txtUser, 1, wx.ALL | wx.EXPAND, 5)
self.stPass = wx.StaticText(self, wx.ID_ANY, 'Password:', wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT) # noqa
self.stPass.Wrap(-1)
self.stPass.SetFont(wx.Font(wx.NORMAL_FONT.GetPointSize(), 70, 90, 90, False, wx.EmptyString)) # noqa
connectionSizer.Add(self.stPass, 0, wx.ALL | wx.EXPAND | wx.ALIGN_RIGHT, 5) # noqa
self.txtPass = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PASSWORD | wx.SIMPLE_BORDER) # noqa
connectionSizer.Add(self.txtPass, 1, wx.ALL | wx.EXPAND, 5)
sbSizer.Add(connectionSizer, 90, wx.EXPAND, 3)
formSizer.Add(sbSizer, 1, wx.ALL | wx.EXPAND, 7)
btnSizer = wx.FlexGridSizer(0, 3, 0, 25)
btnSizer.AddGrowableCol(0)
btnSizer.AddGrowableCol(1)
btnSizer.AddGrowableCol(2)
btnSizer.SetFlexibleDirection(wx.VERTICAL)
btnSizer.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_ALL)
self.btnTest = wx.Button(self, wx.ID_ANY, 'Test Connection', wx.DefaultPosition, wx.DefaultSize, 0) # noqa
btnSizer.Add(self.btnTest, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_CENTER_HORIZONTAL, 5) # noqa
# self.btnSave = wx.Button(self, wx.ID_ANY, 'Save Connection', wx.DefaultPosition, wx.DefaultSize, 0) # noqa
# btnSizer.Add(self.btnSave, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_CENTER_HORIZONTAL, 5) # noqa
# self.btnCancel = wx.Button(self, wx.ID_ANY, 'Cancel', wx.DefaultPosition, wx.DefaultSize, 0) # noqa
# btnSizer.Add(self.btnCancel, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_CENTER_HORIZONTAL, 5) # noqa
formSizer.Add(btnSizer, 10, wx.EXPAND, 2)
self.SetSizer(formSizer)
self.Layout()
# Connect Events.
self.btnTest.Bind(wx.EVT_BUTTON, self.OnBtnTest)
# self.btnSave.Bind(wx.EVT_BUTTON, self.OnBtnSave)
# self.btnCancel.Bind(wx.EVT_BUTTON, self.OnBtnCancel)
self.btnSizer = btnSizer
self.formSizer = formSizer
self.btnTest.SetFocus()
def __del__(self):
pass
    # Virtual event handlers, override them in your derived class.
def OnBtnTest(self, event):
event.Skip()
# def OnBtnSave(self, event):
# event.Skip()
#
# def OnBtnCancel(self, event):
# event.Skip()
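# Illustrative example (not part of the wxFormBuilder output): a derived
# panel that overrides the virtual OnBtnTest handler declared above.
class ExampleDBConfiguration(clsDBConfiguration):
    def OnBtnTest(self, event):
        # A real subclass would try connecting with the values entered in
        # txtServer/txtDBName/txtUser/txtPass and report the outcome.
        wx.MessageBox('Connection test clicked', 'Info')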
| bsd-3-clause | -6,074,380,409,445,029,000 | 47.368421 | 163 | 0.625214 | false |
NicoVarg99/daf-recipes | ckan/ckan/ckanext-dcatapit/ckanext/dcatapit/dcat/harvester.py | 1 | 3892 |
import logging
import ckan.plugins as p
from ckanext.dcat.interfaces import IDCATRDFHarvester
from ckanext.dcatapit.dcat.profiles import LOCALISED_DICT_NAME_BASE, LOCALISED_DICT_NAME_RESOURCES
import ckanext.dcatapit.interfaces as interfaces
log = logging.getLogger(__name__)
class DCATAPITHarvesterPlugin(p.SingletonPlugin):
p.implements(IDCATRDFHarvester)
def before_download(self, url, harvest_job):
return url, []
def after_download(self, content, harvest_job):
return content, []
def before_update(self, harvest_object, dataset_dict, temp_dict):
self._before(dataset_dict, temp_dict)
def after_update(self, harvest_object, dataset_dict, temp_dict):
return self._after(dataset_dict, temp_dict)
def before_create(self, harvest_object, dataset_dict, temp_dict):
self._before(dataset_dict, temp_dict)
def after_create(self, harvest_object, dataset_dict, temp_dict):
return self._after(dataset_dict, temp_dict)
def _before(self, dataset_dict, temp_dict):
loc_dict = dataset_dict.pop(LOCALISED_DICT_NAME_BASE, {})
res_dict = dataset_dict.pop(LOCALISED_DICT_NAME_RESOURCES, {})
if loc_dict or res_dict:
temp_dict['dcatapit'] = {
LOCALISED_DICT_NAME_BASE: loc_dict,
LOCALISED_DICT_NAME_RESOURCES: res_dict
}
def _after(self, dataset_dict, temp_dict):
dcatapit_dict = temp_dict.get('dcatapit')
if not dcatapit_dict:
return None
base_dict = dcatapit_dict[LOCALISED_DICT_NAME_BASE]
if base_dict:
pkg_id = dataset_dict['id']
err = self._save_package_multilang(pkg_id, base_dict)
if err:
return err
resources_dict = dcatapit_dict[LOCALISED_DICT_NAME_RESOURCES]
if resources_dict:
err = self._save_resources_multilang(pkg_id, resources_dict)
if err:
return err
##
# Managing Solr indexes for harvested package dict
##
interfaces.update_solr_package_indexes(dataset_dict)
return None
def _save_package_multilang(self, pkg_id, base_dict):
try:
for field, lang_dict in base_dict.iteritems():
for lang, text in lang_dict.iteritems():
interfaces.upsert_package_multilang(pkg_id, field, 'package', lang, text)
except Exception, e:
return str(e)
return None
def _save_resources_multilang(self, pkg_id, resources_dict):
try:
uri_id_mapping = self._get_resource_uri_id_mapping(pkg_id)
for res_uri, res_dict in resources_dict.iteritems():
res_id = uri_id_mapping.get(res_uri, None)
if not res_id:
log.warn("Could not find resource id for URI %s", res_uri)
continue
for field, lang_dict in res_dict.iteritems():
for lang, text in lang_dict.iteritems():
interfaces.upsert_resource_multilang(res_id, field, lang, text)
except Exception, e:
return str(e)
return None
def _get_resource_uri_id_mapping(self, pkg_id):
ret = {}
# log.info("DATASET DICT: %s", dataset_dict)
dataset = p.toolkit.get_action('package_show')({}, {'id': pkg_id})
# log.info("DATASET ----------- %s", dataset)
for resource in dataset.get('resources', []):
res_id = resource.get('id', None)
res_uri = resource.get('uri', None)
if res_id and res_uri:
log.debug('Mapping resource id %s to URI "%s"', res_id, res_uri)
ret[res_uri] = res_id
else:
log.warn("Can't map URI for resource \"%s\"", resource.get('name', '---'))
return ret
| gpl-3.0 | 3,424,962,919,449,245,000 | 34.063063 | 98 | 0.5889 | false |
nfsli926/stock | python/com/nfs/util/dbutil.py | 1 | 21744 |
# coding=utf-8
# Created on 2015-09-25
__author__ = 'litao'
from sqlalchemy import create_engine
import tushare as ts
import urllib
import urllib2
import re
import sys
import csv
import MySQLdb
import tushare as ts
import datetime
import time
import DateUtil as dateutil
# Import forward-adjusted (qfq) stock price data
#code: string, stock code, e.g. 600848
#start: string, start date, format YYYY-MM-DD; defaults to the current date when empty
#end: string, end date, format YYYY-MM-DD; defaults to this day last year when empty
#autype: string, price-adjustment type: qfq = forward-adjusted, hfq = backward-adjusted, None = unadjusted; default is qfq
#index: Boolean, whether the code is a market index; default is False
#retry_count: int, default 3, number of times to retry on network or similar problems
#pause: int, default 0, seconds to pause between repeated requests, to avoid problems caused by too-short request intervals
#Return values:
#date: trade date (index)
#open: opening price
#high: highest price
#close: closing price
#low: lowest price
#volume: trading volume
#amount: turnover amount
def get_qfq_date(code,start,end):
try:
df = ts.get_h_data(code,start,end)
print start+end
if df is None:
print "qfq df is none"
else:
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.insert(0,'code',code)
df.to_sql('stock_qfq_data', engine, if_exists='append')
print code + " qfq success"
except Exception,e:
print e.message
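# Usage sketch (hypothetical code and dates; requires network access to the
# tushare service and the local MySQL `stock` schema used in this module):
# get_qfq_date('600848', '2014-09-25', '2015-09-25')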
# Import unadjusted historical stock data
#code: string, stock code, e.g. 600848
#start: string, start date, format YYYY-MM-DD; defaults to the current date when empty
#end: string, end date, format YYYY-MM-DD; defaults to this day last year when empty
#autype: string, price-adjustment type: qfq = forward-adjusted, hfq = backward-adjusted, None = unadjusted; default is qfq
#index: Boolean, whether the code is a market index; default is False
#retry_count: int, default 3, number of times to retry on network or similar problems
#pause: int, default 0, seconds to pause between repeated requests, to avoid problems caused by too-short request intervals
#Return values:
#date: trade date (index)
#open: opening price
#high: highest price
#close: closing price
#low: lowest price
#volume: trading volume
#amount: turnover amount
def get_bfq_data(code,startdate,enddate):
try:
print "sdfsdf"
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
print startdate+enddate
df = ts.get_h_data(code,start=startdate,end=enddate,autype='None')
if df is None :
print " day df is none"
else:
df.insert(0,'code',code)
df.to_sql('stock_bfq_data', engine, if_exists='append')
except Exception,e:
e.message
# Get daily K-line data for a stock
def get_day_data(code,startdate,enddate):
try:
df = ts.get_hist_data(code,start=startdate,end=enddate,ktype='D')
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
if df is None:
print " day df is none"
else:
df.insert(0,'code',code)
df.to_sql('stock_day_data', engine, if_exists='append')
except Exception,e:
print e.message
# Get weekly K-line data for a stock
def get_week_data(code,startdate,enddate):
try:
df = ts.get_hist_data(code,start=startdate,end=enddate,ktype='W')
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.insert(0,'code',code)
df.to_sql('stock_week_data', engine, if_exists='append')
print code + " week success"
except Exception,e:
print e.message
# Get monthly K-line data for a stock
def get_month_data(code,startdate,enddate):
try:
df = ts.get_hist_data(code,start=startdate,end=enddate,ktype='M')
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.insert(0,'code',code)
df.to_sql('stock_month_data', engine, if_exists='append')
print code + " month success"
except Exception,e:
print e.message
# Get 5-minute K-line data for a stock
def get_five_data(code,startdate,enddate):
try:
df = ts.get_hist_data(code,start=startdate,end=enddate,ktype='5')
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.insert(0,'code',code)
df.to_sql('stock_five_data', engine, if_exists='append')
print code + " five success"
except Exception,e:
print e.message
# Get industry classification
def get_industry_classified():
try:
df = ts.get_industry_classified();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('industry_classified', engine, if_exists='append')
except Exception, e:
e.message
#ts.get_hist_data('sh')  # SSE Composite Index K-line data; other parameters are the same as for individual stocks (same below)
#ts.get_hist_data('sz')  # Shenzhen Component Index K-line data
#ts.get_hist_data('hs300')  # CSI 300 Index K-line data
#ts.get_hist_data('sz50')  # SSE 50 Index K-line data
#ts.get_hist_data('zxb')  # SME Board Index K-line data
#ts.get_hist_data('cyb')  # ChiNext Index K-line data
# Get concept classification
def get_concept_classified():
try:
df = ts.get_concept_classified();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('concept_classified', engine, if_exists='append')
except Exception, e:
e.message
# Get region classification
def get_area_classified():
try:
df = ts.get_area_classified();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('area_classified', engine, if_exists='append')
except Exception, e:
e.message
# Get SME board classification
# Get ChiNext board classification
# Get risk-warning (ST) board classification
def get_st_classified():
try:
df = ts.get_st_classified();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('st_classified', engine, if_exists='append')
except Exception, e:
e.message
# CSI 300 constituents and weights
def get_hs300s():
try:
df = ts.get_hs300s();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('hs300s', engine, if_exists='append')
except Exception, e:
e.message
# SSE 50 constituent stocks
def get_sz50s():
try:
df = ts.get_sz50s();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('sz50s', engine, if_exists='append')
except Exception, e:
e.message
# CSI 500 constituent stocks
def get_zz500s():
try:
df = ts.get_zz500s();
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
# df.insert(0,'code','600848')
df.to_sql('zz500s', engine, if_exists='append')
except Exception, e:
e.message
# Get fundamental stock data -- earnings reports
# e.g. fetch the earnings report data for Q3 2014:
# ts.get_report_data(2014,3)
def get_report_data(year, quarter):
try:
df = ts.get_report_data(year, quarter)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('report_data', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get fundamental stock data -- profitability
def get_profit_data(year, quarter):
try:
df = ts.get_profit_data(year, quarter)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('profit_data', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get fundamental stock data -- operating capability
def get_operation_data(year, quarter):
try:
df = ts.get_operation_data(year, quarter)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('operation_data', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get fundamental stock data -- growth capability
def get_growth_data(year, quarter):
try:
df = ts.get_growth_data(year, quarter)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('growth_data', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get fundamental stock data -- solvency
def get_debtpaying_data(year, quarter):
try:
df = ts.get_debtpaying_data(year, quarter)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('debtpaying_data', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get fundamental stock data -- cash flow
def get_cashflow_data(year, quarter):
try:
df = ts.get_cashflow_data(year, quarter)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('cashflow_data', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get macroeconomic data - deposit rates
def get_deposit_rate():
try:
df = ts.get_deposit_rate()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('deposit_rate', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get macroeconomic data - loan rates
def get_loan_rate():
try:
df = ts.get_loan_rate()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('loan_rate', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get macroeconomic data - required reserve ratio
def get_rrr(year, quarter):
try:
df = ts.get_rrr()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('rrr', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get macroeconomic data - money supply
def get_money_supply():
try:
df = ts.get_money_supply()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('money_supply', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get macroeconomic data - GDP (annual)
def get_gdp_year():
try:
df = ts.get_gdp_year()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('gdp_year', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get macroeconomic data - GDP (quarterly)
def get_gdp_quarter():
try:
df = ts.get_gdp_quarter()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('gdp_quarter', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Contribution of the three major demand components to GDP
def get_gdp_for():
try:
df = ts.get_gdp_for()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('gdp_for', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Pull of the three major demand components on GDP
def get_gdp_pull():
try:
df = ts.get_gdp_pull()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('gdp_pull', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Contribution rates of the three major industries
def get_gdp_contrib():
try:
df = ts.get_gdp_contrib()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('gdp_contrib', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Consumer Price Index (CPI)
def get_cpi():
try:
df = ts.get_cpi()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('cpi', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Producer Price Index (PPI)
def get_ppi():
try:
df = ts.get_ppi()
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('ppi', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Dragon-Tiger List (daily trading billboard) data
def get_top_list(date):
try:
df = ts.top_list(date)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('top_list', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Daily Dragon-Tiger List data
def cap_tops(days,retry_count,pause):
try:
df = ts.cap_tops(days,retry_count,pause)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('cap_tops', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Per-stock billboard appearance statistics
def cap_tops(days,retry_count,pause):
try:
df = ts.cap_tops(days,retry_count,pause)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('cap_tops', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Brokerage branch billboard statistics
def broker_tops(days,retry_count,pause):
try:
df = ts.broker_tops(days,retry_count,pause)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('broker_tops', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Institutional seat tracking
def inst_tops(days,retry_count,pause):
try:
df = ts.inst_tops(days,retry_count,pause)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('inst_tops', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Institutional trade details
def inst_detail(retry_count,pause):
try:
df = ts.inst_detail(retry_count,pause)
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
df.to_sql('inst_detail', engine, if_exists='append')
print "message"
except Exception, e:
e.message
# Get the latest stored date for a given stock in the daily K-line table
def get_day_maxdate(stockno):
try:
sql = "select max(date) maxdate from stock_day_data where code='"+stockno+"'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
conn.close
if maxdate=='':
stockDf =ts.get_stock_basics()
            sssj = str(stockDf.ix[stockno]['timeToMarket']) # listing date, YYYYMMDD
return dateutil.convertDate(sssj)
return dateutil.get_next_day(maxdate)
except Exception,e:
print e.message
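# e.g. get_day_maxdate('600848') returns the day after the latest stored trade
# date for that stock, or its (converted) listing date when no rows exist yet.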
# Get the latest stored date for a stock in the weekly K-line table
def get_week_maxdate(stockno):
try:
sql = "select max(date) maxdate from stock_week_data where code='"+stockno+"'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
conn.close
if maxdate=='':
stockDf =ts.get_stock_basics()
            sssj = str(stockDf.ix[stockno]['timeToMarket']) # listing date, YYYYMMDD
return dateutil.convertDate(sssj)
return dateutil.get_next_day(maxdate)
except Exception,e:
print e.message
# Get the latest stored date for a given stock in the monthly K-line table
def get_month_maxdate(stockno):
try:
sql = "select max(date) maxdate from stock_month_data where code='"+stockno+"'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
        if maxdate=='':
            stockDf = ts.get_stock_basics()
            sssj = str(stockDf.ix[stockno]['timeToMarket'])  # listing date, YYYYMMDD
            return dateutil.convertDate(sssj)
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Get the latest stored date for a stock in the forward-adjusted (qfq) table
def get_qfq_maxdate(stockno):
try:
sql = "select max(date) maxdate from stock_qfq_data where code='"+stockno+"'"
engine = create_engine('mysql://root:[email protected]/stock?charset=utf8')
#df.to_sql('inst_detail', engine, if_exists='append')
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0][0:10]
cursor.close()
        conn.close()
        if maxdate=='':
            stockDf = ts.get_stock_basics()
            sssj = str(stockDf.ix[stockno]['timeToMarket'])  # listing date, YYYYMMDD
            return dateutil.convertDate(sssj)
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Get the latest stored date for a stock in the unadjusted (bfq) stock_bfq_data table
def get_bfq_maxdate(stockno):
try:
sql = "select max(transdate) maxdate from stock_bfq_data where code='"+stockno+"'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
        if len(maxdate)>10:
            maxdate = maxdate[0:10]
        if maxdate=='':
            stockDf = ts.get_stock_basics()
            sssj = str(stockDf.ix[stockno]['timeToMarket'])  # listing date, YYYYMMDD
            return dateutil.convertDate(sssj)
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Index codes used by the functions below:
# 399001 Shenzhen Component Index
# 399006 ChiNext Index
# 399005 SME Board Index
# 399300 CSI 300 Index
# 000001 Shanghai Composite Index
# Get the latest stored date for the Shenzhen Component Index
def get_szcz_maxdate():
try:
sql = "select max(date) maxdate from stock_index_data where code='399001'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
        if len(maxdate)>10:
            maxdate = maxdate[0:10]
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Get the latest stored date for the ChiNext Index
def get_cybz_maxdate():
try:
sql = "select max(date) maxdate from stock_index_data where code='399006'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
        if len(maxdate)>10:
            maxdate = maxdate[0:10]
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Get the latest stored date for the SME Board Index
def get_zxbz_maxdate():
try:
sql = "select max(date) maxdate from stock_index_data where code='399005'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
        if len(maxdate)>10:
            maxdate = maxdate[0:10]
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Get the latest stored date for the CSI 300 Index
def get_hs300_maxdate():
try:
sql = "select max(date) maxdate from stock_index_data where code='399300'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
        if len(maxdate)>10:
            maxdate = maxdate[0:10]
        return dateutil.get_next_day(maxdate)
    except Exception,e:
        print e.message
# Get the latest stored date for the Shanghai Composite Index
def get_szzs_maxdate():
try:
sql = "select max(date) maxdate from stock_index_data where code='000001'"
conn = MySQLdb.connect(host='localhost',user='root',passwd='123456',db='stock')
cursor = conn.cursor()
n = cursor.execute(sql)
maxdate = ''
for r in cursor:
maxdate = r[0]
cursor.close()
        conn.close()
if len(maxdate)>10:
maxdate=maxdate[0:10]
return dateutil.get_next_day(maxdate)
except Exception,e:
print e.message | apache-2.0 | -292,003,996,196,294,460 | 30.665079 | 90 | 0.607129 | false |
FrancescSala/Instagram-Challenge | solution/shr.py | 1 | 1485 | from PIL import Image
from random import shuffle
import sys
if len(sys.argv) != 4: sys.exit("Usage: python shr.py input_image num_shreds output_image")
# load the input image
img = Image.open(sys.argv[1])
# read the desired number of shreds
numShreds = int(sys.argv[2])
if numShreds < 2: sys.exit("Expected number of shreds to be at least 2")
if img.width % numShreds != 0:
print "The number of shreds must be a submultiple of the width of the image: ", img.width
sys.exit()
# prepare the shred of the image
sequence = range(0, numShreds)
shuffle(sequence)
# check the sequence to make sure there are no contiguous shreds in it
# if there are, just swap them
# in other words, make sure all the shreds in the shredded image will be exactly the same width
for i in range(len(sequence)-1):
# if contiguous shreds, swap them
if sequence[i] == sequence[i+1] - 1:
sequence[i] = sequence[i] + 1
sequence[i+1] = sequence[i+1] - 1
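# Worked example (sketch): a shuffle of [2, 3, 0, 1] becomes [3, 2, 1, 0], so
# no shred is immediately followed by the shred that sat to its right in the
# source image.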
# calculate the width of the shreds
shredWidth = img.width / numShreds
# create the shredded image
shredded = Image.new(img.mode, img.size)
for i, shred_index in enumerate(sequence):
shred_x1, shred_y1 = shredWidth * shred_index, 0
shred_x2, shred_y2 = shred_x1 + shredWidth, img.height
shredded.paste(img.crop((shred_x1, shred_y1, shred_x2, shred_y2)), (shredWidth * i, 0))
# finally, save the shredded image
shredded.save(sys.argv[3])
print "Shredded image saved as: ", sys.argv[3]
| mit | 5,471,137,774,938,985,000 | 32.75 | 95 | 0.708418 | false |
ultrabug/py3status | py3status/modules/vpn_status.py | 1 | 4561 | """
Drop-in replacement for i3status run_watch VPN module.
Expands on the i3status module by displaying the name of the connected vpn
using pydbus. Asynchronously updates on dbus signals unless check_pid is True.
Configuration parameters:
cache_timeout: How often to refresh in seconds when check_pid is True.
(default 10)
check_pid: If True, act just like the default i3status module.
(default False)
format: Format of the output.
(default 'VPN: {name}|VPN: no')
pidfile: Same as i3status pidfile, checked when check_pid is True.
(default '/sys/class/net/vpn0/dev_id')
Format placeholders:
{name} The name and/or status of the VPN.
Color options:
color_bad: VPN connected
color_good: VPN down
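Example configuration (sketch, using the module defaults):
    vpn_status {
        format = "VPN: {name}|VPN: no"
    }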
Requires:
pydbus: Which further requires PyGi. Check your distribution's repositories.
@author Nathan Smith <nathan AT praisetopia.org>
SAMPLE OUTPUT
{'color': '#00FF00', 'full_text': u'VPN: yes'}
off
{'color': '#FF0000', 'full_text': u'VPN: no'}
"""
from pydbus import SystemBus
from gi.repository import GObject
from threading import Thread
from time import sleep
from pathlib import Path
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 10
check_pid = False
format = "VPN: {name}|VPN: no"
pidfile = "/sys/class/net/vpn0/dev_id"
def post_config_hook(self):
self.thread_started = False
self.active = []
def _start_handler_thread(self):
"""Called once to start the event handler thread."""
# Create handler thread
t = Thread(target=self._start_loop)
t.daemon = True
# Start handler thread
t.start()
self.thread_started = True
def _start_loop(self):
"""Starts main event handler loop, run in handler thread t."""
# Create our main loop, get our bus, and add the signal handler
loop = GObject.MainLoop()
bus = SystemBus()
manager = bus.get(".NetworkManager")
manager.onPropertiesChanged = self._vpn_signal_handler
# Loop forever
loop.run()
def _vpn_signal_handler(self, args):
"""Called on NetworkManager PropertiesChanged signal"""
# Args is a dictionary of changed properties
# We only care about changes in ActiveConnections
active = "ActiveConnections"
# Compare current ActiveConnections to last seen ActiveConnections
if active in args and sorted(self.active) != sorted(args[active]):
self.active = args[active]
self.py3.update()
def _get_vpn_status(self):
"""Returns None if no VPN active, Id if active."""
# Sleep for a bit to let any changes in state finish
sleep(0.3)
# Check if any active connections are a VPN
bus = SystemBus()
ids = []
for name in self.active:
conn = bus.get(".NetworkManager", name)
if conn.Vpn:
ids.append(conn.Id)
# No active VPN
return ids
def _check_pid(self):
"""Returns True if pidfile exists, False otherwise."""
return Path(self.pidfile).is_file()
# Method run by py3status
def vpn_status(self):
"""Returns response dict"""
# Start signal handler thread if it should be running
if not self.check_pid and not self.thread_started:
self._start_handler_thread()
# Set color_bad as default output. Replaced if VPN active.
name = None
color = self.py3.COLOR_BAD
# If we are acting like the default i3status module
if self.check_pid:
if self._check_pid():
name = "yes"
color = self.py3.COLOR_GOOD
# Otherwise, find the VPN name, if it is active
else:
vpn = self._get_vpn_status()
if vpn:
name = ", ".join(vpn)
color = self.py3.COLOR_GOOD
# Format and create the response dict
full_text = self.py3.safe_format(self.format, {"name": name})
response = {
"full_text": full_text,
"color": color,
"cached_until": self.py3.CACHE_FOREVER,
}
# Cache forever unless in check_pid mode
if self.check_pid:
response["cached_until"] = self.py3.time_in(self.cache_timeout)
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| bsd-3-clause | -3,124,280,615,940,087,000 | 29.205298 | 80 | 0.612146 | false |
uncommoncode/uscpy | stabilizer.py | 1 | 1757 | """
Image stabilization code.
"""
import argparse
import cv2
import uscpy.sequence
import uscpy.frame
import uscpy.video
parser = argparse.ArgumentParser(description="Perform image stabalization to a video")
parser.add_argument("input", help="input video path")
parser.add_argument("output", help="output video path")
parser.add_argument("--encoder", default="rawrgb", help="output video encoder. supported formats: %s" % (uscpy.video.FORMAT_TABLE.keys()))
args = parser.parse_args()
if args.encoder not in uscpy.video.FORMAT_TABLE:
    raise Exception("Encoding format '%s' not supported." % args.encoder)
vc = cv2.VideoCapture(args.input)
if not vc.isOpened():
raise Exception("Error opening video input '%s'" % args.input)
width = int(vc.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
height = int(vc.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
fps = vc.get(cv2.cv.CV_CAP_PROP_FPS)
fourcc = int(vc.get(cv2.cv.CV_CAP_PROP_FOURCC))
print("video-input:")
print(" width: %d\n height: %d\n fps: %d" % (width, height, fps))
print("video-output:")
print(" format: %s" % (args.encoder))
encoder_format = uscpy.video.FORMAT_TABLE[args.encoder]
vw = cv2.VideoWriter(args.output, encoder_format, fps, (width, height), True)
if not vw.isOpened():
raise Exception("Error opening video output '%s'" % args.output)
vc_sequence = uscpy.sequence.video_capture(vc)
greyscale_sequence = uscpy.sequence.processor(vc_sequence, uscpy.frame.greyscale)
stable_sequence = uscpy.sequence.phase_stabilize(greyscale_sequence)
frame_count = 0
for frame in stable_sequence:
# save each frame to disk
bgr_frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2BGR)
vw.write(bgr_frame)
if (frame_count % fps) == 0:
print("rendered-frame: %d" % frame_count)
frame_count += 1
| mit | 4,128,069,966,771,620,000 | 33.45098 | 138 | 0.715424 | false |
rishig/zulip | zerver/management/commands/show_unreads.py | 1 | 2690 |
from argparse import ArgumentParser
from typing import Any, Dict, List, Set
from django.core.management.base import CommandError
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.topic_mutes import build_topic_mute_checker
from zerver.models import Recipient, Subscription, UserMessage, UserProfile
def get_unread_messages(user_profile: UserProfile) -> List[Dict[str, Any]]:
user_msgs = UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM
).extra(
where=[UserMessage.where_unread()]
).values(
'message_id',
'message__subject',
'message__recipient_id',
'message__recipient__type_id',
).order_by("message_id")
result = [
dict(
message_id=row['message_id'],
topic=row['message__subject'],
stream_id=row['message__recipient__type_id'],
recipient_id=row['message__recipient_id'],
)
for row in list(user_msgs)]
return result
def get_muted_streams(user_profile: UserProfile, stream_ids: Set[int]) -> Set[int]:
rows = Subscription.objects.filter(
user_profile=user_profile,
recipient__type_id__in=stream_ids,
is_muted=True,
).values(
'recipient__type_id'
)
muted_stream_ids = {
row['recipient__type_id']
for row in rows}
return muted_stream_ids
def show_all_unread(user_profile: UserProfile) -> None:
unreads = get_unread_messages(user_profile)
stream_ids = {row['stream_id'] for row in unreads}
muted_stream_ids = get_muted_streams(user_profile, stream_ids)
is_topic_muted = build_topic_mute_checker(user_profile)
for row in unreads:
row['stream_muted'] = row['stream_id'] in muted_stream_ids
row['topic_muted'] = is_topic_muted(row['recipient_id'], row['topic'])
row['before'] = row['message_id'] < user_profile.pointer
for row in unreads:
print(row)
class Command(ZulipBaseCommand):
help = """Show unread counts for a particular user."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('email', metavar='<email>', type=str,
help='email address to spelunk')
self.add_realm_args(parser)
def handle(self, *args: Any, **options: str) -> None:
realm = self.get_realm(options)
email = options['email']
try:
user_profile = self.get_user(email, realm)
except CommandError:
print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm))
return
show_all_unread(user_profile)
| apache-2.0 | -5,050,300,999,581,726,000 | 31.409639 | 87 | 0.628253 | false |
FFMG/myoddweb.piger | monitor/api/python/Python-3.7.2/Lib/test/test_asyncio/test_buffered_proto.py | 3 | 2289 | import asyncio
import unittest
from test.test_asyncio import functional as func_tests
class ReceiveStuffProto(asyncio.BufferedProtocol):
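    # asyncio.BufferedProtocol receives data via a caller-owned buffer:
    # get_buffer() hands out a writable bytearray and buffer_updated(nbytes)
    # reports how many bytes the transport wrote into it.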
def __init__(self, cb, con_lost_fut):
self.cb = cb
self.con_lost_fut = con_lost_fut
def get_buffer(self, sizehint):
self.buffer = bytearray(100)
return self.buffer
def buffer_updated(self, nbytes):
self.cb(self.buffer[:nbytes])
def connection_lost(self, exc):
if exc is None:
self.con_lost_fut.set_result(None)
else:
self.con_lost_fut.set_exception(exc)
class BaseTestBufferedProtocol(func_tests.FunctionalTestCaseMixin):
def new_loop(self):
raise NotImplementedError
def test_buffered_proto_create_connection(self):
NOISE = b'12345678+' * 1024
async def client(addr):
data = b''
def on_buf(buf):
nonlocal data
data += buf
if data == NOISE:
tr.write(b'1')
conn_lost_fut = self.loop.create_future()
tr, pr = await self.loop.create_connection(
lambda: ReceiveStuffProto(on_buf, conn_lost_fut), *addr)
await conn_lost_fut
async def on_server_client(reader, writer):
writer.write(NOISE)
await reader.readexactly(1)
writer.close()
await writer.wait_closed()
srv = self.loop.run_until_complete(
asyncio.start_server(
on_server_client, '127.0.0.1', 0))
addr = srv.sockets[0].getsockname()
self.loop.run_until_complete(
asyncio.wait_for(client(addr), 5, loop=self.loop))
srv.close()
self.loop.run_until_complete(srv.wait_closed())
class BufferedProtocolSelectorTests(BaseTestBufferedProtocol,
unittest.TestCase):
def new_loop(self):
return asyncio.SelectorEventLoop()
@unittest.skipUnless(hasattr(asyncio, 'ProactorEventLoop'), 'Windows only')
class BufferedProtocolProactorTests(BaseTestBufferedProtocol,
unittest.TestCase):
def new_loop(self):
return asyncio.ProactorEventLoop()
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 3,146,373,992,116,577,000 | 25.929412 | 75 | 0.590651 | false |
foursquare/pants | tests/python/pants_test/targets/test_python_target.py | 1 | 3354 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from pants.backend.jvm.artifact import Artifact
from pants.backend.jvm.repository import Repository
from pants.backend.python.python_artifact import PythonArtifact
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_target import PythonTarget
from pants.base.exceptions import TargetDefinitionException
from pants.build_graph.resources import Resources
from pants_test.test_base import TestBase
class PythonTargetTest(TestBase):
def test_validation(self):
internal_repo = Repository(url=None, push_db_basedir=None)
# Adding a JVM Artifact as a provides on a PythonTarget doesn't make a lot of sense.
# This test sets up that very scenario, and verifies that pants throws a
# TargetDefinitionException.
with self.assertRaises(TargetDefinitionException):
self.make_target(target_type=PythonTarget,
spec=":one",
provides=Artifact(org='com.twitter', name='one-jar', repo=internal_repo))
spec = "//:test-with-PythonArtifact"
pa = PythonArtifact(name='foo', version='1.0', description='foo')
# This test verifies that adding a 'setup_py' provides to a PythonTarget is okay.
pt_with_artifact = self.make_target(spec=spec,
target_type=PythonTarget,
provides=pa)
self.assertEquals(pt_with_artifact.address.spec, spec)
spec = "//:test-with-none"
# This test verifies that having no provides is okay.
pt_no_artifact = self.make_target(spec=spec,
target_type=PythonTarget,
provides=None)
self.assertEquals(pt_no_artifact.address.spec, spec)
def assert_single_resource_dep(self, target, expected_resource_path, expected_resource_contents):
self.assertEqual(1, len(target.dependencies))
resources_dep = target.dependencies[0]
self.assertIsInstance(resources_dep, Resources)
self.assertEqual(1, len(target.resources))
resources_tgt = target.resources[0]
self.assertIs(resources_dep, resources_tgt)
self.assertEqual([expected_resource_path], resources_tgt.sources_relative_to_buildroot())
resource_rel_path = resources_tgt.sources_relative_to_buildroot()[0]
with open(os.path.join(self.build_root, resource_rel_path)) as fp:
self.assertEqual(expected_resource_contents, fp.read())
return resources_tgt
def test_resource_dependencies(self):
self.create_file('res/data.txt', contents='1/137')
res = self.make_target(spec='res:resources', target_type=Resources, sources=['data.txt'])
lib = self.make_target(spec='test:lib',
target_type=PythonLibrary,
sources=[],
dependencies=[res])
resource_dep = self.assert_single_resource_dep(lib,
expected_resource_path='res/data.txt',
expected_resource_contents='1/137')
self.assertIs(res, resource_dep)
| apache-2.0 | -1,106,594,334,127,231,900 | 45.583333 | 99 | 0.668157 | false |
adykstra/mne-python | mne/decoding/tests/test_search_light.py | 3 | 9819 | # Author: Jean-Remi King, <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
from numpy.testing import assert_array_equal, assert_equal
import pytest
from mne.utils import requires_version
from mne.decoding.search_light import SlidingEstimator, GeneralizingEstimator
from mne.decoding.transformer import Vectorizer
def make_data():
"""Make data."""
n_epochs, n_chan, n_time = 50, 32, 10
X = np.random.rand(n_epochs, n_chan, n_time)
y = np.arange(n_epochs) % 2
for ii in range(n_time):
coef = np.random.randn(n_chan)
X[y == 0, :, ii] += coef
X[y == 1, :, ii] -= coef
return X, y
@requires_version('sklearn', '0.17')
def test_search_light():
"""Test SlidingEstimator."""
from sklearn.linear_model import Ridge, LogisticRegression
from sklearn.pipeline import make_pipeline
from sklearn.metrics import roc_auc_score, make_scorer
with pytest.warns(None): # NumPy module import
from sklearn.ensemble import BaggingClassifier
from sklearn.base import is_classifier
logreg = LogisticRegression(solver='liblinear', multi_class='ovr',
random_state=0)
X, y = make_data()
n_epochs, _, n_time = X.shape
# init
pytest.raises(ValueError, SlidingEstimator, 'foo')
sl = SlidingEstimator(Ridge())
assert (not is_classifier(sl))
sl = SlidingEstimator(LogisticRegression(solver='liblinear'))
assert (is_classifier(sl))
# fit
assert_equal(sl.__repr__()[:18], '<SlidingEstimator(')
sl.fit(X, y)
assert_equal(sl.__repr__()[-28:], ', fitted with 10 estimators>')
pytest.raises(ValueError, sl.fit, X[1:], y)
pytest.raises(ValueError, sl.fit, X[:, :, 0], y)
sl.fit(X, y, sample_weight=np.ones_like(y))
# transforms
pytest.raises(ValueError, sl.predict, X[:, :, :2])
y_pred = sl.predict(X)
assert (y_pred.dtype == int)
assert_array_equal(y_pred.shape, [n_epochs, n_time])
y_proba = sl.predict_proba(X)
assert (y_proba.dtype == float)
assert_array_equal(y_proba.shape, [n_epochs, n_time, 2])
# score
score = sl.score(X, y)
assert_array_equal(score.shape, [n_time])
assert (np.sum(np.abs(score)) != 0)
assert (score.dtype == float)
sl = SlidingEstimator(logreg)
assert_equal(sl.scoring, None)
# Scoring method
for scoring in ['foo', 999]:
sl = SlidingEstimator(logreg, scoring=scoring)
sl.fit(X, y)
pytest.raises((ValueError, TypeError), sl.score, X, y)
# Check sklearn's roc_auc fix: scikit-learn/scikit-learn#6874
# -- 3 class problem
sl = SlidingEstimator(logreg, scoring='roc_auc')
y = np.arange(len(X)) % 3
sl.fit(X, y)
pytest.raises(ValueError, sl.score, X, y)
# -- 2 class problem not in [0, 1]
y = np.arange(len(X)) % 2 + 1
sl.fit(X, y)
score = sl.score(X, y)
assert_array_equal(score, [roc_auc_score(y - 1, _y_pred - 1)
for _y_pred in sl.decision_function(X).T])
y = np.arange(len(X)) % 2
# Cannot pass a metric as a scoring parameter
sl1 = SlidingEstimator(logreg, scoring=roc_auc_score)
sl1.fit(X, y)
pytest.raises(ValueError, sl1.score, X, y)
# Now use string as scoring
sl1 = SlidingEstimator(logreg, scoring='roc_auc')
sl1.fit(X, y)
rng = np.random.RandomState(0)
X = rng.randn(*X.shape) # randomize X to avoid AUCs in [0, 1]
score_sl = sl1.score(X, y)
assert_array_equal(score_sl.shape, [n_time])
assert (score_sl.dtype == float)
# Check that scoring was applied adequately
scoring = make_scorer(roc_auc_score, needs_threshold=True)
score_manual = [scoring(est, x, y) for est, x in zip(
sl1.estimators_, X.transpose(2, 0, 1))]
assert_array_equal(score_manual, score_sl)
# n_jobs
sl = SlidingEstimator(logreg, n_jobs=1, scoring='roc_auc')
score_1job = sl.fit(X, y).score(X, y)
sl.n_jobs = 2
score_njobs = sl.fit(X, y).score(X, y)
assert_array_equal(score_1job, score_njobs)
sl.predict(X)
# n_jobs > n_estimators
sl.fit(X[..., [0]], y)
sl.predict(X[..., [0]])
# pipeline
class _LogRegTransformer(LogisticRegression):
# XXX needs transformer in pipeline to get first proba only
def __init__(self):
super(_LogRegTransformer, self).__init__()
self.multi_class = 'ovr'
self.random_state = 0
self.solver = 'liblinear'
def transform(self, X):
return super(_LogRegTransformer, self).predict_proba(X)[..., 1]
pipe = make_pipeline(SlidingEstimator(_LogRegTransformer()),
logreg)
pipe.fit(X, y)
pipe.predict(X)
# n-dimensional feature space
X = np.random.rand(10, 3, 4, 2)
y = np.arange(10) % 2
y_preds = list()
for n_jobs in [1, 2]:
pipe = SlidingEstimator(
make_pipeline(Vectorizer(), logreg), n_jobs=n_jobs)
y_preds.append(pipe.fit(X, y).predict(X))
features_shape = pipe.estimators_[0].steps[0][1].features_shape_
assert_array_equal(features_shape, [3, 4])
assert_array_equal(y_preds[0], y_preds[1])
# Bagging classifiers
X = np.random.rand(10, 3, 4)
for n_jobs in (1, 2):
pipe = SlidingEstimator(BaggingClassifier(None, 2), n_jobs=n_jobs)
pipe.fit(X, y)
pipe.score(X, y)
assert (isinstance(pipe.estimators_[0], BaggingClassifier))
@requires_version('sklearn', '0.17')
def test_generalization_light():
"""Test GeneralizingEstimator."""
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
logreg = LogisticRegression(solver='liblinear', multi_class='ovr',
random_state=0)
X, y = make_data()
n_epochs, _, n_time = X.shape
# fit
gl = GeneralizingEstimator(logreg)
assert_equal(repr(gl)[:23], '<GeneralizingEstimator(')
gl.fit(X, y)
gl.fit(X, y, sample_weight=np.ones_like(y))
assert_equal(gl.__repr__()[-28:], ', fitted with 10 estimators>')
# transforms
y_pred = gl.predict(X)
assert_array_equal(y_pred.shape, [n_epochs, n_time, n_time])
assert (y_pred.dtype == int)
y_proba = gl.predict_proba(X)
assert (y_proba.dtype == float)
assert_array_equal(y_proba.shape, [n_epochs, n_time, n_time, 2])
# transform to different datasize
y_pred = gl.predict(X[:, :, :2])
assert_array_equal(y_pred.shape, [n_epochs, n_time, 2])
# score
score = gl.score(X[:, :, :3], y)
assert_array_equal(score.shape, [n_time, 3])
assert (np.sum(np.abs(score)) != 0)
assert (score.dtype == float)
gl = GeneralizingEstimator(logreg, scoring='roc_auc')
gl.fit(X, y)
score = gl.score(X, y)
auc = roc_auc_score(y, gl.estimators_[0].predict_proba(X[..., 0])[..., 1])
assert_equal(score[0, 0], auc)
for scoring in ['foo', 999]:
gl = GeneralizingEstimator(logreg, scoring=scoring)
gl.fit(X, y)
pytest.raises((ValueError, TypeError), gl.score, X, y)
# Check sklearn's roc_auc fix: scikit-learn/scikit-learn#6874
# -- 3 class problem
gl = GeneralizingEstimator(logreg, scoring='roc_auc')
y = np.arange(len(X)) % 3
gl.fit(X, y)
pytest.raises(ValueError, gl.score, X, y)
# -- 2 class problem not in [0, 1]
y = np.arange(len(X)) % 2 + 1
gl.fit(X, y)
score = gl.score(X, y)
manual_score = [[roc_auc_score(y - 1, _y_pred) for _y_pred in _y_preds]
for _y_preds in gl.decision_function(X).transpose(1, 2, 0)]
assert_array_equal(score, manual_score)
# n_jobs
gl = GeneralizingEstimator(logreg, n_jobs=2)
gl.fit(X, y)
y_pred = gl.predict(X)
assert_array_equal(y_pred.shape, [n_epochs, n_time, n_time])
score = gl.score(X, y)
assert_array_equal(score.shape, [n_time, n_time])
# n_jobs > n_estimators
gl.fit(X[..., [0]], y)
gl.predict(X[..., [0]])
# n-dimensional feature space
X = np.random.rand(10, 3, 4, 2)
y = np.arange(10) % 2
y_preds = list()
for n_jobs in [1, 2]:
pipe = GeneralizingEstimator(
make_pipeline(Vectorizer(), logreg), n_jobs=n_jobs)
y_preds.append(pipe.fit(X, y).predict(X))
features_shape = pipe.estimators_[0].steps[0][1].features_shape_
assert_array_equal(features_shape, [3, 4])
assert_array_equal(y_preds[0], y_preds[1])
@requires_version('sklearn', '0.19') # 0.18 does not raise when it should
def test_cross_val_predict():
"""Test cross_val_predict with predict_proba."""
from sklearn.linear_model import LinearRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.base import BaseEstimator, clone
from sklearn.model_selection import cross_val_predict
rng = np.random.RandomState(42)
X = rng.randn(10, 1, 3)
y = rng.randint(0, 2, 10)
estimator = SlidingEstimator(LinearRegression())
cross_val_predict(estimator, X, y, cv=2)
class Classifier(BaseEstimator):
"""Moch class that does not have classes_ attribute."""
def __init__(self):
self.base_estimator = LinearDiscriminantAnalysis()
def fit(self, X, y):
self.estimator_ = clone(self.base_estimator).fit(X, y)
return self
def predict_proba(self, X):
return self.estimator_.predict_proba(X)
with pytest.raises(AttributeError, match="classes_ attribute"):
estimator = SlidingEstimator(Classifier())
cross_val_predict(estimator, X, y, method='predict_proba', cv=2)
estimator = SlidingEstimator(LinearDiscriminantAnalysis())
cross_val_predict(estimator, X, y, method='predict_proba', cv=2)
| bsd-3-clause | -2,644,951,725,918,423,000 | 33.696113 | 79 | 0.618291 | false |
vicnet/weboob | weboob/tools/pdf.py | 1 | 16156 | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Oleg Plakhotniuk
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from collections import namedtuple
import logging
import os
import subprocess
from tempfile import mkstemp
from .compat import range
__all__ = ['decompress_pdf', 'get_pdf_rows']
def decompress_pdf(inpdf):
"""
Takes PDF file contents as a string and returns decompressed version
of the file contents, suitable for text parsing.
External dependencies:
MuPDF (http://www.mupdf.com).
"""
inh, inname = mkstemp(suffix='.pdf')
outh, outname = mkstemp(suffix='.pdf')
os.write(inh, inpdf)
os.close(inh)
os.close(outh)
subprocess.call(['mutool', 'clean', '-d', inname, outname])
with open(outname, 'rb') as f:
outpdf = f.read()
os.remove(inname)
os.remove(outname)
return outpdf
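# Example usage (sketch; assumes a compressed 'statement.pdf' on disk):
#   with open('statement.pdf', 'rb') as f:
#       cleaned = decompress_pdf(f.read())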
Rect = namedtuple('Rect', ('x0', 'y0', 'x1', 'y1'))
TextRect = namedtuple('TextRect', ('x0', 'y0', 'x1', 'y1', 'text'))
def almost_eq(a, b):
return abs(a - b) < 2
def lt_to_coords(obj, ltpage):
# in a pdf, 'y' coords are bottom-to-top
# in a pdf, coordinates are very often almost equal but not strictly equal
x0 = (min(obj.x0, obj.x1))
y0 = (min(ltpage.y1 - obj.y0, ltpage.y1 - obj.y1))
x1 = (max(obj.x0, obj.x1))
y1 = (max(ltpage.y1 - obj.y0, ltpage.y1 - obj.y1))
x0 = round(x0)
y0 = round(y0)
x1 = round(x1)
y1 = round(y1)
# in a pdf, straight lines are actually rects, make them as thin as possible
if almost_eq(x1, x0):
x1 = x0
if almost_eq(y1, y0):
y1 = y0
return Rect(x0, y0, x1, y1)
def lttext_to_multilines(obj, ltpage):
# text lines within 'obj' are probably the same height
x0 = (min(obj.x0, obj.x1))
y0 = (min(ltpage.y1 - obj.y0, ltpage.y1 - obj.y1))
x1 = (max(obj.x0, obj.x1))
y1 = (max(ltpage.y1 - obj.y0, ltpage.y1 - obj.y1))
lines = obj.get_text().rstrip('\n').split('\n')
h = (y1 - y0) / len(lines)
for n, line in enumerate(lines):
yield TextRect((x0), (y0 + n * h), (x1), (y0 + n * h + h), line)
# fuzzy floats to smooth comparisons because lines are actually rects
# and seemingly-contiguous lines are actually not contiguous
class ApproxFloat(float):
def __eq__(self, other):
return almost_eq(self, other)
def __ne__(self, other):
return not self == other
def __lt__(self, other):
return self - other < 0 and self != other
def __le__(self, other):
return self - other <= 0 or self == other
def __gt__(self, other):
return not self <= other
def __ge__(self, other):
return not self < other
ANGLE_VERTICAL = 0
ANGLE_HORIZONTAL = 1
ANGLE_OTHER = 2
def angle(r):
if r.x0 == r.x1:
return ANGLE_VERTICAL
elif r.y0 == r.y1:
return ANGLE_HORIZONTAL
return ANGLE_OTHER
class ApproxVecDict(dict):
# since coords are never strictly equal, search coords around
# store vectors and points
def __getitem__(self, coords):
x, y = coords
for i in (0, -1, 1):
for j in (0, -1, 1):
try:
return super(ApproxVecDict, self).__getitem__((x+i, y+j))
except KeyError:
pass
raise KeyError()
def get(self, k, v=None):
try:
return self[k]
except KeyError:
return v
class ApproxRectDict(dict):
# like ApproxVecDict, but store rects
def __getitem__(self, coords):
x0, y0, x1, y1 = coords
for i in (0, -1, 1):
for j in (0, -1, 1):
if x0 == x1:
for j2 in (0, -1, 1):
try:
return super(ApproxRectDict, self).__getitem__((x0+i, y0+j, x0+i, y1+j2))
except KeyError:
pass
elif y0 == y1:
for i2 in (0, -1, 1):
try:
return super(ApproxRectDict, self).__getitem__((x0+i, y0+j, x1+i2, y0+j))
except KeyError:
pass
else:
return super(ApproxRectDict, self).__getitem__((x0, y0, x1, y1))
raise KeyError()
def uniq_lines(lines):
new = ApproxRectDict()
for line in lines:
line = tuple(line)
try:
new[line]
except KeyError:
new[line] = None
return [Rect(*k) for k in new.keys()]
def build_rows(lines):
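    # Approach sketch: index every line by its top-left corner; for each corner,
    # pair a rightward (horizontal) line with a downward (vertical) line and
    # look for the matching parallels that close the rectangle. Boxes sharing
    # the same vertical span are then grouped into one row.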
points = ApproxVecDict()
# for each top-left point, build tuple with lines going down and lines going right
for line in lines:
a = angle(line)
if a not in (ANGLE_HORIZONTAL, ANGLE_VERTICAL):
continue
coord = (line.x0, line.y0)
plines = points.get(coord)
if plines is None:
plines = points[coord] = tuple([] for _ in range(2))
plines[a].append(line)
boxes = ApproxVecDict()
for plines in points.values():
if not (plines[ANGLE_HORIZONTAL] and plines[ANGLE_VERTICAL]):
continue
plines[ANGLE_HORIZONTAL].sort(key=lambda l: (l.y0, l.x1))
plines[ANGLE_VERTICAL].sort(key=lambda l: (l.x0, l.y1))
for hline in plines[ANGLE_HORIZONTAL]:
try:
vparallels = points[hline.x1, hline.y0][ANGLE_VERTICAL]
except KeyError:
continue
if not vparallels:
continue
for vline in plines[ANGLE_VERTICAL]:
try:
hparallels = points[vline.x0, vline.y1][ANGLE_HORIZONTAL]
except KeyError:
continue
if not hparallels:
continue
hparallels = [hpar for hpar in hparallels if almost_eq(hpar.x1, hline.x1)]
if not hparallels:
continue
vparallels = [vpar for vpar in vparallels if almost_eq(vpar.y1, vline.y1)]
if not vparallels:
continue
assert len(hparallels) == 1 and len(vparallels) == 1
assert almost_eq(hparallels[0].y0, vparallels[0].y1)
assert almost_eq(vparallels[0].x0, hparallels[0].x1)
box = Rect(hline.x0, hline.y0, hline.x1, vline.y1)
boxes.setdefault((vline.y0, vline.y1), []).append(box)
rows = list(boxes.values())
new_rows = []
for row in rows:
row.sort(key=lambda box: box.x0)
if row:
row = [row[0]] + [c for n, c in enumerate(row[1:], 1) if row[n-1].x0 != c.x0]
new_rows.append(row)
rows = new_rows
rows.sort(key=lambda row: row[0].y0)
return rows
def find_in_table(rows, rect):
for j, row in enumerate(rows):
if ApproxFloat(row[0].y0) > rect.y1:
break
if not (ApproxFloat(row[0].y0) <= rect.y0 and ApproxFloat(row[0].y1) >= rect.y1):
continue
for i, box in enumerate(row):
if ApproxFloat(box.x0) <= rect.x0 and ApproxFloat(box.x1) >= rect.x1:
return i, j
def arrange_texts_in_rows(rows, trects):
table = [[[] for _ in row] for row in rows]
for trect in trects:
pos = find_in_table(rows, trect)
if not pos:
continue
table[pos[1]][pos[0]].append(trect.text)
return table
LOGGER = logging.getLogger('pdf')
DEBUGFILES = logging.DEBUG - 1
def get_pdf_rows(data, miner_layout=True):
"""
    Takes PDF file content as a string and yields table row data for each page.
For each page in the PDF, the function yields a list of rows.
Each row is a list of cells. Each cell is a list of strings present in the cell.
Note that the rows may belong to different tables.
There are no logic tables in PDF format, so this parses PDF drawing instructions
and tries to find rectangles and arrange them in rows, then arrange text in
the rectangles.
External dependencies:
PDFMiner (http://www.unixuser.org/~euske/python/pdfminer/index.html).
"""
try:
from pdfminer.pdfparser import PDFParser, PDFSyntaxError
except ImportError:
raise ImportError('Please install python-pdfminer')
try:
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
newapi = True
except ImportError:
from pdfminer.pdfparser import PDFDocument
newapi = False
from pdfminer.converter import PDFPageAggregator
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.layout import LAParams, LTRect, LTTextBox, LTTextLine, LTLine, LTChar, LTCurve
parser = PDFParser(BytesIO(data))
try:
if newapi:
doc = PDFDocument(parser)
else:
doc = PDFDocument()
parser.set_document(doc)
doc.set_parser(parser)
except PDFSyntaxError:
return
rsrcmgr = PDFResourceManager()
if miner_layout:
device = PDFPageAggregator(rsrcmgr, laparams=LAParams())
else:
device = PDFPageAggregator(rsrcmgr)
interpreter = PDFPageInterpreter(rsrcmgr, device)
if newapi:
pages = PDFPage.get_pages(BytesIO(data), check_extractable=True)
else:
doc.initialize()
pages = doc.get_pages()
if LOGGER.isEnabledFor(DEBUGFILES):
import tempfile
import PIL.Image as Image
import PIL.ImageDraw as ImageDraw
import random
path = tempfile.mkdtemp(prefix='pdf')
for npage, page in enumerate(pages):
LOGGER.debug('processing page %s', npage)
interpreter.process_page(page)
page_layout = device.get_result()
texts = sum([list(lttext_to_multilines(obj, page_layout)) for obj in page_layout._objs if isinstance(obj, (LTTextBox, LTTextLine, LTChar))], [])
LOGGER.debug('found %d text objects', len(texts))
if LOGGER.isEnabledFor(DEBUGFILES):
img = Image.new('RGB', (int(page.mediabox[2]), int(page.mediabox[3])), (255, 255, 255))
draw = ImageDraw.Draw(img)
for t in texts:
color = (random.randint(127, 255), random.randint(127, 255), random.randint(127, 255))
draw.rectangle((t.x0, t.y0, t.x1, t.y1), outline=color)
draw.text((t.x0, t.y0), t.text.encode('utf-8'), color)
fpath = '%s/1text-%03d.png' % (path, npage)
img.save(fpath)
LOGGER.log(DEBUGFILES, 'saved %r', fpath)
if not miner_layout:
texts.sort(key=lambda t: (t.y0, t.x0))
# TODO filter ltcurves that are not lines?
# TODO convert rects to 4 lines?
lines = [lt_to_coords(obj, page_layout) for obj in page_layout._objs if isinstance(obj, (LTRect, LTLine, LTCurve))]
LOGGER.debug('found %d lines', len(lines))
if LOGGER.isEnabledFor(DEBUGFILES):
img = Image.new('RGB', (int(page.mediabox[2]), int(page.mediabox[3])), (255, 255, 255))
draw = ImageDraw.Draw(img)
for l in lines:
color = (random.randint(127, 255), random.randint(127, 255), random.randint(127, 255))
draw.rectangle((l.x0, l.y0, l.x1, l.y1), outline=color)
fpath = '%s/2lines-%03d.png' % (path, npage)
img.save(fpath)
LOGGER.log(DEBUGFILES, 'saved %r', fpath)
lines = list(uniq_lines(lines))
LOGGER.debug('found %d unique lines', len(lines))
rows = build_rows(lines)
LOGGER.debug('built %d rows (%d boxes)', len(rows), sum(len(row) for row in rows))
if LOGGER.isEnabledFor(DEBUGFILES):
img = Image.new('RGB', (int(page.mediabox[2]), int(page.mediabox[3])), (255, 255, 255))
draw = ImageDraw.Draw(img)
for r in rows:
for b in r:
color = (random.randint(127, 255), random.randint(127, 255), random.randint(127, 255))
draw.rectangle((b.x0 + 1, b.y0 + 1, b.x1 - 1, b.y1 - 1), outline=color)
fpath = '%s/3rows-%03d.png' % (path, npage)
img.save(fpath)
LOGGER.log(DEBUGFILES, 'saved %r', fpath)
textrows = arrange_texts_in_rows(rows, texts)
LOGGER.debug('assigned %d strings', sum(sum(len(c) for c in r) for r in textrows))
if LOGGER.isEnabledFor(DEBUGFILES):
img = Image.new('RGB', (int(page.mediabox[2]), int(page.mediabox[3])), (255, 255, 255))
draw = ImageDraw.Draw(img)
for row, trow in zip(rows, textrows):
for b, tlines in zip(row, trow):
color = (random.randint(127, 255), random.randint(127, 255), random.randint(127, 255))
draw.rectangle((b.x0 + 1, b.y0 + 1, b.x1 - 1, b.y1 - 1), outline=color)
draw.text((b.x0 + 1, b.y0 + 1), '\n'.join(tlines).encode('utf-8'), color)
fpath = '%s/4cells-%03d.png' % (path, npage)
img.save(fpath)
LOGGER.log(DEBUGFILES, 'saved %r', fpath)
yield textrows
device.close()
# Export part #
def html_to_pdf(browser, url=None, data=None, extra_options=None):
"""
Convert html to PDF.
:param browser: browser instance
:param url: link to the html ressource
:param data: HTML content
:return: the document converted in PDF
:rtype: bytes
"""
try:
import pdfkit # https://pypi.python.org/pypi/pdfkit
except ImportError:
raise ImportError('Please install python-pdfkit')
assert (url or data) and not (url and data), 'Please give only url or data parameter'
callback = pdfkit.from_url if url else pdfkit.from_string
options = {}
try:
cookies = browser.session.cookies
except AttributeError:
pass
else:
options.update({
'cookie': [(cookie, value) for cookie, value in cookies.items() if value], # cookies of browser
})
if extra_options:
options.update(extra_options)
return callback(url or data, False, options=options)
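# Example usage (sketch; 'browser' is a weboob browser instance):
#   pdf_bytes = html_to_pdf(browser, url='https://example.com/invoice')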
# extract all text from PDF
def extract_text(data):
try:
try:
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
newapi = True
except ImportError:
from pdfminer.pdfparser import PDFDocument
newapi = False
from pdfminer.pdfparser import PDFParser, PDFSyntaxError
from pdfminer.converter import TextConverter
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
except ImportError:
raise ImportError('Please install python-pdfminer to parse PDF')
else:
parser = PDFParser(BytesIO(data))
try:
if newapi:
doc = PDFDocument(parser)
else:
doc = PDFDocument()
parser.set_document(doc)
doc.set_parser(parser)
except PDFSyntaxError:
return
rsrcmgr = PDFResourceManager()
out = BytesIO()
device = TextConverter(rsrcmgr, out)
interpreter = PDFPageInterpreter(rsrcmgr, device)
if newapi:
pages = PDFPage.create_pages(doc)
else:
doc.initialize()
pages = doc.get_pages()
for page in pages:
interpreter.process_page(page)
return out.getvalue()
| lgpl-3.0 | -273,186,497,165,296,580 | 31.441767 | 152 | 0.581703 | false |
istvanzk/RasPiConnectServer | ExecuteFiles/ExecuteServerStatus.py | 1 | 1371 | #!/usr/local/bin/python3
# Filename: ExecuteServerStatus.py
# Version 2.7 07/29/13 RV MiloCreek
# Version 3.0 04.04.2016 IzK (Python3.4+)
import Config
import subprocess
import xml.etree.ElementTree as ET
import Validate
import BuildResponse
import time
if (Config.i2c_demo()):
from pyblinkm import BlinkM, Scripts
def Execute_Server_Status(root):
# find the interface object type
objectServerID = root.find("./OBJECTSERVERID").text
objectFlags = root.find("./OBJECTFLAGS").text
validate = Validate.checkForValidate(root)
if (Config.debug()):
print("VALIDATE=%s" % validate)
outgoingXMLData = BuildResponse.buildHeader(root)
if (Config.debug()):
print("objectServerID = %s" % objectServerID)
# we have the objectServerID so now we can choose the correct
# program
if (objectServerID == "SS-1"):
#check for validate request
if (validate == "YES"):
outgoingXMLData += Validate.buildValidateResponse("YES")
outgoingXMLData += BuildResponse.buildFooter()
return outgoingXMLData
responseData = "2"
outgoingXMLData += BuildResponse.buildResponse(responseData)
else:
# invalid RaspiConnect Code
outgoingXMLData += Validate.buildValidateResponse("NO")
outgoingXMLData += BuildResponse.buildFooter()
if (Config.debug()):
print(outgoingXMLData)
return outgoingXMLData
# End of ExecuteServerStatus.py
| gpl-3.0 | -6,192,896,473,730,953,000 | 17.28 | 62 | 0.736689 | false |
berkmancenter/mediacloud | apps/extract-and-vector/tests/python/extract_and_vector/dbi/downloads/extract/test_process_download_for_extractor.py | 1 | 1308 | from extract_and_vector.dbi.downloads.extract import process_download_for_extractor
from .setup_test_extract import TestExtractDB
class TestProcessDownloadForExtractor(TestExtractDB):
def test_process_download_for_extractor(self):
# Make sure nothing's extracted yet and download text is not to be found
assert len(self.db.select(
table='story_sentences',
what_to_select='*',
condition_hash={'stories_id': self.test_download['stories_id']},
).hashes()) == 0
assert len(self.db.select(
table='download_texts',
what_to_select='*',
condition_hash={'downloads_id': self.test_download['downloads_id']},
).hashes()) == 0
process_download_for_extractor(db=self.db, download=self.test_download)
# We expect the download to be extracted and the story to be processed
assert len(self.db.select(
table='story_sentences',
what_to_select='*',
condition_hash={'stories_id': self.test_download['stories_id']},
).hashes()) > 0
assert len(self.db.select(
table='download_texts',
what_to_select='*',
condition_hash={'downloads_id': self.test_download['downloads_id']},
).hashes()) > 0
| agpl-3.0 | 7,631,249,378,914,834,000 | 39.875 | 83 | 0.611621 | false |
linyc74/WinduVision | threads/process_thread.py | 1 | 9447 | import numpy as np
import cv2, time, sys, threading, json
from constants import *
from abstract_thread import *
from stereo import Stereo as stereo
class ProcessThread(AbstractThread):
def __init__(self, cap_thread_R, cap_thread_L, mediator):
super(ProcessThread, self).__init__()
self.cap_thread_R = cap_thread_R
self.cap_thread_L = cap_thread_L
self.mediator = mediator
self.__init__parms()
self.set_fps(30.0)
self.connect_signals(mediator, ['display_image', 'set_info_text'])
def __init__parms(self):
# Parameters for image processing
self.offset_x, self.offset_y = 0, 0
self.zoom = 1.0
with open('parameters/gui.json', 'r') as fh:
gui_parms = json.loads(fh.read())
w = gui_parms['default_width']
h = gui_parms['default_height']
self.set_display_size(w, h)
self.set_resize_matrix()
# Parameters for stereo depth map
self.ndisparities = 32 # Must be divisible by 16
self.SADWindowSize = 31 # Must be odd, be within 5..255 and be not larger than image width or height
# Parameters for control and timing
self.computingDepth = False
self.t_series = [time.time() for i in range(30)]
def set_display_size(self, width, height):
'''
        Define the dimensions of self.img_display, the final image displayed in the GUI.
'''
self.display_width = width
self.display_height = height
# Define the dimensions of:
# self.imgR_proc --- processed R image to be accessed externally
# self.imgL_proc --- L image
# self.img_display --- display image to be emitted to the GUI object
rows, cols = height, width
self.imgR_proc = np.zeros((rows, cols/2, 3), np.uint8)
self.imgL_proc = np.zeros((rows, cols/2, 3), np.uint8)
self.img_display = np.zeros((rows, cols , 3), np.uint8)
def set_resize_matrix(self):
'''
Define the transformation matrix for the image processing pipeline.
'''
img = self.cap_thread_R.get_image()
img_height, img_width, _ = img.shape
display_height, display_width = self.display_height, self.display_width
# The height-to-width ratio
ratio_img = float(img_height) / img_width
ratio_display = float(display_height) / (display_width / 2)
# The base scale factor is the ratio of display size / image size,
# which scales the image to the size of the display.
if ratio_img > ratio_display:
base_scale = float(display_height) / img_height # Height is the limiting factor
else:
base_scale = float(display_width/2) / img_width # Width is the limiting factor
# The actual scale factor is the product of the base scale factor and the zoom factor.
scale_x = base_scale * self.zoom
scale_y = base_scale * self.zoom
# The translation distance for centering
# = half of the difference between
# the screen size and the zoomed image size
# ( ( display size ) - ( zoomed image size ) ) / 2
tx = ( (display_width / 2) - (img_width * scale_x) ) / 2
ty = ( (display_height ) - (img_height * scale_y) ) / 2
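        # Worked example (sketch): an 800x600 display shows a 640x480 camera
        # image on each half (400x600). ratio_img = 480/640 = 0.75 is smaller
        # than ratio_display = 600/400 = 1.5, so width is limiting: base_scale =
        # 400/640 = 0.625. At zoom 1.0, tx = (400 - 640*0.625)/2 = 0 and
        # ty = (600 - 480*0.625)/2 = 150, centering the image vertically.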
# Putting everything together into a matrix
Sx = scale_x
Sy = scale_y
Off_x = self.offset_x
Off_y = self.offset_y
# For the right image, it's only scaling and centering
self.resize_matrix_R = np.float32([ [Sx, 0 , tx] ,
[0 , Sy, ty] ])
# For the left image, in addition to scaling and centering, the offset is also applied.
self.resize_matrix_L = np.float32([ [Sx, 0 , Sx*Off_x + tx] ,
[0 , Sy, Sy*Off_y + ty] ])
def main(self):
'''
There are three major steps for the image processing pipeline,
with some additional steps in between.
( ) Check image dimensions.
(1) Eliminate offset of the left image.
(2) Resize and translate to place each image at the center of both sides of the view.
( ) Compute depth map (optional).
(3) Combine images.
'''
# Get the images from self.capture_thread
self.imgR_0 = self.cap_thread_R.get_image() # The suffix '_0' means raw input image
self.imgL_0 = self.cap_thread_L.get_image()
# Quick check on the image dimensions
# If not matching, skip all following steps
if not self.imgR_0.shape == self.imgL_0.shape:
self.mediator.emit_signal( signal_name = 'set_info_text',
arg = 'Image dimensions not identical.' )
time.sleep(0.1)
return
# (1) Eliminate offset of the left image.
# (2) Resize and translate to place each image at the center of both sides of the view.
rows, cols = self.display_height, self.display_width / 2 # Output image dimension
self.imgR_1 = cv2.warpAffine(self.imgR_0, self.resize_matrix_R, (cols, rows))
self.imgL_1 = cv2.warpAffine(self.imgL_0, self.resize_matrix_L, (cols, rows))
# Update processed images for external access
self.imgR_proc[:,:,:] = self.imgR_1[:,:,:]
self.imgL_proc[:,:,:] = self.imgL_1[:,:,:]
# Compute stereo depth map (optional)
if self.computingDepth:
self.imgL_1 = self.compute_depth()
# (3) Combine images.
h, w = self.display_height, self.display_width
self.img_display[:, 0:(w/2), :] = self.imgL_1
self.img_display[:, (w/2):w, :] = self.imgR_1
self.mediator.emit_signal( signal_name = 'display_image',
arg = self.img_display )
self.emit_fps_info()
def compute_depth(self):
imgL = stereo.compute_depth(self.imgR_1, self.imgL_1, self.ndisparities, self.SADWindowSize)
return imgL
def emit_fps_info(self):
'''
Emits real-time frame-rate info to the gui
'''
# Shift time series by one
self.t_series[1:] = self.t_series[:-1]
# Get the current time -> First in the series
self.t_series[0] = time.time()
# Calculate frame rate
rate = len(self.t_series) / (self.t_series[0] - self.t_series[-1])
data = {'line': 3,
'text': 'Active process thread: {} fps'.format(rate)}
self.mediator.emit_signal( signal_name = 'set_info_text',
arg = data )
# Below are public methods for higher-level objects
def set_offset(self, offset_x, offset_y):
x_limit, y_limit = 100, 100
if abs(offset_x) > x_limit or abs(offset_y) > y_limit:
self.offset_x, self.offset_y = 0, 0
else:
self.offset_x, self.offset_y = offset_x, offset_y
self.set_resize_matrix()
def detect_offset(self):
'''
1) Read right and left images from the cameras.
2) Use correlation function to calculate the offset.
'''
imgR = self.cap_thread_R.get_image()
imgL = self.cap_thread_L.get_image()
imgR = cv2.cvtColor(imgR, cv2.COLOR_BGR2GRAY)
imgL = cv2.cvtColor(imgL, cv2.COLOR_BGR2GRAY)
if not imgR.shape == imgL.shape:
return
# Define ROI of the left image
row, col = imgL.shape
a = int(row*0.25)
b = int(row*0.75)
c = int(col*0.25)
d = int(col*0.75)
roiL = np.float32( imgL[a:b, c:d] )
mat = cv2.matchTemplate(image = np.float32(imgR) ,
templ = roiL ,
method = cv2.TM_CCORR_NORMED)
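        # The template was cut from the central 50% of the left image, so a
        # perfectly aligned pair peaks at (col/4, row/4); any displacement of
        # the correlation peak from that point is the stereo offset.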
# Vertical alignment, should always be done
y_max = cv2.minMaxLoc(mat)[3][1]
offset_y = y_max - row / 4
# Horizontal alignment, for infinitely far objects
x_max = cv2.minMaxLoc(mat)[3][0]
offset_x = x_max - col / 4
return offset_x, offset_y
def zoom_in(self):
if self.zoom * 1.01 < 2.0:
self.zoom = self.zoom * 1.01
self.set_resize_matrix()
def zoom_out(self):
if self.zoom / 1.01 > 0.5:
self.zoom = self.zoom / 1.01
self.set_resize_matrix()
def apply_depth_parameters(self, parameters):
"""
Args:
parameters: a dictionary with
key: str, parameter name
value: int, parameter value
"""
for key, value in parameters.items():
setattr(self, key, value)
def change_display_size(self, width, height):
self.pause()
self.set_display_size(width, height)
self.set_resize_matrix()
self.resume()
def get_processed_images(self):
return self.imgR_proc, self.imgL_proc
def get_display_image(self):
return self.img_display
def set_cap_threads(self, thread_R, thread_L):
self.pause()
self.cap_thread_R = thread_R
self.cap_thread_L = thread_L
# The input image dimension could be different after switching camera
# So reset resize matrix
self.set_resize_matrix()
self.resume()
| mit | -2,138,509,404,374,302,500 | 32.147368 | 108 | 0.565471 | false |
pibroch/ocfs2-test | programs/write_torture/write_torture.py | 1 | 4399 | #!/usr/bin/env python
#
#
# Copyright (C) 2006 Oracle. All rights reserved.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 021110-1307, USA.
#
# XXX: Future improvements:
#
# Program : write_torture.py
# Description : Interface to run write_torture. Will validate parameters and
# properly configure LAM/MPI and start it before starting
# the write_torture program. This progran will run on each
# node.
# Author : Marcos E. Matsunaga
#
import os, stat, sys, time, optparse, socket, string, o2tf, pdb, timing, config
import random
#
#pdb.set_trace()
#
#args = sys.argv[1:]
#
MINBLOCKSIZE = 512
MAXBLOCKSIZE = 8192
#
DEBUGON = os.getenv('DEBUG',0)
#
EXECPGM = os.path.join(config.BINDIR,'write_torture')
#
uname = os.uname()
lhostname = str(socket.gethostname())
numnodes = 0
logfile = config.LOGFILE
blocksize = '512,4096'
seconds = 60
#
Usage = '\n %prog [-b|--blocksize] \
[-f | --filename <fullpath filename>] \
[-l | --logfile logfilename] \
[-s | --seconds seconds] \
[-u | --uniquefile] \
[-h|--help]'
#
# FUNCTIONS
#
#
# MAIN
#
if __name__=='__main__':
parser = optparse.OptionParser(usage=Usage)
#
parser.add_option('-b',
'--blocksize',
dest='blocksize',
type='string',
help='Blocksize interval that will be during test. \
Range from 512 to 8192 bytes (Format:xxx,yyy).')
#
parser.add_option('-f',
'--filename',
dest='filename',
type='string',
help='Filename that will be used during test.')
#
parser.add_option('-l',
'--logfile',
dest='logfile',
type='string',
help='Logfile used by the process.')
#
parser.add_option('-s',
'--seconds',
dest='seconds',
type='int',
help='Number of seconds the test will run (def. 60).')
#
parser.add_option('-u',
'--uniquefile',
action="store_true",
dest='uniquefile',
default=False)
#
(options, args) = parser.parse_args()
if len(args) != 0:
o2tf.printlog('args left %s' % len(args), logfile, 0, '')
parser.error('incorrect number of arguments')
#
if options.blocksize:
blocksize = options.blocksize
blockvalues = blocksize.split(',')
if len(blockvalues) != 2:
o2tf.printlog('Blocksize must be specified in format xxx,yyy\n\n',
logfile,
0,
'')
parser.error('Invalid format.')
else:
parser.error('Blocksize parameter needs to be specified.')
if int(blockvalues[0]) < MINBLOCKSIZE or int(blockvalues[1]) > MAXBLOCKSIZE:
o2tf.printlog('Blocksize must be between %s and %s\n\n' % \
(MINBLOCKSIZE, MAXBLOCKSIZE),
logfile,
0,
'')
parser.error('Invalid range.')
if DEBUGON:
o2tf.printlog('Blocksize range from %s to %s\n\n' % \
(str(blockvalues[0]), str(blockvalues[1])),
logfile,
0,
'')
#
if options.filename:
filename = options.filename
else:
parser.error('filename parameter needs to be specified.')
#
if options.logfile:
logfile = options.logfile
#
if options.seconds:
seconds = options.seconds
#
print options.uniquefile
if not options.uniquefile:
filename = options.filename + '_' + lhostname + '_' + str(os.getpid())
#
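	# Pick one random block size within the requested range for this run.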
BLKSZ = random.randint(int(blockvalues[0]), int(blockvalues[1]))
cmd = (EXECPGM + ' -s %s -b %s %s 2>&1 | tee -a %s' %
(seconds, BLKSZ, filename, logfile))
if DEBUGON:
o2tf.printlog('write_torture: main - current directory %s' % os.getcwd(),
logfile,
0,
'')
o2tf.printlog('write_torture: main - filename = %s' % filename,
logfile,
0,
'')
o2tf.printlog('write_torture: main - BLKSZ = %s' %
BLKSZ,
logfile,
0,
'')
t1 = time.time()
if DEBUGON:
o2tf.printlog('write_torture: main - cmd = %s' % cmd,
logfile,
0,
'')
RC = os.system(cmd)
t2 = time.time()
if DEBUGON:
o2tf.printlog('write_torture: elapsed time = %s - RC = %s' %
((t2 - t1), RC),
logfile,
0,
'')
#
sys.exit(RC)
| gpl-2.0 | 5,714,742,131,041,537,000 | 23.713483 | 79 | 0.661287 | false |
amyth/phoenix | backends/core.py | 1 | 1130 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
########################################
##
# @author: Amyth
# @email: [email protected]
# @website: www.techstricks.com
# @created_date: 01-03-2017
# @last_modify: Wed May 3 15:58:37 2017
##
########################################
import datetime
class BaseBackend(object):
def __init__(self, *args, **kwargs):
self._prepare()
def _prepare(self):
pass
def get_date_object(self, date):
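        # Expects dates like "May 03 2017" ('%b %d %Y').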
try:
return datetime.datetime.strptime(date, '%b %d %Y')
except Exception as err:
#TODO: log unsuccessful datetime conversion
print "Could not convert datetime for %s with pattern (b d Y)" % date
return None
def insert_sents(self):
raise NotImplementedError('`insert_sents` method for this backend is not defined.')
def insert_opens(self):
raise NotImplementedError('`insert_opens` method for this backend is not defined.')
def insert_clicks(self):
raise NotImplementedError('`insert_clicks` method for this backend is not defined.')
| mit | -6,139,851,337,817,696,000 | 25.904762 | 92 | 0.574336 | false |
dgilman/atom_maker | schema.py | 1 | 2566 | # How to update the database schema
# 1. Create a new function that takes a sqlite3 db connection as an argument.
# 2. Have the function update schema and increase the user version, preferably in a transaction
# 3. Put your function in the upgrade dict in check(). Its key is the schema version it is upgrading from.
# 4. Increase SCHEMA_VERSION at the top of this file
# 5. Submit a pull request!
SCHEMA_VERSION = 4
def create_initial_format(c):
"""Schema ver 0 to 1
create tables for main cache and bugzilla real name cache"""
c.executescript("""BEGIN TRANSACTION;
create table if not exists cache (qs text primary key, ts timestamp, feed text);
create table if not exists bugzillas (id integer primary key, url text unique);
create table if not exists bugzilla_users (email text, name text, ts integer, bz integer, foreign key(bz) references bugzillas(id));
create index if not exists bugzilla_user_ts_index on bugzilla_users (ts asc);
pragma user_version = 1;
END TRANSACTION;""")
def create_bugzilla_email_index(c):
"""Create an index for the monster cache miss query. Rename bugzilla_user -> bugzilla_users"""
c.executescript("""BEGIN TRANSACTION;
drop index if exists bugzilla_user_ts_index;
create index if not exists bugzilla_users_ts_index on bugzilla_users (ts asc);
create index if not exists bugzilla_users_bz_email_index on bugzilla_users (bz, email);
pragma user_version = 2;
END TRANSACTION;""")
def create_twitter_tokens_table(c):
"""Creates a table to store marshalled twitter tokens"""
c.executescript("""BEGIN TRANSACTION;
create table if not exists twitter_tokens (name text unique not null, key text not null, secret text not null);
pragma user_version = 3;
END TRANSACTION;""")
def cache_text_to_blob(c):
"""Change the cache table to store cached feeds as blob"""
c.executescript("""BEGIN TRANSACTION;
drop table if exists cache;
create table if not exists cache (qs text primary key, ts timestamp, feed blob);
pragma user_version = 4;
END TRANSACTION;""")
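# Illustrative sketch of a future migration (hypothetical, not registered): a
# schema ver 4 to 5 upgrade would follow the same pattern -- change the schema
# and bump user_version inside one transaction, then be added to the upgrade
# dict in check() under key 4, with SCHEMA_VERSION raised to 5.
#
#     def add_cache_ts_index(c):
#         """Index the cache table by timestamp (example only)"""
#         c.executescript("""BEGIN TRANSACTION;
#         create index if not exists cache_ts_index on cache (ts asc);
#         pragma user_version = 5;
#         END TRANSACTION;""")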
def check(c):
#XXX there is a race condition here
upgrade = {0: create_initial_format,
1: create_bugzilla_email_index,
2: create_twitter_tokens_table,
3: cache_text_to_blob}
ver = lambda: c.execute("pragma user_version").fetchall()[0][0]
while ver() < SCHEMA_VERSION:
upgrade[ver()](c)
def init():
import sqlite3
conn = sqlite3.connect("cache.sqlite3", detect_types=sqlite3.PARSE_DECLTYPES)
c = conn.cursor()
c.execute("pragma foreign_keys = 1")
check(c)
return conn, c
| gpl-3.0 | -7,401,853,332,173,332,000 | 40.387097 | 132 | 0.721746 | false |
droundy/fac | tests/getting-started.py | 1 | 3162 | #!/usr/bin/python3
import sys, os, re, subprocess
if sys.version_info < (3,5):
print('Please run this script with python 3.5 or newer:', sys.version)
exit(137)
runre = re.compile(r'\[run\]: # \((.+)\)')
shellre = re.compile(r'^ \$ (.+)')
filere = re.compile(r'##### (.+)')
verbre = re.compile(r'^ (.*)')
time_remaining_re = re.compile(r'^Build time remaining: ')
with open(sys.argv[1]) as f:
for line in f:
isfile = filere.findall(line)
isshell = shellre.findall(line)
if len(isfile) > 0:
with open(isfile[0], 'w') as newf:
for line in f:
isverb = verbre.findall(line)
if len(isverb) == 0:
break
newf.write(isverb[0])
newf.write('\n')
print(isfile[0], ':', isverb[0])
elif len(isshell) > 0:
print('shell :', isshell[0])
tocheck = True
if isshell[0][-len('# fails'):] == '# fails':
tocheck = False
print('SHOULD FAIL!')
isshell[0] = isshell[0][:-len('# fails')]
            ret = subprocess.run(isshell[0], shell=True,
                                 stderr=subprocess.STDOUT,
                                 check=tocheck,
                                 stdout=subprocess.PIPE)
if not tocheck and ret.returncode == 0:
print("DID NOT FAIL!!!")
exit(1)
print('output:', ret.stdout)
output = ret.stdout
for outline in output.decode('utf-8').split('\n'):
# The time_remaining_re bit is needed to skip the
# "Build time remaining:" lines that get printed every
# once in a while. These are irregular, which is why
# we need to do this.
if len(outline)>0 and not time_remaining_re.match(outline):
print('output:', outline)
expectedline = f.readline()
if len(verbre.findall(expectedline)) == 0:
print('unexpected output from:', isshell[0])
print('output is', outline)
exit(1)
if expectedline in [' ...', ' ...\n']:
print('I expected random output.')
break
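                    # Escape the regex metacharacters that occur in expected
                    # output, but turn the escaped '...' marker back into a '.*'
                    # wildcard so an expected line can elide variable text.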
expected = verbre.findall(expectedline)[0]
expected = expected.replace('.', r'\.')
expected = expected.replace('*', r'\*')
expected = expected.replace(r'\.\.\.', '.*')
expected = expected.replace('[', r'\[')
expected = expected.replace(']', r'\]')
expected = expected.replace('(', r'\(')
expected = expected.replace(')', r'\)')
if not re.compile(expected).match(outline):
print('I expected:', expected)
print('but instead I got:', outline)
exit(1)
else:
print('input', line.strip())
| gpl-2.0 | 5,235,296,058,472,813,000 | 42.916667 | 75 | 0.448767 | false |
wakalixes/sqldataplot | plugins/pluginFitConstant.py | 1 | 1392 | #--------------------------------------------------
# Revision = $Rev: 13 $
# Date = $Date: 2011-07-31 00:39:24 +0200 (Sun, 31 Jul 2011) $
# Author = $Author: stefan $
#--------------------------------------------------
from pluginInterfaces import PluginFit, Parameter,leastsqFit
import numpy as np
class PluginFitConstant(PluginFit):
def __init__(self):
pass
def fit(self, array, errarray, param, xmin=0, xmax=0, fitAxes=[]):
"""return the data that is needed for plotting the fitting result"""
self.params = [Parameter(v) for v in param]
def f(x): return self.params[0]()
self.simpleFitAllAxes(f, array, errarray, xmin, xmax, fitAxes)
return self.generateDataFromParameters(f,[np.amin(array[0,:]),np.amax(array[0,:])], np.size(fitAxes)+1, xmin, xmax, fitAxes)
def getInitialParameters(self,data):
"""find the best initial values and return them"""
        # a constant fit has a single parameter; any starting value works
        return [1]
def getParameters(self):
"""return the fit parameters"""
return np.array(["C"])
def getFitModelStr(self):
"""return a string of the implemented fitting model, i.e. 'linear fit (y=A*x +B)'"""
return "Constant, y=C"
    def getResultStr(self):
        """return a special result, i.e. 'C = <fitted value>'"""
        if getattr(self, 'params', None):
            return "C = %s" % self.params[0]()
        return "nothing fitted"
| gpl-2.0 | -5,941,994,962,134,405,000 | 34.692308 | 130 | 0.572557 | false |
qedsoftware/commcare-hq | corehq/apps/app_manager/models.py | 1 | 221033 | # coding=utf-8
"""
Application terminology
For any given application, there are a number of different documents.
The primary application document is an instance of Application. This
document's id is what you'll see in the URL on most app manager pages. Primary
application documents should have `copy_of == None` and `is_released ==
False`. When an application is saved, the field `version` is incremented.
When a user makes a build of an application, a copy of the primary
application document is made. These documents are the "versions" you see on
the deploy page. Each build document will have a different id, and the
`copy_of` field will be set to the ID of the primary application document.
Additionally, some attachments such as `profile.xml` and `suite.xml` will be
created and saved to the build doc (see `create_all_files`).
When a build is starred, this is called "releasing" the build. The parameter
`is_released` will be set to True on the build document.
You might also run into remote applications and applications copied to be
published on the exchange, but those are quite infrequent.
"""
import calendar
from distutils.version import LooseVersion
from itertools import chain
import tempfile
import os
import logging
import hashlib
import random
import json
import types
import re
import datetime
import uuid
from collections import defaultdict, namedtuple
from functools import wraps
from copy import deepcopy
from mimetypes import guess_type
from urllib2 import urlopen
from urlparse import urljoin
from couchdbkit import MultipleResultsFound
import itertools
from lxml import etree
from django.core.cache import cache
from django.utils.translation import override, ugettext as _, ugettext
from couchdbkit.exceptions import BadValueError
from corehq.apps.app_manager.suite_xml.utils import get_select_chain
from corehq.apps.app_manager.suite_xml.generator import SuiteGenerator, MediaSuiteGenerator
from corehq.apps.app_manager.xpath_validator import validate_xpath
from corehq.apps.userreports.exceptions import ReportConfigurationNotFoundError
from corehq.util.timezones.utils import get_timezone_for_domain
from dimagi.ext.couchdbkit import *
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from restkit.errors import ResourceError
from couchdbkit.resource import ResourceNotFound
from corehq import toggles, privileges
from corehq.blobs.mixin import BlobMixin
from corehq.const import USER_DATE_FORMAT, USER_TIME_FORMAT
from corehq.apps.analytics.tasks import track_workflow
from corehq.apps.app_manager.feature_support import CommCareFeatureSupportMixin
from corehq.util.quickcache import quickcache
from corehq.util.timezones.conversions import ServerTime
from dimagi.utils.couch import CriticalSection
from django_prbac.exceptions import PermissionDenied
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.app_manager.commcare_settings import check_condition
from corehq.apps.app_manager.const import *
from corehq.apps.app_manager.xpath import (
dot_interpolate,
interpolate_xpath,
LocationXpath,
)
from corehq.apps.builds import get_default_build_spec
from dimagi.utils.couch.cache import cache_core
from dimagi.utils.couch.undo import DeleteRecord, DELETED_SUFFIX
from dimagi.utils.dates import DateSpan
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.make_uuid import random_hex
from dimagi.utils.web import get_url_base, parse_int
import commcare_translations
from corehq.util import bitly
from corehq.util import view_utils
from corehq.apps.appstore.models import SnapshotMixin
from corehq.apps.builds.models import BuildSpec, BuildRecord
from corehq.apps.hqmedia.models import HQMediaMixin
from corehq.apps.translations.models import TranslationMixin
from corehq.apps.users.models import CouchUser
from corehq.apps.users.util import cc_user_domain
from corehq.apps.domain.models import cached_property, Domain
from corehq.apps.app_manager import current_builds, app_strings, remote_app, \
id_strings, commcare_settings
from corehq.apps.app_manager.suite_xml import xml_models as suite_models
from corehq.apps.app_manager.dbaccessors import (
get_app,
get_latest_build_doc,
get_latest_released_app_doc,
domain_has_apps,
)
from corehq.apps.app_manager.util import (
split_path,
save_xform,
ParentCasePropertyBuilder,
is_usercase_in_use,
actions_use_usercase,
update_unique_ids,
app_callout_templates,
xpath_references_case,
xpath_references_user_case,
module_case_hierarchy_has_circular_reference)
from corehq.apps.app_manager.xform import XForm, parse_xml as _parse_xml, \
validate_xform
from corehq.apps.app_manager.templatetags.xforms_extras import trans
from .exceptions import (
AppEditingError,
BlankXFormError,
ConflictingCaseTypeError,
FormNotFoundException,
IncompatibleFormTypeException,
LocationXpathValidationError,
ModuleNotFoundException,
ModuleIdMissingException,
RearrangeError,
SuiteValidationError,
VersioningError,
XFormException,
XFormIdNotUnique,
XFormValidationError,
ScheduleError,
CaseXPathValidationError,
UserCaseXPathValidationError,
)
from corehq.apps.reports.daterange import get_daterange_start_end_dates, get_simple_dateranges
from jsonpath_rw import jsonpath, parse
WORKFLOW_DEFAULT = 'default' # go to the app main screen
WORKFLOW_ROOT = 'root' # go to the module select screen
WORKFLOW_PARENT_MODULE = 'parent_module' # go to the parent module's screen
WORKFLOW_MODULE = 'module' # go to the current module's screen
WORKFLOW_PREVIOUS = 'previous_screen' # go to the previous screen (prior to entering the form)
WORKFLOW_FORM = 'form' # go straight to another form
ALL_WORKFLOWS = [
WORKFLOW_DEFAULT,
WORKFLOW_ROOT,
WORKFLOW_PARENT_MODULE,
WORKFLOW_MODULE,
WORKFLOW_PREVIOUS,
WORKFLOW_FORM,
]
DETAIL_TYPES = ['case_short', 'case_long', 'ref_short', 'ref_long']
FIELD_SEPARATOR = ':'
ATTACHMENT_REGEX = r'[^/]*\.xml'
ANDROID_LOGO_PROPERTY_MAPPING = {
'hq_logo_android_home': 'brand-banner-home',
'hq_logo_android_login': 'brand-banner-login',
}
def jsonpath_update(datum_context, value):
field = datum_context.path.fields[0]
parent = jsonpath.Parent().find(datum_context)[0]
parent.value[field] = value
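# Illustrative usage sketch: given a match produced by jsonpath_rw, e.g.
#
#     match = parse('modules[*].forms[*].unique_id').find(app_doc)[0]
#     jsonpath_update(match, 'new-id')
#
# the matched field is rewritten in place on its parent dict (app_doc is a
# hypothetical plain-dict app document).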
# store a list of references to form IDs so that
# when an app is copied we can update the references
# with the new values
form_id_references = []
def FormIdProperty(expression, **kwargs):
"""
Create a StringProperty that references a form ID. This is necessary because
form IDs change when apps are copied so we need to make sure we update
    any references to them.
:param expression: jsonpath expression that can be used to find the field
:param kwargs: arguments to be passed to the underlying StringProperty
"""
path_expression = parse(expression)
assert isinstance(path_expression, jsonpath.Child), "only child path expressions are supported"
field = path_expression.right
assert len(field.fields) == 1, 'path expression can only reference a single field'
form_id_references.append(path_expression)
return StringProperty(**kwargs)
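# Illustrative usage (mirrors the real declaration on FormLink below):
#
#     form_id = FormIdProperty('modules[*].forms[*].form_links[*].form_id')
#
# The parsed jsonpath lands in form_id_references, so copied apps can rewrite
# stale form ids wherever the expression matches.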
def _rename_key(dct, old, new):
if old in dct:
if new in dct and dct[new]:
dct["%s_backup_%s" % (new, hex(random.getrandbits(32))[2:-1])] = dct[new]
dct[new] = dct[old]
del dct[old]
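# Worked example: _rename_key({'en': 'Yes', 'fr': 'Oui'}, 'en', 'fr') keeps the
# clobbered value under a random backup key, leaving something like
# {'fr': 'Yes', 'fr_backup_1a2b3c4d': 'Oui'}.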
@memoized
def load_app_template(slug):
path = os.path.join(os.path.dirname(__file__), 'static', 'app_manager', 'json', 'template_apps')
with open(os.path.join(path, slug + '.json')) as f:
return json.load(f)
@memoized
def load_case_reserved_words():
with open(os.path.join(os.path.dirname(__file__), 'static', 'app_manager', 'json', 'case-reserved-words.json')) as f:
return json.load(f)
@memoized
def load_form_template(filename):
with open(os.path.join(os.path.dirname(__file__), 'data', filename)) as f:
return f.read()
class IndexedSchema(DocumentSchema):
"""
Abstract class.
Meant for documents that appear in a list within another document
and need to know their own position within that list.
"""
def with_id(self, i, parent):
self._i = i
self._parent = parent
return self
@property
def id(self):
return self._i
def __eq__(self, other):
return other and (self.id == other.id) and (self._parent == other._parent)
class Getter(object):
def __init__(self, attr):
self.attr = attr
def __call__(self, instance):
items = getattr(instance, self.attr)
l = len(items)
for i, item in enumerate(items):
yield item.with_id(i % l, instance)
def __get__(self, instance, owner):
# thanks, http://metapython.blogspot.com/2010/11/python-instance-methods-how-are-they.html
# this makes Getter('foo') act like a bound method
return types.MethodType(self, instance, owner)
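# Illustrative note: Getter is bound as a class attribute so it acts like an
# instance method yielding indexed children, e.g. the real declaration on
# FormSchedule below:
#
#     get_visits = IndexedSchema.Getter('visits')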
class FormActionCondition(DocumentSchema):
"""
The condition under which to open/update/close a case/referral
Either {'type': 'if', 'question': '/xpath/to/node', 'answer': 'value'}
in which case the action takes place if question has answer answer,
or {'type': 'always'} in which case the action always takes place.
"""
type = StringProperty(choices=["if", "always", "never"], default="never")
question = StringProperty()
answer = StringProperty()
operator = StringProperty(choices=['=', 'selected', 'boolean_true'], default='=')
def is_active(self):
return self.type in ('if', 'always')
class FormAction(DocumentSchema):
"""
Corresponds to Case XML
"""
condition = SchemaProperty(FormActionCondition)
def is_active(self):
return self.condition.is_active()
@classmethod
def get_action_paths(cls, action):
if action.condition.type == 'if':
yield action.condition.question
for __, path in cls.get_action_properties(action):
yield path
@classmethod
    def get_action_properties(cls, action):
action_properties = action.properties()
if 'name_path' in action_properties and action.name_path:
yield 'name', action.name_path
if 'case_name' in action_properties:
yield 'name', action.case_name
if 'external_id' in action_properties and action.external_id:
yield 'external_id', action.external_id
if 'update' in action_properties:
for name, path in action.update.items():
yield name, path
if 'case_properties' in action_properties:
for name, path in action.case_properties.items():
yield name, path
if 'preload' in action_properties:
for path, name in action.preload.items():
yield name, path
class UpdateCaseAction(FormAction):
update = DictProperty()
class PreloadAction(FormAction):
preload = DictProperty()
def is_active(self):
return bool(self.preload)
class UpdateReferralAction(FormAction):
followup_date = StringProperty()
def get_followup_date(self):
if self.followup_date:
return "if(date({followup_date}) >= date(today()), {followup_date}, date(today() + 2))".format(
followup_date=self.followup_date,
)
return self.followup_date or "date(today() + 2)"
class OpenReferralAction(UpdateReferralAction):
name_path = StringProperty()
class OpenCaseAction(FormAction):
name_path = StringProperty()
external_id = StringProperty()
class OpenSubCaseAction(FormAction):
case_type = StringProperty()
case_name = StringProperty()
reference_id = StringProperty()
case_properties = DictProperty()
repeat_context = StringProperty()
# relationship = "child" for index to a parent case (default)
# relationship = "extension" for index to a host case
relationship = StringProperty(choices=['child', 'extension'], default='child')
close_condition = SchemaProperty(FormActionCondition)
class FormActions(DocumentSchema):
open_case = SchemaProperty(OpenCaseAction)
update_case = SchemaProperty(UpdateCaseAction)
close_case = SchemaProperty(FormAction)
open_referral = SchemaProperty(OpenReferralAction)
update_referral = SchemaProperty(UpdateReferralAction)
close_referral = SchemaProperty(FormAction)
case_preload = SchemaProperty(PreloadAction)
referral_preload = SchemaProperty(PreloadAction)
load_from_form = SchemaProperty(PreloadAction) # DEPRECATED
usercase_update = SchemaProperty(UpdateCaseAction)
usercase_preload = SchemaProperty(PreloadAction)
subcases = SchemaListProperty(OpenSubCaseAction)
def all_property_names(self):
names = set()
names.update(self.update_case.update.keys())
names.update(self.case_preload.preload.values())
for subcase in self.subcases:
names.update(subcase.case_properties.keys())
return names
class CaseIndex(DocumentSchema):
tag = StringProperty()
reference_id = StringProperty(default='parent')
relationship = StringProperty(choices=['child', 'extension'], default='child')
class AdvancedAction(IndexedSchema):
case_type = StringProperty()
case_tag = StringProperty()
case_properties = DictProperty()
# case_indices = NotImplemented
close_condition = SchemaProperty(FormActionCondition)
__eq__ = DocumentSchema.__eq__
def get_paths(self):
for path in self.case_properties.values():
yield path
if self.close_condition.type == 'if':
yield self.close_condition.question
def get_property_names(self):
return set(self.case_properties.keys())
@property
def is_subcase(self):
return bool(self.case_indices)
@property
def form_element_name(self):
return "case_{}".format(self.case_tag)
class AutoSelectCase(DocumentSchema):
"""
Configuration for auto-selecting a case.
Attributes:
value_source Reference to the source of the value. For mode = fixture,
this represents the FixtureDataType ID. For mode = case
this represents the 'case_tag' for the case.
The modes 'user' and 'raw' don't require a value_source.
value_key The actual field that contains the case ID. Can be a case
index or a user data key or a fixture field name or the raw
xpath expression.
"""
mode = StringProperty(choices=[AUTO_SELECT_USER,
AUTO_SELECT_FIXTURE,
AUTO_SELECT_CASE,
AUTO_SELECT_USERCASE,
AUTO_SELECT_RAW])
value_source = StringProperty()
value_key = StringProperty(required=True)
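# Illustrative configuration sketch (field values are hypothetical):
#
#     AutoSelectCase(mode=AUTO_SELECT_FIXTURE,
#                    value_source='<FixtureDataType id>',
#                    value_key='case_id_column')
#
# This selects the case whose id is found in the named fixture field, per the
# docstring above.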
class LoadCaseFromFixture(DocumentSchema):
"""
fixture_nodeset: FixtureDataType.tag
fixture_tag: name of the column to display in the list
    fixture_variable: boolean if fixture_tag actually contains the key for the localized string
case_property: name of the column whose value should be saved when the user selects an item
arbitrary_datum_*: adds an arbitrary datum with function before the action
"""
fixture_nodeset = StringProperty()
fixture_tag = StringProperty()
fixture_variable = StringProperty()
case_property = StringProperty(default='')
auto_select = BooleanProperty(default=False)
arbitrary_datum_id = StringProperty()
arbitrary_datum_function = StringProperty()
class LoadUpdateAction(AdvancedAction):
"""
details_module: Use the case list configuration from this module to show the cases.
preload: Value from the case to load into the form. Keys are question paths,
values are case properties.
auto_select: Configuration for auto-selecting the case
    load_case_from_fixture: Configuration for loading a case using fixture data
show_product_stock: If True list the product stock using the module's Product List
configuration.
product_program: Only show products for this CommCare Supply program.
"""
details_module = StringProperty()
preload = DictProperty()
auto_select = SchemaProperty(AutoSelectCase, default=None)
load_case_from_fixture = SchemaProperty(LoadCaseFromFixture, default=None)
show_product_stock = BooleanProperty(default=False)
product_program = StringProperty()
case_index = SchemaProperty(CaseIndex)
@property
def case_indices(self):
# Allows us to ducktype AdvancedOpenCaseAction
return [self.case_index] if self.case_index.tag else []
@case_indices.setter
def case_indices(self, value):
if len(value) > 1:
raise ValueError('A LoadUpdateAction cannot have more than one case index')
if value:
self.case_index = value[0]
else:
self.case_index = CaseIndex()
@case_indices.deleter
def case_indices(self):
self.case_index = CaseIndex()
def get_paths(self):
for path in super(LoadUpdateAction, self).get_paths():
yield path
for path in self.preload.keys():
yield path
def get_property_names(self):
names = super(LoadUpdateAction, self).get_property_names()
names.update(self.preload.values())
return names
@property
def case_session_var(self):
return 'case_id_{0}'.format(self.case_tag)
@classmethod
def wrap(cls, data):
if 'parent_tag' in data:
if data['parent_tag']:
data['case_index'] = {
'tag': data['parent_tag'],
'reference_id': data.get('parent_reference_id', 'parent'),
'relationship': data.get('relationship', 'child')
}
del data['parent_tag']
data.pop('parent_reference_id', None)
data.pop('relationship', None)
return super(LoadUpdateAction, cls).wrap(data)
class AdvancedOpenCaseAction(AdvancedAction):
name_path = StringProperty()
repeat_context = StringProperty()
case_indices = SchemaListProperty(CaseIndex)
open_condition = SchemaProperty(FormActionCondition)
def get_paths(self):
for path in super(AdvancedOpenCaseAction, self).get_paths():
yield path
yield self.name_path
if self.open_condition.type == 'if':
yield self.open_condition.question
@property
def case_session_var(self):
return 'case_id_new_{}_{}'.format(self.case_type, self.id)
@classmethod
def wrap(cls, data):
if 'parent_tag' in data:
if data['parent_tag']:
index = {
'tag': data['parent_tag'],
'reference_id': data.get('parent_reference_id', 'parent'),
'relationship': data.get('relationship', 'child')
}
if hasattr(data.get('case_indices'), 'append'):
data['case_indices'].append(index)
else:
data['case_indices'] = [index]
del data['parent_tag']
data.pop('parent_reference_id', None)
data.pop('relationship', None)
return super(AdvancedOpenCaseAction, cls).wrap(data)
class AdvancedFormActions(DocumentSchema):
load_update_cases = SchemaListProperty(LoadUpdateAction)
open_cases = SchemaListProperty(AdvancedOpenCaseAction)
get_load_update_actions = IndexedSchema.Getter('load_update_cases')
get_open_actions = IndexedSchema.Getter('open_cases')
def get_all_actions(self):
return itertools.chain(self.get_load_update_actions(), self.get_open_actions())
def get_subcase_actions(self):
return (a for a in self.get_all_actions() if a.case_indices)
def get_open_subcase_actions(self, parent_case_type=None):
for action in self.open_cases:
if action.case_indices:
if not parent_case_type:
yield action
else:
if any(self.actions_meta_by_tag[case_index.tag]['action'].case_type == parent_case_type
for case_index in action.case_indices):
yield action
def get_case_tags(self):
for action in self.get_all_actions():
yield action.case_tag
def get_action_from_tag(self, tag):
return self.actions_meta_by_tag.get(tag, {}).get('action', None)
@property
def actions_meta_by_tag(self):
return self._action_meta()['by_tag']
@property
def actions_meta_by_parent_tag(self):
return self._action_meta()['by_parent_tag']
@property
def auto_select_actions(self):
return self._action_meta()['by_auto_select_mode']
@memoized
def _action_meta(self):
meta = {
'by_tag': {},
'by_parent_tag': {},
'by_auto_select_mode': {
AUTO_SELECT_USER: [],
AUTO_SELECT_CASE: [],
AUTO_SELECT_FIXTURE: [],
AUTO_SELECT_USERCASE: [],
AUTO_SELECT_RAW: [],
}
}
def add_actions(type, action_list):
for action in action_list:
meta['by_tag'][action.case_tag] = {
'type': type,
'action': action
}
for parent in action.case_indices:
meta['by_parent_tag'][parent.tag] = {
'type': type,
'action': action
}
if type == 'load' and action.auto_select and action.auto_select.mode:
meta['by_auto_select_mode'][action.auto_select.mode].append(action)
add_actions('load', self.get_load_update_actions())
add_actions('open', self.get_open_actions())
return meta
class FormSource(object):
def __get__(self, form, form_cls):
if not form:
return self
unique_id = form.get_unique_id()
app = form.get_app()
filename = "%s.xml" % unique_id
# for backwards compatibility of really old apps
try:
old_contents = form['contents']
except AttributeError:
pass
else:
app.lazy_put_attachment(old_contents, filename)
del form['contents']
try:
source = app.lazy_fetch_attachment(filename)
except ResourceNotFound:
source = ''
return source
def __set__(self, form, value):
unique_id = form.get_unique_id()
app = form.get_app()
filename = "%s.xml" % unique_id
app.lazy_put_attachment(value, filename)
form.validation_cache = None
try:
form.xmlns = form.wrapped_xform().data_node.tag_xmlns
except Exception:
form.xmlns = None
class CachedStringProperty(object):
def __init__(self, key):
self.get_key = key
def __get__(self, instance, owner):
return self.get(self.get_key(instance))
def __set__(self, instance, value):
self.set(self.get_key(instance), value)
@classmethod
def get(cls, key):
return cache.get(key)
@classmethod
def set(cls, key, value):
cache.set(key, value, 7*24*60*60) # cache for 7 days
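# Illustrative note: CachedStringProperty is a descriptor whose cache key is a
# function of the owning instance; see the real validation_cache declaration on
# FormBase below, which keys on the app id and the form's unique_id.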
class ScheduleVisit(IndexedSchema):
"""
due: Days after the anchor date that this visit is due
starts: Days before the due date that this visit is valid from
expires: Days after the due date that this visit is valid until (optional)
repeats: Whether this is a repeat visit (one per form allowed)
increment: Days after the last visit that the repeat visit occurs
"""
due = IntegerProperty()
starts = IntegerProperty()
expires = IntegerProperty()
repeats = BooleanProperty(default=False)
increment = IntegerProperty()
@property
def id(self):
"""Visits are 1-based indexed"""
_id = super(ScheduleVisit, self).id
return _id + 1
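# Worked example: ScheduleVisit(due=10, starts=3, expires=5) is due 10 days
# after the anchor date and is valid from day 7 through day 15; per the 1-based
# id property above, the first visit in a schedule has id 1.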
class FormDatum(DocumentSchema):
name = StringProperty()
xpath = StringProperty()
class FormLink(DocumentSchema):
"""
xpath: xpath condition that must be true in order to open next form
form_id: id of next form to open
"""
xpath = StringProperty()
form_id = FormIdProperty('modules[*].forms[*].form_links[*].form_id')
datums = SchemaListProperty(FormDatum)
class FormSchedule(DocumentSchema):
"""
starts: Days after the anchor date that this schedule starts
expires: Days after the anchor date that this schedule expires (optional)
visits: List of visits in this schedule
allow_unscheduled: Allow unscheduled visits in this schedule
transition_condition: Condition under which we transition to the next phase
termination_condition: Condition under which we terminate the whole schedule
"""
enabled = BooleanProperty(default=True)
starts = IntegerProperty()
expires = IntegerProperty()
allow_unscheduled = BooleanProperty(default=False)
visits = SchemaListProperty(ScheduleVisit)
get_visits = IndexedSchema.Getter('visits')
transition_condition = SchemaProperty(FormActionCondition)
termination_condition = SchemaProperty(FormActionCondition)
class CommentMixin(DocumentSchema):
"""
Documentation comment for app builders and maintainers
"""
comment = StringProperty(default='')
@property
def short_comment(self):
"""
Trim comment to 500 chars (about 100 words)
"""
return self.comment if len(self.comment) <= 500 else self.comment[:497] + '...'
class FormBase(DocumentSchema):
"""
Part of a Managed Application; configuration for a form.
Translates to a second-level menu on the phone
"""
form_type = None
name = DictProperty(unicode)
unique_id = StringProperty()
show_count = BooleanProperty(default=False)
xmlns = StringProperty()
version = IntegerProperty()
source = FormSource()
validation_cache = CachedStringProperty(
lambda self: "cache-%s-%s-validation" % (self.get_app().get_id, self.unique_id)
)
post_form_workflow = StringProperty(
default=WORKFLOW_DEFAULT,
choices=ALL_WORKFLOWS
)
auto_gps_capture = BooleanProperty(default=False)
no_vellum = BooleanProperty(default=False)
form_links = SchemaListProperty(FormLink)
schedule_form_id = StringProperty()
@classmethod
def wrap(cls, data):
data.pop('validation_cache', '')
if cls is FormBase:
doc_type = data['doc_type']
if doc_type == 'Form':
return Form.wrap(data)
elif doc_type == 'AdvancedForm':
return AdvancedForm.wrap(data)
else:
try:
return CareplanForm.wrap(data)
except ValueError:
raise ValueError('Unexpected doc_type for Form', doc_type)
else:
return super(FormBase, cls).wrap(data)
@classmethod
def get_form(cls, form_unique_id, and_app=False):
try:
d = Application.get_db().view(
'app_manager/xforms_index',
key=form_unique_id
).one()
except MultipleResultsFound as e:
raise XFormIdNotUnique(
"xform id '%s' not unique: %s" % (form_unique_id, e)
)
if d:
d = d['value']
else:
raise ResourceNotFound()
        # unpack the dict into variables app_id, unique_id
app_id, unique_id = [d[key] for key in ('app_id', 'unique_id')]
app = Application.get(app_id)
form = app.get_form(unique_id)
if and_app:
return form, app
else:
return form
def pre_delete_hook(self):
raise NotImplementedError()
def pre_move_hook(self, from_module, to_module):
""" Called before a form is moved between modules or to a different position """
raise NotImplementedError()
def wrapped_xform(self):
return XForm(self.source)
def validate_form(self):
vc = self.validation_cache
if vc is None:
# formtranslate requires all attributes to be valid xpaths, but
# vellum namespaced attributes aren't
form = self.wrapped_xform()
form.strip_vellum_ns_attributes()
try:
if form.xml is not None:
validate_xform(etree.tostring(form.xml))
except XFormValidationError as e:
validation_dict = {
"fatal_error": e.fatal_error,
"validation_problems": e.validation_problems,
"version": e.version,
}
vc = self.validation_cache = json.dumps(validation_dict)
else:
vc = self.validation_cache = ""
if vc:
try:
raise XFormValidationError(**json.loads(vc))
except ValueError:
self.validation_cache = None
return self.validate_form()
return self
def validate_for_build(self, validate_module=True):
errors = []
try:
module = self.get_module()
except AttributeError:
module = None
meta = {
'form_type': self.form_type,
'module': module.get_module_info() if module else {},
'form': {"id": self.id if hasattr(self, 'id') else None, "name": self.name}
}
xml_valid = False
if self.source == '':
errors.append(dict(type="blank form", **meta))
else:
try:
_parse_xml(self.source)
xml_valid = True
except XFormException as e:
errors.append(dict(
type="invalid xml",
message=unicode(e) if self.source else '',
**meta
))
except ValueError:
logging.error("Failed: _parse_xml(string=%r)" % self.source)
raise
else:
try:
self.validate_form()
except XFormValidationError as e:
error = {'type': 'validation error', 'validation_message': unicode(e)}
error.update(meta)
errors.append(error)
if self.post_form_workflow == WORKFLOW_FORM:
if not self.form_links:
errors.append(dict(type="no form links", **meta))
for form_link in self.form_links:
try:
self.get_app().get_form(form_link.form_id)
except FormNotFoundException:
errors.append(dict(type='bad form link', **meta))
# this isn't great but two of FormBase's subclasses have form_filter
if hasattr(self, 'form_filter') and self.form_filter:
is_valid, message = validate_xpath(self.form_filter, allow_case_hashtags=True)
if not is_valid:
error = {
'type': 'form filter has xpath error',
'xpath_error': message,
}
error.update(meta)
errors.append(error)
errors.extend(self.extended_build_validation(meta, xml_valid, validate_module))
return errors
def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
"""
Override to perform additional validation during build process.
"""
return []
def get_unique_id(self):
"""
Return unique_id if it exists, otherwise initialize it
Does _not_ force a save, so it's the caller's responsibility to save the app
"""
if not self.unique_id:
self.unique_id = random_hex()
return self.unique_id
def get_app(self):
return self._app
def get_version(self):
return self.version if self.version else self.get_app().version
def add_stuff_to_xform(self, xform, build_profile_id=None):
app = self.get_app()
langs = app.get_build_langs(build_profile_id)
xform.exclude_languages(langs)
xform.set_default_language(langs[0])
xform.normalize_itext()
xform.strip_vellum_ns_attributes()
xform.set_version(self.get_version())
def render_xform(self, build_profile_id=None):
xform = XForm(self.source)
self.add_stuff_to_xform(xform, build_profile_id)
return xform.render()
@quickcache(['self.source', 'langs', 'include_triggers', 'include_groups', 'include_translations'])
def get_questions(self, langs, include_triggers=False,
include_groups=False, include_translations=False):
return XForm(self.source).get_questions(
langs=langs,
include_triggers=include_triggers,
include_groups=include_groups,
include_translations=include_translations,
)
@memoized
def get_case_property_name_formatter(self):
"""Get a function that formats case property names
The returned function requires two arguments
`(case_property_name, data_path)` and returns a string.
"""
try:
valid_paths = {question['value']: question['tag']
for question in self.get_questions(langs=[])}
except XFormException as e:
# punt on invalid xml (sorry, no rich attachments)
valid_paths = {}
def format_key(key, path):
if valid_paths.get(path) == "upload":
return u"{}{}".format(ATTACHMENT_PREFIX, key)
return key
return format_key
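    # Worked example: when saving case property 'photo' from an upload question
    # at /data/photo, the formatter returns ATTACHMENT_PREFIX + 'photo'; keys
    # for non-upload questions pass through unchanged.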
def export_json(self, dump_json=True):
source = self.to_json()
del source['unique_id']
return json.dumps(source) if dump_json else source
def rename_lang(self, old_lang, new_lang):
_rename_key(self.name, old_lang, new_lang)
try:
self.rename_xform_language(old_lang, new_lang)
except XFormException:
pass
def rename_xform_language(self, old_code, new_code):
source = XForm(self.source)
if source.exists():
source.rename_language(old_code, new_code)
source = source.render()
self.source = source
def default_name(self):
app = self.get_app()
return trans(
self.name,
[app.default_language] + app.langs,
include_lang=False
)
@property
def full_path_name(self):
return "%(app_name)s > %(module_name)s > %(form_name)s" % {
'app_name': self.get_app().name,
'module_name': self.get_module().default_name(),
'form_name': self.default_name()
}
@property
def has_fixtures(self):
return 'src="jr://fixture/item-list:' in self.source
def get_auto_gps_capture(self):
app = self.get_app()
if app.build_version and app.enable_auto_gps:
return self.auto_gps_capture or app.auto_gps_capture
else:
return False
def is_registration_form(self, case_type=None):
"""
Should return True if this form passes the following tests:
* does not require a case
* registers a case of type 'case_type' if supplied
"""
raise NotImplementedError()
def uses_usercase(self):
raise NotImplementedError()
def update_app_case_meta(self, app_case_meta):
pass
@property
@memoized
def case_list_modules(self):
case_list_modules = [
mod for mod in self.get_app().get_modules() if mod.case_list_form.form_id == self.unique_id
]
return case_list_modules
@property
def is_case_list_form(self):
return bool(self.case_list_modules)
class IndexedFormBase(FormBase, IndexedSchema, CommentMixin):
def get_app(self):
return self._parent._parent
def get_module(self):
return self._parent
def get_case_type(self):
return self._parent.case_type
def check_case_properties(self, all_names=None, subcase_names=None, case_tag=None):
all_names = all_names or []
subcase_names = subcase_names or []
errors = []
# reserved_words are hard-coded in three different places!
# Here, case-config-ui-*.js, and module_view.html
reserved_words = load_case_reserved_words()
for key in all_names:
try:
validate_property(key)
except ValueError:
errors.append({'type': 'update_case word illegal', 'word': key, 'case_tag': case_tag})
_, key = split_path(key)
if key in reserved_words:
errors.append({'type': 'update_case uses reserved word', 'word': key, 'case_tag': case_tag})
# no parent properties for subcase
for key in subcase_names:
if not re.match(r'^[a-zA-Z][\w_-]*$', key):
errors.append({'type': 'update_case word illegal', 'word': key, 'case_tag': case_tag})
return errors
def check_paths(self, paths):
errors = []
try:
questions = self.get_questions(langs=[], include_triggers=True, include_groups=True)
valid_paths = {question['value']: question['tag'] for question in questions}
except XFormException as e:
errors.append({'type': 'invalid xml', 'message': unicode(e)})
else:
no_multimedia = not self.get_app().enable_multimedia_case_property
for path in set(paths):
if path not in valid_paths:
errors.append({'type': 'path error', 'path': path})
elif no_multimedia and valid_paths[path] == "upload":
errors.append({'type': 'multimedia case property not supported', 'path': path})
return errors
def add_property_save(self, app_case_meta, case_type, name,
questions, question_path, condition=None):
if question_path in questions:
app_case_meta.add_property_save(
case_type,
name,
self.unique_id,
questions[question_path],
condition
)
else:
app_case_meta.add_property_error(
case_type,
name,
self.unique_id,
"%s is not a valid question" % question_path
)
def add_property_load(self, app_case_meta, case_type, name,
questions, question_path):
if question_path in questions:
app_case_meta.add_property_load(
case_type,
name,
self.unique_id,
questions[question_path]
)
else:
app_case_meta.add_property_error(
case_type,
name,
self.unique_id,
"%s is not a valid question" % question_path
)
class JRResourceProperty(StringProperty):
def validate(self, value, required=True):
super(JRResourceProperty, self).validate(value, required)
if value is not None and not value.startswith('jr://'):
            raise BadValueError("JR Resources must start with 'jr://'")
return value
class NavMenuItemMediaMixin(DocumentSchema):
"""
Language-specific icon and audio.
Properties are map of lang-code to filepath
"""
media_image = SchemaDictProperty(JRResourceProperty)
media_audio = SchemaDictProperty(JRResourceProperty)
@classmethod
def wrap(cls, data):
# ToDo - Remove after migration
for media_attr in ('media_image', 'media_audio'):
old_media = data.get(media_attr, None)
if old_media and isinstance(old_media, basestring):
new_media = {'default': old_media}
data[media_attr] = new_media
return super(NavMenuItemMediaMixin, cls).wrap(data)
def _get_media_by_language(self, media_attr, lang, strict=False):
"""
Return media-path for given language if one exists, else 1st path in the
sorted lang->media-path list
*args:
media_attr: one of 'media_image' or 'media_audio'
lang: language code
**kwargs:
strict: whether to return None if media-path is not set for lang or
to return first path in sorted lang->media-path list
"""
assert media_attr in ('media_image', 'media_audio')
media_dict = getattr(self, media_attr)
if not media_dict:
return None
if media_dict.get(lang, ''):
return media_dict[lang]
if not strict:
# if the queried lang key doesn't exist,
# return the first in the sorted list
for lang, item in sorted(media_dict.items()):
return item
@property
def default_media_image(self):
# For older apps that were migrated
return self.icon_by_language('default')
@property
def default_media_audio(self):
# For older apps that were migrated
return self.audio_by_language('default')
def icon_by_language(self, lang, strict=False):
return self._get_media_by_language('media_image', lang, strict=strict)
def audio_by_language(self, lang, strict=False):
return self._get_media_by_language('media_audio', lang, strict=strict)
def _set_media(self, media_attr, lang, media_path):
"""
Caller's responsibility to save doc.
        Currently only called from the view, which saves after all edits.
"""
assert media_attr in ('media_image', 'media_audio')
media_dict = getattr(self, media_attr) or {}
media_dict[lang] = media_path or ''
setattr(self, media_attr, media_dict)
def set_icon(self, lang, icon_path):
self._set_media('media_image', lang, icon_path)
def set_audio(self, lang, audio_path):
self._set_media('media_audio', lang, audio_path)
def _all_media_paths(self, media_attr):
assert media_attr in ('media_image', 'media_audio')
media_dict = getattr(self, media_attr) or {}
valid_media_paths = {media for media in media_dict.values() if media}
return list(valid_media_paths)
def all_image_paths(self):
return self._all_media_paths('media_image')
def all_audio_paths(self):
return self._all_media_paths('media_audio')
def icon_app_string(self, lang, for_default=False):
"""
Return lang/app_strings.txt translation for given lang
if a path exists for the lang
**kwargs:
for_default: whether app_string is for default/app_strings.txt
"""
if not for_default and self.icon_by_language(lang, strict=True):
return self.icon_by_language(lang, strict=True)
if for_default:
return self.icon_by_language(lang, strict=False)
def audio_app_string(self, lang, for_default=False):
"""
see note on self.icon_app_string
"""
if not for_default and self.audio_by_language(lang, strict=True):
return self.audio_by_language(lang, strict=True)
if for_default:
return self.audio_by_language(lang, strict=False)
class Form(IndexedFormBase, NavMenuItemMediaMixin):
form_type = 'module_form'
form_filter = StringProperty()
requires = StringProperty(choices=["case", "referral", "none"], default="none")
actions = SchemaProperty(FormActions)
case_references_data = DictProperty()
def add_stuff_to_xform(self, xform, build_profile_id=None):
super(Form, self).add_stuff_to_xform(xform, build_profile_id)
xform.add_case_and_meta(self)
def all_other_forms_require_a_case(self):
m = self.get_module()
return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])
def session_var_for_action(self, action):
module_case_type = self.get_module().case_type
if action == 'open_case':
return 'case_id_new_{}_0'.format(module_case_type)
if isinstance(action, OpenSubCaseAction):
subcase_type = action.case_type
subcase_index = self.actions.subcases.index(action)
opens_case = 'open_case' in self.active_actions()
if opens_case:
subcase_index += 1
return 'case_id_new_{}_{}'.format(subcase_type, subcase_index)
def _get_active_actions(self, types):
actions = {}
for action_type in types:
a = getattr(self.actions, action_type)
if isinstance(a, list):
if a:
actions[action_type] = a
elif a.is_active():
actions[action_type] = a
return actions
@memoized
def get_action_type(self):
if self.actions.close_case.condition.is_active():
return 'close'
elif (self.actions.open_case.condition.is_active() or
self.actions.subcases):
return 'open'
elif self.actions.update_case.condition.is_active():
return 'update'
else:
return 'none'
@memoized
def get_icon_help_text(self):
messages = []
if self.actions.open_case.condition.is_active():
messages.append(_('This form opens a {}').format(self.get_module().case_type))
if self.actions.subcases:
messages.append(_('This form opens a subcase {}').format(', '.join(self.get_subcase_types())))
if self.actions.close_case.condition.is_active():
messages.append(_('This form closes a {}').format(self.get_module().case_type))
elif self.requires_case():
messages.append(_('This form updates a {}').format(self.get_module().case_type))
return '. '.join(messages)
def active_actions(self):
self.get_app().assert_app_v2()
if self.requires == 'none':
action_types = (
'open_case', 'update_case', 'close_case', 'subcases',
'usercase_update', 'usercase_preload',
)
elif self.requires == 'case':
action_types = (
'update_case', 'close_case', 'case_preload', 'subcases',
'usercase_update', 'usercase_preload', 'load_from_form',
)
else:
# this is left around for legacy migrated apps
action_types = (
'open_case', 'update_case', 'close_case',
'case_preload', 'subcases',
'usercase_update', 'usercase_preload',
)
return self._get_active_actions(action_types)
def active_non_preloader_actions(self):
return self._get_active_actions((
'open_case', 'update_case', 'close_case',
'open_referral', 'update_referral', 'close_referral'))
def check_actions(self):
errors = []
subcase_names = set()
for subcase_action in self.actions.subcases:
if not subcase_action.case_type:
errors.append({'type': 'subcase has no case type'})
subcase_names.update(subcase_action.case_properties)
if self.requires == 'none' and self.actions.open_case.is_active() \
and not self.actions.open_case.name_path:
errors.append({'type': 'case_name required'})
errors.extend(self.check_case_properties(
all_names=self.actions.all_property_names(),
subcase_names=subcase_names
))
def generate_paths():
for action in self.active_actions().values():
if isinstance(action, list):
actions = action
else:
actions = [action]
for action in actions:
for path in FormAction.get_action_paths(action):
yield path
errors.extend(self.check_paths(generate_paths()))
return errors
def requires_case(self):
# all referrals also require cases
return self.requires in ("case", "referral")
def requires_case_type(self):
return self.requires_case() or \
bool(self.active_non_preloader_actions())
def requires_referral(self):
return self.requires == "referral"
def uses_parent_case(self):
"""
Returns True if any of the load/update properties references the
parent case; False otherwise
"""
return any([name.startswith('parent/')
for name in self.actions.all_property_names()])
def get_registration_actions(self, case_type):
"""
:return: List of actions that create a case. Subcase actions are included
as long as they are not inside a repeat. If case_type is not None
only return actions that create a case of the specified type.
"""
reg_actions = []
if 'open_case' in self.active_actions() and (not case_type or self.get_module().case_type == case_type):
reg_actions.append('open_case')
subcase_actions = [action for action in self.actions.subcases if not action.repeat_context]
if case_type:
subcase_actions = [a for a in subcase_actions if a.case_type == case_type]
reg_actions.extend(subcase_actions)
return reg_actions
def is_registration_form(self, case_type=None):
reg_actions = self.get_registration_actions(case_type)
return len(reg_actions) == 1
def uses_usercase(self):
return actions_use_usercase(self.active_actions())
def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
errors = []
if xml_valid:
for error in self.check_actions():
error.update(error_meta)
errors.append(error)
if validate_module:
needs_case_type = False
needs_case_detail = False
needs_referral_detail = False
if self.requires_case():
needs_case_detail = True
needs_case_type = True
if self.requires_case_type():
needs_case_type = True
if self.requires_referral():
needs_referral_detail = True
errors.extend(self.get_module().get_case_errors(
needs_case_type=needs_case_type,
needs_case_detail=needs_case_detail,
needs_referral_detail=needs_referral_detail,
))
return errors
def get_case_updates(self, case_type):
# This method is used by both get_all_case_properties and
# get_usercase_properties. In the case of usercase properties, use
# the usercase_update action, and for normal cases, use the
# update_case action
if case_type == self.get_module().case_type or case_type == USERCASE_TYPE:
format_key = self.get_case_property_name_formatter()
action = self.actions.usercase_update if case_type == USERCASE_TYPE else self.actions.update_case
return [format_key(*item) for item in action.update.items()]
return []
@memoized
def get_subcase_types(self):
'''
        Return the set of case types for which this Form opens a new subcase.
:return:
'''
return {subcase.case_type for subcase in self.actions.subcases
if subcase.close_condition.type == "never" and subcase.case_type}
@property
def case_references(self):
refs = self.case_references_data or {}
if "load" not in refs and self.actions.load_from_form.preload:
# for backward compatibility
# preload only has one reference per question path
preload = self.actions.load_from_form.preload
refs["load"] = {key: [value] for key, value in preload.iteritems()}
return refs
@case_references.setter
def case_references(self, refs):
"""Set case references
format: {"load": {"/data/path": ["case_property", ...], ...}}
"""
self.case_references_data = refs
if self.actions.load_from_form.preload:
self.actions.load_from_form = PreloadAction()
@memoized
def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
parent_types = set()
case_properties = set()
for subcase in self.actions.subcases:
if subcase.case_type == case_type:
case_properties.update(
subcase.case_properties.keys()
)
if case_type != module_case_type and (
self.actions.open_case.is_active() or
self.actions.update_case.is_active() or
self.actions.close_case.is_active()):
parent_types.add((module_case_type, subcase.reference_id or 'parent'))
return parent_types, case_properties
def update_app_case_meta(self, app_case_meta):
from corehq.apps.reports.formdetails.readable import FormQuestionResponse
questions = {
q['value']: FormQuestionResponse(q)
for q in self.get_questions(self.get_app().langs, include_triggers=True,
include_groups=True, include_translations=True)
}
module_case_type = self.get_module().case_type
type_meta = app_case_meta.get_type(module_case_type)
for type_, action in self.active_actions().items():
if type_ == 'open_case':
type_meta.add_opener(self.unique_id, action.condition)
self.add_property_save(
app_case_meta,
module_case_type,
'name',
questions,
action.name_path
)
if type_ == 'close_case':
type_meta.add_closer(self.unique_id, action.condition)
if type_ == 'update_case' or type_ == 'usercase_update':
for name, question_path in FormAction.get_action_properties(action):
self.add_property_save(
app_case_meta,
USERCASE_TYPE if type_ == 'usercase_update' else module_case_type,
name,
questions,
question_path
)
if type_ == 'case_preload' or type_ == 'load_from_form' or type_ == 'usercase_preload':
for name, question_path in FormAction.get_action_properties(action):
self.add_property_load(
app_case_meta,
USERCASE_TYPE if type_ == 'usercase_preload' else module_case_type,
name,
questions,
question_path
)
if type_ == 'subcases':
for act in action:
if act.is_active():
sub_type_meta = app_case_meta.get_type(act.case_type)
sub_type_meta.add_opener(self.unique_id, act.condition)
if act.close_condition.is_active():
sub_type_meta.add_closer(self.unique_id, act.close_condition)
for name, question_path in FormAction.get_action_properties(act):
self.add_property_save(
app_case_meta,
act.case_type,
name,
questions,
question_path
)
case_loads = self.case_references.get("load", {})
for question_path, case_properties in case_loads.iteritems():
for name in case_properties:
self.add_property_load(
app_case_meta,
module_case_type,
name,
questions,
question_path
)
class MappingItem(DocumentSchema):
key = StringProperty()
# lang => localized string
value = DictProperty()
@property
def treat_as_expression(self):
"""
Returns if whether the key can be treated as a valid expression that can be included in
condition-predicate of an if-clause for e.g. if(<expression>, value, ...)
"""
special_chars = '{}()[]=<>."\'/'
return any(special_char in self.key for special_char in special_chars)
@property
def key_as_variable(self):
"""
Return an xml variable name to represent this key.
If the key contains spaces or a condition-predicate of an if-clause,
return a hash of the key with "h" prepended.
If not, return the key with "k" prepended.
The prepended characters prevent the variable name from starting with a
numeral, which is illegal.
"""
if ' ' in self.key or self.treat_as_expression:
return 'h{hash}'.format(hash=hashlib.md5(self.key).hexdigest()[:8])
else:
return 'k{key}'.format(key=self.key)
def key_as_condition(self, property):
if self.treat_as_expression:
condition = dot_interpolate(self.key, property)
return u"{condition}".format(condition=condition)
else:
return u"{property} = '{key}'".format(
property=property,
key=self.key
)
    def ref_to_key_variable(self, index, sort_or_display):
        if sort_or_display == "sort":
            key_as_var = "{}, ".format(index)
        elif sort_or_display == "display":
            key_as_var = "${var_name}, ".format(var_name=self.key_as_variable)
        else:
            raise ValueError("sort_or_display must be 'sort' or 'display'")
        return key_as_var
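# Worked examples per the rules above: MappingItem(key='male').key_as_variable
# is 'kmale', while a key with spaces or expression characters (e.g.
# 'status = "open"') becomes 'h' plus the first 8 hex digits of its md5 hash.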
class GraphAnnotations(IndexedSchema):
display_text = DictProperty()
x = StringProperty()
y = StringProperty()
class GraphSeries(DocumentSchema):
config = DictProperty()
locale_specific_config = DictProperty()
data_path = StringProperty()
x_function = StringProperty()
y_function = StringProperty()
radius_function = StringProperty()
class GraphConfiguration(DocumentSchema):
config = DictProperty()
locale_specific_config = DictProperty()
annotations = SchemaListProperty(GraphAnnotations)
graph_type = StringProperty()
series = SchemaListProperty(GraphSeries)
class DetailTab(IndexedSchema):
"""
Represents a tab in the case detail screen on the phone.
Each tab is itself a detail, nested inside the app's "main" detail.
"""
header = DictProperty()
# The first index, of all fields in the parent detail, that belongs to this tab
starting_index = IntegerProperty()
# A tab may be associated with a nodeset, resulting in a detail that
# iterates through sub-nodes of an entity rather than a single entity
has_nodeset = BooleanProperty(default=False)
nodeset = StringProperty()
class DetailColumn(IndexedSchema):
"""
Represents a column in case selection screen on the phone. Ex:
{
'header': {'en': 'Sex', 'por': 'Sexo'},
'model': 'case',
'field': 'sex',
'format': 'enum',
'xpath': '.',
'enum': [
            {'key': 'm', 'value': {'en': 'Male', 'por': 'Macho'}},
            {'key': 'f', 'value': {'en': 'Female', 'por': 'Fêmea'}},
],
}
"""
header = DictProperty()
model = StringProperty()
field = StringProperty()
format = StringProperty()
enum = SchemaListProperty(MappingItem)
graph_configuration = SchemaProperty(GraphConfiguration)
case_tile_field = StringProperty()
late_flag = IntegerProperty(default=30)
advanced = StringProperty(default="")
calc_xpath = StringProperty(default=".")
filter_xpath = StringProperty(default="")
time_ago_interval = FloatProperty(default=365.25)
@property
def enum_dict(self):
"""for backwards compatibility with building 1.0 apps"""
import warnings
warnings.warn('You should not use enum_dict. Use enum instead',
DeprecationWarning)
return dict((item.key, item.value) for item in self.enum)
def rename_lang(self, old_lang, new_lang):
for dct in [self.header] + [item.value for item in self.enum]:
_rename_key(dct, old_lang, new_lang)
@property
def field_type(self):
if FIELD_SEPARATOR in self.field:
return self.field.split(FIELD_SEPARATOR, 1)[0]
else:
return 'property' # equivalent to property:parent/case_property
@property
def field_property(self):
if FIELD_SEPARATOR in self.field:
return self.field.split(FIELD_SEPARATOR, 1)[1]
else:
return self.field
class TimeAgoInterval(object):
map = {
'day': 1.0,
'week': 7.0,
'month': 30.4375,
'year': 365.25
}
@classmethod
def get_from_old_format(cls, format):
if format == 'years-ago':
return cls.map['year']
elif format == 'months-ago':
return cls.map['month']
@classmethod
def wrap(cls, data):
if data.get('format') in ('months-ago', 'years-ago'):
data['time_ago_interval'] = cls.TimeAgoInterval.get_from_old_format(data['format'])
data['format'] = 'time-ago'
# Lazy migration: enum used to be a dict, now is a list
if isinstance(data.get('enum'), dict):
data['enum'] = sorted({'key': key, 'value': value}
for key, value in data['enum'].items())
return super(DetailColumn, cls).wrap(data)
@classmethod
def from_json(cls, data):
from corehq.apps.app_manager.views.media_utils import interpolate_media_path
to_ret = cls.wrap(data)
if to_ret.format == 'enum-image':
# interpolate icons-paths
for item in to_ret.enum:
for lang, path in item.value.iteritems():
item.value[lang] = interpolate_media_path(path)
return to_ret
class SortElement(IndexedSchema):
field = StringProperty()
type = StringProperty()
direction = StringProperty()
display = DictProperty()
def has_display_values(self):
return any(s.strip() != '' for s in self.display.values())
class CaseListLookupMixin(DocumentSchema):
"""
Allows for the addition of Android Callouts to do lookups from the CaseList
<lookup action="" image="" name="">
<extra key="" value="" />
<response key="" />
<field>
<header><text><locale id=""/></text></header>
<template><text><xpath function=""/></text></template>
</field>
</lookup>
"""
lookup_enabled = BooleanProperty(default=False)
lookup_autolaunch = BooleanProperty(default=False)
lookup_action = StringProperty()
lookup_name = StringProperty()
lookup_image = JRResourceProperty(required=False)
lookup_extras = SchemaListProperty()
lookup_responses = SchemaListProperty()
lookup_display_results = BooleanProperty(default=False) # Display callout results in case list?
lookup_field_header = DictProperty()
lookup_field_template = StringProperty()
class Detail(IndexedSchema, CaseListLookupMixin):
"""
Full configuration for a case selection screen
"""
display = StringProperty(choices=['short', 'long'])
columns = SchemaListProperty(DetailColumn)
get_columns = IndexedSchema.Getter('columns')
tabs = SchemaListProperty(DetailTab)
get_tabs = IndexedSchema.Getter('tabs')
sort_elements = SchemaListProperty(SortElement)
filter = StringProperty()
# If True, a small tile will display the case name after selection.
persist_case_context = BooleanProperty()
persistent_case_context_xml = StringProperty(default='case_name')
# Custom variables to add into the <variables /> node
custom_variables = StringProperty()
# If True, use case tiles in the case list
use_case_tiles = BooleanProperty()
    # If given, use this string for the case tile markup instead of the default template
custom_xml = StringProperty()
persist_tile_on_forms = BooleanProperty()
# If True, the in form tile can be pulled down to reveal all the case details.
pull_down_tile = BooleanProperty()
def get_tab_spans(self):
'''
        Return the starting and ending indices into self.columns delimiting
        the columns that should be in each tab.
:return:
'''
tabs = list(self.get_tabs())
ret = []
for tab in tabs:
try:
end = tabs[tab.id + 1].starting_index
except IndexError:
end = len(self.columns)
ret.append((tab.starting_index, end))
return ret
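    # Illustrative example: with two tabs whose starting_index values are 0 and
    # 2, and four columns in total, get_tab_spans() returns [(0, 2), (2, 4)].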
@parse_int([1])
def get_column(self, i):
return self.columns[i].with_id(i % len(self.columns), self)
def rename_lang(self, old_lang, new_lang):
for column in self.columns:
column.rename_lang(old_lang, new_lang)
class CaseList(IndexedSchema, NavMenuItemMediaMixin):
label = DictProperty()
show = BooleanProperty(default=False)
def rename_lang(self, old_lang, new_lang):
_rename_key(self.label, old_lang, new_lang)
class CaseSearchProperty(DocumentSchema):
"""
Case properties available to search on.
"""
name = StringProperty()
label = DictProperty()
class DefaultCaseSearchProperty(DocumentSchema):
"""Case Properties with fixed value to search on"""
property = StringProperty()
default_value = StringProperty()
class CaseSearch(DocumentSchema):
"""
Properties and search command label
"""
command_label = DictProperty(default={'en': 'Search All Cases'})
properties = SchemaListProperty(CaseSearchProperty)
relevant = StringProperty(default=CLAIM_DEFAULT_RELEVANT_CONDITION)
include_closed = BooleanProperty(default=False)
default_properties = SchemaListProperty(DefaultCaseSearchProperty)
class ParentSelect(DocumentSchema):
active = BooleanProperty(default=False)
relationship = StringProperty(default='parent')
module_id = StringProperty()
class FixtureSelect(DocumentSchema):
"""
Configuration for creating a details screen from a fixture which can be used to pre-filter
cases prior to displaying the case list.
fixture_type: FixtureDataType.tag
display_column: name of the column to display in the list
localize: boolean if display_column actually contains the key for the localized string
variable_column: name of the column whose value should be saved when the user selects an item
xpath: xpath expression to use as the case filter
"""
active = BooleanProperty(default=False)
fixture_type = StringProperty()
display_column = StringProperty()
localize = BooleanProperty(default=False)
variable_column = StringProperty()
xpath = StringProperty(default='')
class DetailPair(DocumentSchema):
short = SchemaProperty(Detail)
long = SchemaProperty(Detail)
@classmethod
def wrap(cls, data):
self = super(DetailPair, cls).wrap(data)
self.short.display = 'short'
self.long.display = 'long'
return self
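    # Illustrative note: wrap() normalizes the pair, so after wrapping any
    # stored doc, pair.short.display == 'short' and pair.long.display == 'long'.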
class CaseListForm(NavMenuItemMediaMixin):
form_id = FormIdProperty('modules[*].case_list_form.form_id')
label = DictProperty()
def rename_lang(self, old_lang, new_lang):
_rename_key(self.label, old_lang, new_lang)
class ModuleBase(IndexedSchema, NavMenuItemMediaMixin, CommentMixin):
name = DictProperty(unicode)
unique_id = StringProperty()
case_type = StringProperty()
case_list_form = SchemaProperty(CaseListForm)
module_filter = StringProperty()
root_module_id = StringProperty()
fixture_select = SchemaProperty(FixtureSelect)
auto_select_case = BooleanProperty(default=False)
@property
def is_surveys(self):
return self.case_type == ""
@classmethod
def wrap(cls, data):
if cls is ModuleBase:
doc_type = data['doc_type']
if doc_type == 'Module':
return Module.wrap(data)
elif doc_type == 'CareplanModule':
return CareplanModule.wrap(data)
elif doc_type == 'AdvancedModule':
return AdvancedModule.wrap(data)
elif doc_type == 'ReportModule':
return ReportModule.wrap(data)
elif doc_type == 'ShadowModule':
return ShadowModule.wrap(data)
else:
raise ValueError('Unexpected doc_type for Module', doc_type)
else:
return super(ModuleBase, cls).wrap(data)
def get_or_create_unique_id(self):
"""
It is the caller's responsibility to save the Application
after calling this function.
WARNING: If called on the same doc in different requests without saving,
this function will return a different uuid each time,
likely causing unexpected behavior
"""
if not self.unique_id:
self.unique_id = random_hex()
return self.unique_id
get_forms = IndexedSchema.Getter('forms')
get_suite_forms = IndexedSchema.Getter('forms')
@parse_int([1])
def get_form(self, i):
try:
return self.forms[i].with_id(i % len(self.forms), self)
except IndexError:
raise FormNotFoundException()
def get_child_modules(self):
return [
module for module in self.get_app().get_modules()
if module.unique_id != self.unique_id and getattr(module, 'root_module_id', None) == self.unique_id
]
@property
def root_module(self):
if self.root_module_id:
return self._parent.get_module_by_unique_id(self.root_module_id,
error=_("Could not find parent menu for '{}'").format(self.default_name()))
def requires_case_details(self):
return False
def get_case_types(self):
return set([self.case_type])
def get_module_info(self):
return {
'id': self.id,
'name': self.name,
}
def get_app(self):
return self._parent
def default_name(self):
app = self.get_app()
return trans(
self.name,
[app.default_language] + app.langs,
include_lang=False
)
def rename_lang(self, old_lang, new_lang):
_rename_key(self.name, old_lang, new_lang)
for form in self.get_forms():
form.rename_lang(old_lang, new_lang)
for _, detail, _ in self.get_details():
detail.rename_lang(old_lang, new_lang)
def validate_detail_columns(self, columns):
from corehq.apps.app_manager.suite_xml.const import FIELD_TYPE_LOCATION
from corehq.apps.locations.util import parent_child
hierarchy = None
for column in columns:
if column.field_type == FIELD_TYPE_LOCATION:
hierarchy = hierarchy or parent_child(self.get_app().domain)
try:
LocationXpath('').validate(column.field_property, hierarchy)
                except LocationXpathValidationError as e:
yield {
'type': 'invalid location xpath',
'details': unicode(e),
'module': self.get_module_info(),
'column': column,
}
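    # Illustrative usage (hypothetical caller): errors are yielded lazily, so
    #     errors = list(self.validate_detail_columns(detail.columns))
    # collects dicts like {'type': 'invalid location xpath', ...} for any
    # location column whose xpath fails validation.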
def get_form_by_unique_id(self, unique_id):
for form in self.get_forms():
if form.get_unique_id() == unique_id:
return form
def validate_for_build(self):
errors = []
needs_case_detail = self.requires_case_details()
        needs_case_type = needs_case_detail or any(
            f.is_registration_form() for f in self.get_forms())
if needs_case_detail or needs_case_type:
errors.extend(self.get_case_errors(
needs_case_type=needs_case_type,
needs_case_detail=needs_case_detail
))
if self.case_list_form.form_id:
try:
form = self.get_app().get_form(self.case_list_form.form_id)
except FormNotFoundException:
errors.append({
'type': 'case list form missing',
'module': self.get_module_info()
})
else:
if not form.is_registration_form(self.case_type):
errors.append({
'type': 'case list form not registration',
'module': self.get_module_info(),
'form': form,
})
if self.module_filter:
is_valid, message = validate_xpath(self.module_filter)
if not is_valid:
errors.append({
'type': 'module filter has xpath error',
'xpath_error': message,
'module': self.get_module_info(),
})
return errors
@memoized
def get_subcase_types(self):
'''
Return a set of each case type for which this module has a form that
opens a new subcase of that type.
'''
subcase_types = set()
for form in self.get_forms():
if hasattr(form, 'get_subcase_types'):
subcase_types.update(form.get_subcase_types())
return subcase_types
def get_custom_entries(self):
"""
By default, suite entries are configured by forms, but you can also provide custom
entries by overriding this function.
See ReportModule for an example
"""
return []
def uses_media(self):
"""
Whether the module uses media. If this returns false then media will not be generated
for the module.
"""
return True
def uses_usercase(self):
return False
def add_insert_form(self, from_module, form, index=None, with_source=False):
raise IncompatibleFormTypeException()
class ModuleDetailsMixin(object):
@classmethod
def wrap_details(cls, data):
if 'details' in data:
try:
case_short, case_long, ref_short, ref_long = data['details']
except ValueError:
# "need more than 0 values to unpack"
pass
else:
data['case_details'] = {
'short': case_short,
'long': case_long,
}
data['ref_details'] = {
'short': ref_short,
'long': ref_long,
}
finally:
del data['details']
return data
@property
def case_list_filter(self):
try:
return self.case_details.short.filter
except AttributeError:
return None
@property
def detail_sort_elements(self):
try:
return self.case_details.short.sort_elements
except Exception:
return []
    def rename_lang(self, old_lang, new_lang):
        # Call ModuleBase directly rather than super(Module, self): this mixin
        # is also used by ShadowModule, which is not a subclass of Module.
        ModuleBase.rename_lang(self, old_lang, new_lang)
        for case_list in (self.case_list, self.referral_list):
            case_list.rename_lang(old_lang, new_lang)
def export_json(self, dump_json=True, keep_unique_id=False):
source = self.to_json()
if not keep_unique_id:
for form in source['forms']:
del form['unique_id']
return json.dumps(source) if dump_json else source
def get_details(self):
return (
('case_short', self.case_details.short, True),
('case_long', self.case_details.long, True),
('ref_short', self.ref_details.short, False),
('ref_long', self.ref_details.long, False),
)
def validate_details_for_build(self):
errors = []
for sort_element in self.detail_sort_elements:
try:
validate_detail_screen_field(sort_element.field)
except ValueError:
errors.append({
'type': 'invalid sort field',
'field': sort_element.field,
'module': self.get_module_info(),
})
if self.case_list_filter:
try:
case_list_filter = interpolate_xpath(self.case_list_filter)
etree.XPath(case_list_filter)
except (etree.XPathSyntaxError, CaseXPathValidationError):
errors.append({
'type': 'invalid filter xpath',
'module': self.get_module_info(),
'filter': self.case_list_filter,
})
for detail in [self.case_details.short, self.case_details.long]:
if detail.use_case_tiles:
                if detail.display != "short":
errors.append({
'type': "invalid tile configuration",
'module': self.get_module_info(),
'reason': _('Case tiles may only be used for the case list (not the case details).')
})
col_by_tile_field = {c.case_tile_field: c for c in detail.columns}
for field in ["header", "top_left", "sex", "bottom_left", "date"]:
if field not in col_by_tile_field:
errors.append({
'type': "invalid tile configuration",
'module': self.get_module_info(),
                            'reason': _('A case property must be assigned to the "{}" tile field.').format(field)
})
return errors
def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
module_info = self.get_module_info()
if needs_case_type and not self.case_type:
yield {
'type': 'no case type',
'module': module_info,
}
if needs_case_detail:
if not self.case_details.short.columns:
yield {
'type': 'no case detail',
'module': module_info,
}
columns = self.case_details.short.columns + self.case_details.long.columns
errors = self.validate_detail_columns(columns)
for error in errors:
yield error
if needs_referral_detail and not self.ref_details.short.columns:
yield {
'type': 'no ref detail',
'module': module_info,
}
class Module(ModuleBase, ModuleDetailsMixin):
"""
A group of related forms, and configuration that applies to them all.
Translates to a top-level menu on the phone.
"""
module_type = 'basic'
case_label = DictProperty()
referral_label = DictProperty()
forms = SchemaListProperty(Form)
case_details = SchemaProperty(DetailPair)
ref_details = SchemaProperty(DetailPair)
put_in_root = BooleanProperty(default=False)
case_list = SchemaProperty(CaseList)
referral_list = SchemaProperty(CaseList)
task_list = SchemaProperty(CaseList)
parent_select = SchemaProperty(ParentSelect)
search_config = SchemaProperty(CaseSearch)
display_style = StringProperty(default='list')
@classmethod
def wrap(cls, data):
data = cls.wrap_details(data)
return super(Module, cls).wrap(data)
@classmethod
def new_module(cls, name, lang):
detail = Detail(
columns=[DetailColumn(
format='plain',
header={(lang or 'en'): ugettext("Name")},
field='name',
model='case',
)]
)
module = Module(
name={(lang or 'en'): name or ugettext("Untitled Module")},
forms=[],
case_type='',
case_details=DetailPair(
short=Detail(detail.to_json()),
long=Detail(detail.to_json()),
),
case_label={(lang or 'en'): 'Cases'},
)
module.get_or_create_unique_id()
return module
def new_form(self, name, lang, attachment=''):
form = Form(
name={lang if lang else "en": name if name else _("Untitled Form")},
)
self.forms.append(form)
form = self.get_form(-1)
form.source = attachment
return form
def add_insert_form(self, from_module, form, index=None, with_source=False):
if isinstance(form, Form):
new_form = form
elif isinstance(form, AdvancedForm) and not form.actions.get_all_actions():
new_form = Form(
name=form.name,
form_filter=form.form_filter,
media_image=form.media_image,
media_audio=form.media_audio
)
new_form._parent = self
form._parent = self
if with_source:
new_form.source = form.source
else:
raise IncompatibleFormTypeException()
if index is not None:
self.forms.insert(index, new_form)
else:
self.forms.append(new_form)
return self.get_form(index or -1)
def validate_for_build(self):
errors = super(Module, self).validate_for_build() + self.validate_details_for_build()
if not self.forms and not self.case_list.show:
errors.append({
'type': 'no forms or case list',
'module': self.get_module_info(),
})
if module_case_hierarchy_has_circular_reference(self):
errors.append({
'type': 'circular case hierarchy',
'module': self.get_module_info(),
})
return errors
def requires(self):
r = set(["none"])
for form in self.get_forms():
r.add(form.requires)
if self.case_list.show:
r.add('case')
if self.referral_list.show:
r.add('referral')
for val in ("referral", "case", "none"):
if val in r:
return val
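    # Illustrative example: if the forms require {'case', 'none'} and the case
    # list is hidden, requires() returns 'case' -- 'referral' wins over 'case',
    # which wins over 'none'.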
def requires_case_details(self):
ret = False
if self.case_list.show:
return True
for form in self.get_forms():
if form.requires_case():
ret = True
break
return ret
@memoized
def all_forms_require_a_case(self):
return all([form.requires == 'case' for form in self.get_forms()])
def uses_usercase(self):
"""Return True if this module has any forms that use the usercase.
"""
return any(form.uses_usercase() for form in self.get_forms())
def grid_display_style(self):
return self.display_style == 'grid'
class AdvancedForm(IndexedFormBase, NavMenuItemMediaMixin):
form_type = 'advanced_form'
form_filter = StringProperty()
actions = SchemaProperty(AdvancedFormActions)
schedule = SchemaProperty(FormSchedule, default=None)
@classmethod
def wrap(cls, data):
# lazy migration to swap keys with values in action preload dict.
# http://manage.dimagi.com/default.asp?162213
load_actions = data.get('actions', {}).get('load_update_cases', [])
for action in load_actions:
preload = action['preload']
if preload and preload.values()[0].startswith('/'):
action['preload'] = {v: k for k, v in preload.items()}
return super(AdvancedForm, cls).wrap(data)
def pre_delete_hook(self):
try:
self.disable_schedule()
        except (ScheduleError, TypeError, AttributeError) as e:
            logging.error("There was an error ({error}) while running the pre_delete_hook on {form_id}. "
                          "There is probably nothing to worry about, but you could check to make sure "
                          "that there are no issues with this form.".format(error=e, form_id=self.unique_id))
def pre_move_hook(self, from_module, to_module):
if from_module != to_module:
try:
self.disable_schedule()
            except (ScheduleError, TypeError, AttributeError) as e:
                logging.error("There was an error ({error}) while running the pre_move_hook on {form_id}. "
                              "There is probably nothing to worry about, but you could check to make sure "
                              "that there are no issues with this module.".format(error=e, form_id=self.unique_id))
def add_stuff_to_xform(self, xform, build_profile_id=None):
super(AdvancedForm, self).add_stuff_to_xform(xform, build_profile_id)
xform.add_case_and_meta_advanced(self)
def requires_case(self):
"""Form requires a case that must be selected by the user (excludes autoloaded cases)
"""
return any(not action.auto_select for action in self.actions.load_update_cases)
@property
def requires(self):
return 'case' if self.requires_case() else 'none'
def is_registration_form(self, case_type=None):
"""
        Defined as a form that opens a single case. If the case is a sub-case then
the form is only allowed to load parent cases (and any auto-selected cases).
"""
reg_actions = self.get_registration_actions(case_type)
if len(reg_actions) != 1:
return False
load_actions = [action for action in self.actions.load_update_cases if not action.auto_select]
if not load_actions:
return True
reg_action = reg_actions[0]
if not reg_action.case_indices:
return False
actions_by_tag = deepcopy(self.actions.actions_meta_by_tag)
actions_by_tag.pop(reg_action.case_tag)
def check_parents(tag):
"""Recursively check parent actions to ensure that all actions for this form are
either parents of the registration action or else auto-select actions.
"""
if not tag:
return not actions_by_tag or all(
getattr(a['action'], 'auto_select', False) for a in actions_by_tag.values()
)
try:
parent = actions_by_tag.pop(tag)
except KeyError:
return False
return all(check_parents(p.tag) for p in parent['action'].case_indices)
return all(check_parents(parent.tag) for parent in reg_action.case_indices)
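    # Illustrative sketch: a form with exactly one open action whose case index
    # points at the only (non-auto-select) load action counts as a registration
    # form; adding an unrelated load action makes check_parents() fail.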
def get_registration_actions(self, case_type=None):
"""
:return: List of actions that create a case. Subcase actions are included
as long as they are not inside a repeat. If case_type is not None
only return actions that create a case of the specified type.
"""
registration_actions = [
action for action in self.actions.get_open_actions()
if not action.is_subcase or not action.repeat_context
]
if case_type:
registration_actions = [a for a in registration_actions if a.case_type == case_type]
return registration_actions
def uses_case_type(self, case_type, invert_match=False):
def match(ct):
matches = ct == case_type
return not matches if invert_match else matches
return any(action for action in self.actions.load_update_cases if match(action.case_type))
def uses_usercase(self):
return self.uses_case_type(USERCASE_TYPE)
def all_other_forms_require_a_case(self):
m = self.get_module()
return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])
def get_module(self):
return self._parent
def get_phase(self):
module = self.get_module()
return next((phase for phase in module.get_schedule_phases()
for form in phase.get_forms()
if form.unique_id == self.unique_id),
None)
def disable_schedule(self):
self.schedule.enabled = False
phase = self.get_phase()
if phase:
phase.remove_form(self)
def check_actions(self):
errors = []
for action in self.actions.get_subcase_actions():
case_tags = self.actions.get_case_tags()
for case_index in action.case_indices:
if case_index.tag not in case_tags:
errors.append({'type': 'missing parent tag', 'case_tag': case_index.tag})
if isinstance(action, AdvancedOpenCaseAction):
if not action.name_path:
errors.append({'type': 'case_name required', 'case_tag': action.case_tag})
for case_index in action.case_indices:
meta = self.actions.actions_meta_by_tag.get(case_index.tag)
if meta and meta['type'] == 'open' and meta['action'].repeat_context:
if (
not action.repeat_context or
not action.repeat_context.startswith(meta['action'].repeat_context)
):
errors.append({'type': 'subcase repeat context',
'case_tag': action.case_tag,
'parent_tag': case_index.tag})
errors.extend(self.check_case_properties(
subcase_names=action.get_property_names(),
case_tag=action.case_tag
))
for action in self.actions.get_all_actions():
if not action.case_type and (not isinstance(action, LoadUpdateAction) or not action.auto_select):
errors.append({'type': "no case type in action", 'case_tag': action.case_tag})
if isinstance(action, LoadUpdateAction) and action.auto_select:
mode = action.auto_select.mode
if not action.auto_select.value_key:
key_names = {
AUTO_SELECT_CASE: _('Case property'),
AUTO_SELECT_FIXTURE: _('Lookup Table field'),
AUTO_SELECT_USER: _('custom user property'),
AUTO_SELECT_RAW: _('custom XPath expression'),
}
if mode in key_names:
errors.append({'type': 'auto select key', 'key_name': key_names[mode]})
if not action.auto_select.value_source:
source_names = {
AUTO_SELECT_CASE: _('Case tag'),
AUTO_SELECT_FIXTURE: _('Lookup Table tag'),
}
if mode in source_names:
errors.append({'type': 'auto select source', 'source_name': source_names[mode]})
elif mode == AUTO_SELECT_CASE:
case_tag = action.auto_select.value_source
if not self.actions.get_action_from_tag(case_tag):
errors.append({'type': 'auto select case ref', 'case_tag': action.case_tag})
errors.extend(self.check_case_properties(
all_names=action.get_property_names(),
case_tag=action.case_tag
))
if self.form_filter:
form_filter_references_case = (
xpath_references_case(self.form_filter) or
xpath_references_user_case(self.form_filter)
)
if form_filter_references_case:
if not any(action for action in self.actions.load_update_cases if not action.auto_select):
errors.append({'type': "filtering without case"})
def generate_paths():
for action in self.actions.get_all_actions():
for path in action.get_paths():
yield path
if self.schedule:
if self.schedule.transition_condition.type == 'if':
yield self.schedule.transition_condition.question
if self.schedule.termination_condition.type == 'if':
yield self.schedule.termination_condition.question
errors.extend(self.check_paths(generate_paths()))
return errors
def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
errors = []
if xml_valid:
for error in self.check_actions():
error.update(error_meta)
errors.append(error)
module = self.get_module()
if validate_module:
errors.extend(module.get_case_errors(
needs_case_type=False,
needs_case_detail=module.requires_case_details(),
needs_referral_detail=False,
))
return errors
def get_case_updates(self, case_type):
updates = set()
format_key = self.get_case_property_name_formatter()
for action in self.actions.get_all_actions():
if action.case_type == case_type:
updates.update(format_key(*item)
for item in action.case_properties.iteritems())
if self.schedule and self.schedule.enabled and self.source:
xform = self.wrapped_xform()
self.add_stuff_to_xform(xform)
scheduler_updates = xform.get_scheduler_case_updates()[case_type]
else:
scheduler_updates = set()
return updates.union(scheduler_updates)
@property
def case_references(self):
return {}
@case_references.setter
def case_references(self, refs):
pass
@memoized
def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
parent_types = set()
case_properties = set()
for subcase in self.actions.get_subcase_actions():
if subcase.case_type == case_type:
case_properties.update(
subcase.case_properties.keys()
)
for case_index in subcase.case_indices:
parent = self.actions.get_action_from_tag(case_index.tag)
if parent:
parent_types.add((parent.case_type, case_index.reference_id or 'parent'))
return parent_types, case_properties
def update_app_case_meta(self, app_case_meta):
from corehq.apps.reports.formdetails.readable import FormQuestionResponse
questions = {
q['value']: FormQuestionResponse(q)
for q in self.get_questions(self.get_app().langs, include_translations=True)
}
for action in self.actions.load_update_cases:
for name, question_path in action.case_properties.items():
self.add_property_save(
app_case_meta,
action.case_type,
name,
questions,
question_path
)
for question_path, name in action.preload.items():
self.add_property_load(
app_case_meta,
action.case_type,
name,
questions,
question_path
)
if action.close_condition.is_active():
meta = app_case_meta.get_type(action.case_type)
meta.add_closer(self.unique_id, action.close_condition)
for action in self.actions.open_cases:
self.add_property_save(
app_case_meta,
action.case_type,
'name',
questions,
action.name_path,
action.open_condition
)
for name, question_path in action.case_properties.items():
self.add_property_save(
app_case_meta,
action.case_type,
name,
questions,
question_path,
action.open_condition
)
meta = app_case_meta.get_type(action.case_type)
meta.add_opener(self.unique_id, action.open_condition)
if action.close_condition.is_active():
meta.add_closer(self.unique_id, action.close_condition)
class SchedulePhaseForm(IndexedSchema):
"""
A reference to a form in a schedule phase.
"""
form_id = FormIdProperty("modules[*].schedule_phases[*].forms[*].form_id")
class SchedulePhase(IndexedSchema):
"""
SchedulePhases are attached to a module.
    A Schedule Phase is a grouping of forms that occur within a period and share an anchor.
    A module should not have more than one SchedulePhase with the same anchor.
anchor: Case property containing a date after which this phase becomes active
forms: The forms that are to be filled out within this phase
"""
anchor = StringProperty()
forms = SchemaListProperty(SchedulePhaseForm)
@property
def id(self):
""" A Schedule Phase is 1-indexed """
_id = super(SchedulePhase, self).id
return _id + 1
@property
def phase_id(self):
return "{}_{}".format(self.anchor, self.id)
def get_module(self):
return self._parent
_get_forms = IndexedSchema.Getter('forms')
def get_forms(self):
"""Returns the actual form objects related to this phase"""
module = self.get_module()
return (module.get_form_by_unique_id(form.form_id) for form in self._get_forms())
def get_form(self, desired_form):
return next((form for form in self.get_forms() if form.unique_id == desired_form.unique_id), None)
def get_phase_form_index(self, form):
"""
Returns the index of the form with respect to the phase
schedule_phase.forms = [a,b,c]
schedule_phase.get_phase_form_index(b)
=> 1
schedule_phase.get_phase_form_index(c)
=> 2
"""
return next((phase_form.id for phase_form in self._get_forms() if phase_form.form_id == form.unique_id),
None)
def remove_form(self, form):
"""Remove a form from the phase"""
idx = self.get_phase_form_index(form)
if idx is None:
raise ScheduleError("That form doesn't exist in the phase")
self.forms.remove(self.forms[idx])
def add_form(self, form):
"""Adds a form to this phase, removing it from other phases"""
old_phase = form.get_phase()
if old_phase is not None and old_phase.anchor != self.anchor:
old_phase.remove_form(form)
if self.get_form(form) is None:
self.forms.append(SchedulePhaseForm(form_id=form.unique_id))
def change_anchor(self, new_anchor):
if new_anchor is None or new_anchor.strip() == '':
raise ScheduleError(_("You can't create a phase without an anchor property"))
self.anchor = new_anchor
if self.get_module().phase_anchors.count(new_anchor) > 1:
raise ScheduleError(_("You can't have more than one phase with the anchor {}").format(new_anchor))
class AdvancedModule(ModuleBase):
module_type = 'advanced'
case_label = DictProperty()
forms = SchemaListProperty(AdvancedForm)
case_details = SchemaProperty(DetailPair)
product_details = SchemaProperty(DetailPair)
put_in_root = BooleanProperty(default=False)
case_list = SchemaProperty(CaseList)
has_schedule = BooleanProperty()
schedule_phases = SchemaListProperty(SchedulePhase)
get_schedule_phases = IndexedSchema.Getter('schedule_phases')
search_config = SchemaProperty(CaseSearch)
@classmethod
def wrap(cls, data):
# lazy migration to accommodate search_config as empty list
# http://manage.dimagi.com/default.asp?231186
if data.get('search_config') == []:
data['search_config'] = {}
return super(AdvancedModule, cls).wrap(data)
@classmethod
def new_module(cls, name, lang):
detail = Detail(
columns=[DetailColumn(
format='plain',
header={(lang or 'en'): ugettext("Name")},
field='name',
model='case',
)]
)
module = AdvancedModule(
name={(lang or 'en'): name or ugettext("Untitled Module")},
forms=[],
case_type='',
case_details=DetailPair(
short=Detail(detail.to_json()),
long=Detail(detail.to_json()),
),
product_details=DetailPair(
short=Detail(
columns=[
DetailColumn(
format='plain',
header={(lang or 'en'): ugettext("Product")},
field='name',
model='product',
),
],
),
long=Detail(),
),
)
module.get_or_create_unique_id()
return module
def new_form(self, name, lang, attachment=''):
form = AdvancedForm(
name={lang if lang else "en": name if name else _("Untitled Form")},
)
form.schedule = FormSchedule(enabled=False)
self.forms.append(form)
form = self.get_form(-1)
form.source = attachment
return form
def add_insert_form(self, from_module, form, index=None, with_source=False):
if isinstance(form, AdvancedForm):
new_form = form
elif isinstance(form, Form):
new_form = AdvancedForm(
name=form.name,
form_filter=form.form_filter,
media_image=form.media_image,
media_audio=form.media_audio
)
new_form._parent = self
form._parent = self
if with_source:
new_form.source = form.source
actions = form.active_actions()
open = actions.get('open_case', None)
update = actions.get('update_case', None)
close = actions.get('close_case', None)
preload = actions.get('case_preload', None)
subcases = actions.get('subcases', None)
case_type = from_module.case_type
base_action = None
if open:
base_action = AdvancedOpenCaseAction(
case_type=case_type,
case_tag='open_{0}_0'.format(case_type),
name_path=open.name_path,
open_condition=open.condition,
case_properties=update.update if update else {},
)
new_form.actions.open_cases.append(base_action)
elif update or preload or close:
base_action = LoadUpdateAction(
case_type=case_type,
case_tag='load_{0}_0'.format(case_type),
case_properties=update.update if update else {},
preload=preload.preload if preload else {}
)
if from_module.parent_select.active:
app = self.get_app()
select_chain = get_select_chain(app, from_module, include_self=False)
for n, link in enumerate(reversed(list(enumerate(select_chain)))):
i, module = link
new_form.actions.load_update_cases.append(LoadUpdateAction(
case_type=module.case_type,
case_tag='_'.join(['parent'] * (i + 1)),
details_module=module.unique_id,
case_index=CaseIndex(tag='_'.join(['parent'] * (i + 2)) if n > 0 else '')
))
base_action.case_indices = [CaseIndex(tag='parent')]
if close:
base_action.close_condition = close.condition
new_form.actions.load_update_cases.append(base_action)
if subcases:
for i, subcase in enumerate(subcases):
open_subcase_action = AdvancedOpenCaseAction(
case_type=subcase.case_type,
case_tag='open_{0}_{1}'.format(subcase.case_type, i+1),
name_path=subcase.case_name,
open_condition=subcase.condition,
case_properties=subcase.case_properties,
repeat_context=subcase.repeat_context,
case_indices=[CaseIndex(
tag=base_action.case_tag if base_action else '',
reference_id=subcase.reference_id,
)]
)
new_form.actions.open_cases.append(open_subcase_action)
else:
raise IncompatibleFormTypeException()
if index is not None:
self.forms.insert(index, new_form)
else:
self.forms.append(new_form)
return self.get_form(index or -1)
def rename_lang(self, old_lang, new_lang):
super(AdvancedModule, self).rename_lang(old_lang, new_lang)
self.case_list.rename_lang(old_lang, new_lang)
    def requires_case_details(self):
        if self.case_list.show:
            return True
        for form in self.forms:
            if any(action.case_type == self.case_type for action in form.actions.load_update_cases):
                return True
        return False
def all_forms_require_a_case(self):
return all(form.requires_case() for form in self.forms)
def get_details(self):
return (
('case_short', self.case_details.short, True),
('case_long', self.case_details.long, True),
('product_short', self.product_details.short, self.get_app().commtrack_enabled),
('product_long', self.product_details.long, False),
)
def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
module_info = self.get_module_info()
if needs_case_type and not self.case_type:
yield {
'type': 'no case type',
'module': module_info,
}
if needs_case_detail:
if not self.case_details.short.columns:
yield {
'type': 'no case detail',
'module': module_info,
}
if self.get_app().commtrack_enabled and not self.product_details.short.columns:
for form in self.forms:
if self.case_list.show or \
any(action.show_product_stock for action in form.actions.load_update_cases):
yield {
'type': 'no product detail',
'module': module_info,
}
break
columns = self.case_details.short.columns + self.case_details.long.columns
if self.get_app().commtrack_enabled:
columns += self.product_details.short.columns
errors = self.validate_detail_columns(columns)
for error in errors:
yield error
def validate_for_build(self):
errors = super(AdvancedModule, self).validate_for_build()
if not self.forms and not self.case_list.show:
errors.append({
'type': 'no forms or case list',
'module': self.get_module_info(),
})
if self.case_list_form.form_id:
forms = self.forms
case_tag = None
loaded_case_types = None
for form in forms:
info = self.get_module_info()
form_info = {"id": form.id if hasattr(form, 'id') else None, "name": form.name}
non_auto_select_actions = [a for a in form.actions.load_update_cases if not a.auto_select]
this_forms_loaded_case_types = {action.case_type for action in non_auto_select_actions}
if loaded_case_types is None:
loaded_case_types = this_forms_loaded_case_types
elif loaded_case_types != this_forms_loaded_case_types:
errors.append({
'type': 'all forms in case list module must load the same cases',
'module': info,
'form': form_info,
})
if not non_auto_select_actions:
errors.append({
'type': 'case list module form must require case',
'module': info,
'form': form_info,
})
elif len(non_auto_select_actions) != 1:
for index, action in reversed(list(enumerate(non_auto_select_actions))):
if (
index > 0 and
non_auto_select_actions[index - 1].case_tag not in (p.tag for p in action.case_indices)
):
errors.append({
'type': 'case list module form can only load parent cases',
'module': info,
'form': form_info,
})
case_action = non_auto_select_actions[-1] if non_auto_select_actions else None
if case_action and case_action.case_type != self.case_type:
errors.append({
'type': 'case list module form must match module case type',
'module': info,
'form': form_info,
})
# set case_tag if not already set
case_tag = case_action.case_tag if not case_tag and case_action else case_tag
if case_action and case_action.case_tag != case_tag:
errors.append({
'type': 'all forms in case list module must have same case management',
'module': info,
'form': form_info,
'expected_tag': case_tag
})
if case_action and case_action.details_module and case_action.details_module != self.unique_id:
errors.append({
'type': 'forms in case list module must use modules details',
'module': info,
'form': form_info,
})
return errors
def _uses_case_type(self, case_type, invert_match=False):
return any(form.uses_case_type(case_type, invert_match) for form in self.forms)
def uses_usercase(self):
"""Return True if this module has any forms that use the usercase.
"""
return self._uses_case_type(USERCASE_TYPE)
@property
def phase_anchors(self):
return [phase.anchor for phase in self.schedule_phases]
def get_or_create_schedule_phase(self, anchor):
"""Returns a tuple of (phase, new?)"""
if anchor is None or anchor.strip() == '':
raise ScheduleError(_("You can't create a phase without an anchor property"))
phase = next((phase for phase in self.get_schedule_phases() if phase.anchor == anchor), None)
is_new_phase = False
if phase is None:
self.schedule_phases.append(SchedulePhase(anchor=anchor))
# TODO: is there a better way of doing this?
phase = list(self.get_schedule_phases())[-1] # get the phase from the module so we know the _parent
is_new_phase = True
return (phase, is_new_phase)
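    # Illustrative usage (hypothetical caller):
    #     phase, created = module.get_or_create_schedule_phase('edd')
    # 'created' is True only when no existing phase already had that anchor.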
def _clear_schedule_phases(self):
self.schedule_phases = []
def update_schedule_phases(self, anchors):
""" Take a list of anchors, reorders, deletes and creates phases from it """
old_phases = {phase.anchor: phase for phase in self.get_schedule_phases()}
self._clear_schedule_phases()
for anchor in anchors:
try:
self.schedule_phases.append(old_phases.pop(anchor))
except KeyError:
self.get_or_create_schedule_phase(anchor)
deleted_phases_with_forms = [anchor for anchor, phase in old_phases.iteritems() if len(phase.forms)]
if deleted_phases_with_forms:
raise ScheduleError(_("You can't delete phases with anchors "
"{phase_anchors} because they have forms attached to them").format(
phase_anchors=(", ").join(deleted_phases_with_forms)))
return self.get_schedule_phases()
def update_schedule_phase_anchors(self, new_anchors):
""" takes a list of tuples (id, new_anchor) and updates the phase anchors """
        for anchor in new_anchors:
            phase_index = anchor[0] - 1
            new_anchor = anchor[1]
            try:
                list(self.get_schedule_phases())[phase_index].change_anchor(new_anchor)
            except IndexError:
                pass  # That phase wasn't found, so we can't change its anchor. Ignore it.
class CareplanForm(IndexedFormBase, NavMenuItemMediaMixin):
form_type = 'careplan_form'
mode = StringProperty(required=True, choices=['create', 'update'])
custom_case_updates = DictProperty()
case_preload = DictProperty()
@classmethod
def wrap(cls, data):
if cls is CareplanForm:
doc_type = data['doc_type']
if doc_type == 'CareplanGoalForm':
return CareplanGoalForm.wrap(data)
elif doc_type == 'CareplanTaskForm':
return CareplanTaskForm.wrap(data)
else:
raise ValueError('Unexpected doc_type for CareplanForm', doc_type)
else:
return super(CareplanForm, cls).wrap(data)
def add_stuff_to_xform(self, xform, build_profile_id=None):
super(CareplanForm, self).add_stuff_to_xform(xform, build_profile_id)
xform.add_care_plan(self)
def get_case_updates(self, case_type):
if case_type == self.case_type:
format_key = self.get_case_property_name_formatter()
return [format_key(*item) for item in self.case_updates().iteritems()]
else:
return []
def get_case_type(self):
return self.case_type
def get_parent_case_type(self):
return self._parent.case_type
def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
parent_types = set()
case_properties = set()
if case_type == self.case_type:
if case_type == CAREPLAN_GOAL:
parent_types.add((module_case_type, 'parent'))
elif case_type == CAREPLAN_TASK:
parent_types.add((CAREPLAN_GOAL, 'goal'))
case_properties.update(self.case_updates().keys())
return parent_types, case_properties
def is_registration_form(self, case_type=None):
return self.mode == 'create' and (not case_type or self.case_type == case_type)
def update_app_case_meta(self, app_case_meta):
from corehq.apps.reports.formdetails.readable import FormQuestionResponse
questions = {
q['value']: FormQuestionResponse(q)
for q in self.get_questions(self.get_app().langs, include_translations=True)
}
meta = app_case_meta.get_type(self.case_type)
for name, question_path in self.case_updates().items():
self.add_property_save(
app_case_meta,
self.case_type,
name,
questions,
question_path
)
for name, question_path in self.case_preload.items():
self.add_property_load(
app_case_meta,
self.case_type,
name,
questions,
question_path
)
meta.add_opener(self.unique_id, FormActionCondition(
type='always',
))
meta.add_closer(self.unique_id, FormActionCondition(
type='if',
question=self.close_path,
answer='yes',
))
class CareplanGoalForm(CareplanForm):
case_type = CAREPLAN_GOAL
name_path = StringProperty(required=True, default='/data/name')
date_followup_path = StringProperty(required=True, default='/data/date_followup')
description_path = StringProperty(required=True, default='/data/description')
close_path = StringProperty(required=True, default='/data/close_goal')
@classmethod
def new_form(cls, lang, name, mode):
action = 'Update' if mode == 'update' else 'New'
form = CareplanGoalForm(mode=mode)
name = name or '%s Careplan %s' % (action, CAREPLAN_CASE_NAMES[form.case_type])
form.name = {lang: name}
if mode == 'update':
form.description_path = '/data/description_group/description'
source = load_form_template('%s_%s.xml' % (form.case_type, mode))
return form, source
def case_updates(self):
changes = self.custom_case_updates.copy()
changes.update({
'date_followup': self.date_followup_path,
'description': self.description_path,
})
return changes
def get_fixed_questions(self):
def q(name, case_key, label):
return {
'name': name,
'key': case_key,
'label': label,
'path': self[name]
}
questions = [
q('description_path', 'description', _('Description')),
q('date_followup_path', 'date_followup', _('Followup date')),
]
if self.mode == 'create':
return [q('name_path', 'name', _('Name'))] + questions
else:
return questions + [q('close_path', 'close', _('Close if'))]
class CareplanTaskForm(CareplanForm):
case_type = CAREPLAN_TASK
name_path = StringProperty(required=True, default='/data/task_repeat/name')
date_followup_path = StringProperty(required=True, default='/data/date_followup')
description_path = StringProperty(required=True, default='/data/description')
latest_report_path = StringProperty(required=True, default='/data/progress_group/progress_update')
close_path = StringProperty(required=True, default='/data/task_complete')
@classmethod
def new_form(cls, lang, name, mode):
action = 'Update' if mode == 'update' else 'New'
form = CareplanTaskForm(mode=mode)
name = name or '%s Careplan %s' % (action, CAREPLAN_CASE_NAMES[form.case_type])
form.name = {lang: name}
if mode == 'create':
form.date_followup_path = '/data/task_repeat/date_followup'
form.description_path = '/data/task_repeat/description'
source = load_form_template('%s_%s.xml' % (form.case_type, mode))
return form, source
def case_updates(self):
changes = self.custom_case_updates.copy()
changes.update({
'date_followup': self.date_followup_path,
})
if self.mode == 'create':
changes['description'] = self.description_path
else:
changes['latest_report'] = self.latest_report_path
return changes
def get_fixed_questions(self):
def q(name, case_key, label):
return {
'name': name,
'key': case_key,
'label': label,
'path': self[name]
}
questions = [
q('date_followup_path', 'date_followup', _('Followup date')),
]
if self.mode == 'create':
return [
q('name_path', 'name', _('Name')),
q('description_path', 'description', _('Description')),
] + questions
else:
return questions + [
q('latest_report_path', 'latest_report', _('Latest report')),
q('close_path', 'close', _('Close if')),
]
class CareplanModule(ModuleBase):
"""
A set of forms and configuration for managing the Care Plan workflow.
"""
module_type = 'careplan'
parent_select = SchemaProperty(ParentSelect)
display_separately = BooleanProperty(default=False)
forms = SchemaListProperty(CareplanForm)
goal_details = SchemaProperty(DetailPair)
task_details = SchemaProperty(DetailPair)
@classmethod
def new_module(cls, name, lang, target_module_id, target_case_type):
lang = lang or 'en'
module = CareplanModule(
name={lang: name or ugettext("Care Plan")},
parent_select=ParentSelect(
active=True,
relationship='parent',
module_id=target_module_id
),
case_type=target_case_type,
goal_details=DetailPair(
short=cls._get_detail(lang, 'goal_short'),
long=cls._get_detail(lang, 'goal_long'),
),
task_details=DetailPair(
short=cls._get_detail(lang, 'task_short'),
long=cls._get_detail(lang, 'task_long'),
)
)
module.get_or_create_unique_id()
return module
@classmethod
def _get_detail(cls, lang, detail_type):
header = ugettext('Goal') if detail_type.startswith('goal') else ugettext('Task')
columns = [
DetailColumn(
format='plain',
header={lang: header},
field='name',
model='case'),
DetailColumn(
format='date',
header={lang: ugettext("Followup")},
field='date_followup',
model='case')]
if detail_type.endswith('long'):
columns.append(DetailColumn(
format='plain',
header={lang: ugettext("Description")},
field='description',
model='case'))
        if detail_type == 'task_long':
columns.append(DetailColumn(
format='plain',
header={lang: ugettext("Last update")},
field='latest_report',
model='case'))
return Detail(type=detail_type, columns=columns)
def add_insert_form(self, from_module, form, index=None, with_source=False):
if isinstance(form, CareplanForm):
if index is not None:
self.forms.insert(index, form)
else:
self.forms.append(form)
return self.get_form(index or -1)
else:
raise IncompatibleFormTypeException()
def requires_case_details(self):
return True
def get_case_types(self):
return set([self.case_type]) | set(f.case_type for f in self.forms)
def get_form_by_type(self, case_type, mode):
for form in self.get_forms():
if form.case_type == case_type and form.mode == mode:
return form
def get_details(self):
return (
('%s_short' % CAREPLAN_GOAL, self.goal_details.short, True),
('%s_long' % CAREPLAN_GOAL, self.goal_details.long, True),
('%s_short' % CAREPLAN_TASK, self.task_details.short, True),
('%s_long' % CAREPLAN_TASK, self.task_details.long, True),
)
def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
module_info = self.get_module_info()
if needs_case_type and not self.case_type:
yield {
'type': 'no case type',
'module': module_info,
}
if needs_case_detail:
if not self.goal_details.short.columns:
yield {
'type': 'no case detail for goals',
'module': module_info,
}
if not self.task_details.short.columns:
yield {
'type': 'no case detail for tasks',
'module': module_info,
}
columns = self.goal_details.short.columns + self.goal_details.long.columns
columns += self.task_details.short.columns + self.task_details.long.columns
errors = self.validate_detail_columns(columns)
for error in errors:
yield error
def validate_for_build(self):
errors = super(CareplanModule, self).validate_for_build()
if not self.forms:
errors.append({
'type': 'no forms',
'module': self.get_module_info(),
})
return errors
class ReportGraphConfig(DocumentSchema):
graph_type = StringProperty(
choices=[
'bar',
'time',
'xy',
],
default='bar',
required=True,
)
series_configs = DictProperty(DictProperty)
config = DictProperty()
class ReportAppFilter(DocumentSchema):
@classmethod
def wrap(cls, data):
if cls is ReportAppFilter:
doc_type = data['doc_type']
doc_type_to_filter_class = {
'AutoFilter': AutoFilter,
'CustomDataAutoFilter': CustomDataAutoFilter,
'StaticChoiceFilter': StaticChoiceFilter,
'StaticChoiceListFilter': StaticChoiceListFilter,
'StaticDatespanFilter': StaticDatespanFilter,
'CustomDatespanFilter': CustomDatespanFilter,
'CustomMonthFilter': CustomMonthFilter,
'MobileSelectFilter': MobileSelectFilter,
'AncestorLocationTypeFilter': AncestorLocationTypeFilter,
'NumericFilter': NumericFilter,
}
try:
klass = doc_type_to_filter_class[doc_type]
except KeyError:
raise ValueError('Unexpected doc_type for ReportAppFilter', doc_type)
else:
return klass.wrap(data)
else:
return super(ReportAppFilter, cls).wrap(data)
def get_filter_value(self, user, ui_filter):
raise NotImplementedError
def _filter_by_case_sharing_group_id(user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return [
Choice(value=group._id, display=None)
for group in user.get_case_sharing_groups()
]
def _filter_by_location_id(user, ui_filter):
return ui_filter.value(**{ui_filter.name: user.location_id})
def _filter_by_username(user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return Choice(value=user.raw_username, display=None)
def _filter_by_user_id(user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return Choice(value=user._id, display=None)
def _filter_by_parent_location_id(user, ui_filter):
location = user.sql_location
location_parent = location.parent.location_id if location and location.parent else None
return ui_filter.value(**{ui_filter.name: location_parent})
_filter_type_to_func = {
'case_sharing_group': _filter_by_case_sharing_group_id,
'location_id': _filter_by_location_id,
'parent_location_id': _filter_by_parent_location_id,
'username': _filter_by_username,
'user_id': _filter_by_user_id,
}
class AutoFilter(ReportAppFilter):
filter_type = StringProperty(choices=_filter_type_to_func.keys())
def get_filter_value(self, user, ui_filter):
return _filter_type_to_func[self.filter_type](user, ui_filter)
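    # Illustrative example: AutoFilter(filter_type='username') dispatches to
    # _filter_by_username and returns Choice(value=user.raw_username,
    # display=None) for the restoring user.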
class CustomDataAutoFilter(ReportAppFilter):
custom_data_property = StringProperty()
def get_filter_value(self, user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return Choice(value=user.user_data[self.custom_data_property], display=None)
class StaticChoiceFilter(ReportAppFilter):
select_value = StringProperty()
def get_filter_value(self, user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return [Choice(value=self.select_value, display=None)]
class StaticChoiceListFilter(ReportAppFilter):
value = StringListProperty()
def get_filter_value(self, user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return [Choice(value=string_value, display=None) for string_value in self.value]
class StaticDatespanFilter(ReportAppFilter):
date_range = StringProperty(
choices=[choice.slug for choice in get_simple_dateranges()],
required=True,
)
def get_filter_value(self, user, ui_filter):
start_date, end_date = get_daterange_start_end_dates(self.date_range)
return DateSpan(startdate=start_date, enddate=end_date)
class CustomDatespanFilter(ReportAppFilter):
operator = StringProperty(
choices=[
'=',
'<=',
'>=',
'>',
'<',
'between'
],
required=True,
)
date_number = StringProperty(required=True)
date_number2 = StringProperty()
def get_filter_value(self, user, ui_filter):
assert user is not None, (
"CustomDatespanFilter.get_filter_value must be called "
"with an OTARestoreUser object, not None")
timezone = get_timezone_for_domain(user.domain)
today = ServerTime(datetime.datetime.utcnow()).user_time(timezone).done().date()
start_date = end_date = None
days = int(self.date_number)
if self.operator == 'between':
days2 = int(self.date_number2)
# allows user to have specified the two numbers in either order
if days > days2:
end = days2
start = days
else:
start = days2
end = days
start_date = today - datetime.timedelta(days=start)
end_date = today - datetime.timedelta(days=end)
elif self.operator == '=':
start_date = end_date = today - datetime.timedelta(days=days)
elif self.operator == '>=':
start_date = None
end_date = today - datetime.timedelta(days=days)
elif self.operator == '<=':
start_date = today - datetime.timedelta(days=days)
end_date = None
elif self.operator == '<':
start_date = today - datetime.timedelta(days=days - 1)
end_date = None
elif self.operator == '>':
start_date = None
end_date = today - datetime.timedelta(days=days + 1)
return DateSpan(startdate=start_date, enddate=end_date)
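    # Worked example (illustrative): operator '<=' with date_number='7' yields
    # DateSpan(startdate=today - 7 days, enddate=None), i.e. everything from a
    # week ago onward.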
def is_lte(integer):
def validate(x):
if not x <= integer:
raise BadValueError('Value must be less than or equal to {}'.format(integer))
return validate
def is_gte(integer):
def validate(x):
if not x >= integer:
raise BadValueError('Value must be greater than or equal to {}'.format(integer))
return validate
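# Illustrative usage of the validators above (see CustomMonthFilter below):
#     start_of_month = IntegerProperty(validators=(is_gte(-27), is_lte(28)))
# Assigning a value outside [-27, 28] then raises BadValueError on validation.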
class CustomMonthFilter(ReportAppFilter):
"""
Filter by months that start on a day number other than 1
See [FB 215656](http://manage.dimagi.com/default.asp?215656)
"""
    # Values for start_of_month < 1 specify the number of days back from the end of the month.
    # Values are capped at the length of February (28) so the start day exists in every month.
start_of_month = IntegerProperty(
required=True,
validators=(is_gte(-27), is_lte(28))
)
# DateSpan to return i.t.o. number of months to go back
period = IntegerProperty(
default=DEFAULT_MONTH_FILTER_PERIOD_LENGTH,
validators=(is_gte(0),)
)
@classmethod
def wrap(cls, doc):
doc['start_of_month'] = int(doc['start_of_month'])
if 'period' in doc:
doc['period'] = int(doc['period'] or DEFAULT_MONTH_FILTER_PERIOD_LENGTH)
return super(CustomMonthFilter, cls).wrap(doc)
def get_filter_value(self, user, ui_filter):
def get_last_month(this_month):
return datetime.date(this_month.year, this_month.month, 1) - datetime.timedelta(days=1)
def get_last_day(date):
_, last_day = calendar.monthrange(date.year, date.month)
return last_day
start_of_month = int(self.start_of_month)
today = datetime.date.today()
if start_of_month > 0:
start_day = start_of_month
else:
# start_of_month is zero or negative. Work backwards from the end of the month
start_day = get_last_day(today) + start_of_month
# Loop over months backwards for period > 0
month = today if today.day >= start_day else get_last_month(today)
for i in range(int(self.period)):
month = get_last_month(month)
if start_of_month > 0:
start_date = datetime.date(month.year, month.month, start_day)
days = get_last_day(start_date) - 1
end_date = start_date + datetime.timedelta(days=days)
else:
start_day = get_last_day(month) + start_of_month
start_date = datetime.date(month.year, month.month, start_day)
next_month = datetime.date(month.year, month.month, get_last_day(month)) + datetime.timedelta(days=1)
end_day = get_last_day(next_month) + start_of_month - 1
end_date = datetime.date(next_month.year, next_month.month, end_day)
return DateSpan(startdate=start_date, enddate=end_date)
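    # Worked example (illustrative): start_of_month=21, period=1, today=May 15.
    # Since today.day < 21, the reference month rolls back twice, giving the
    # span March 21 through April 20 inclusive.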
class MobileSelectFilter(ReportAppFilter):
def get_filter_value(self, user, ui_filter):
return None
class AncestorLocationTypeFilter(ReportAppFilter):
ancestor_location_type_name = StringProperty()
def get_filter_value(self, user, ui_filter):
from corehq.apps.locations.models import SQLLocation
try:
ancestor = user.sql_location.get_ancestors(include_self=True).\
get(location_type__name=self.ancestor_location_type_name)
except (AttributeError, SQLLocation.DoesNotExist):
# user.sql_location is None, or location does not have an ancestor of that type
return None
return ancestor.location_id
class NumericFilter(ReportAppFilter):
    operator = StringProperty(choices=['=', '!=', '<', '<=', '>', '>='])
operand = FloatProperty()
@classmethod
def wrap(cls, doc):
doc['operand'] = float(doc['operand'])
return super(NumericFilter, cls).wrap(doc)
def get_filter_value(self, user, ui_filter):
return {
'operator': self.operator,
'operand': self.operand,
}
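    # Illustrative example: NumericFilter(operator='>=', operand=10).
    # get_filter_value(user, ui_filter) returns {'operator': '>=', 'operand': 10.0}.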
class ReportAppConfig(DocumentSchema):
"""
    Class for configuring how a user-configurable report shows up in an app.
"""
report_id = StringProperty(required=True)
header = DictProperty()
localized_description = DictProperty()
xpath_description = StringProperty()
use_xpath_description = BooleanProperty(default=False)
graph_configs = DictProperty(ReportGraphConfig)
filters = SchemaDictProperty(ReportAppFilter)
uuid = StringProperty(required=True)
_report = None
def __init__(self, *args, **kwargs):
super(ReportAppConfig, self).__init__(*args, **kwargs)
if not self.uuid:
self.uuid = random_hex()
@classmethod
def wrap(cls, doc):
# for backwards compatibility with apps that have localized or xpath descriptions
old_description = doc.get('description')
if old_description:
if isinstance(old_description, basestring) and not doc.get('xpath_description'):
doc['xpath_description'] = old_description
elif isinstance(old_description, dict) and not doc.get('localized_description'):
doc['localized_description'] = old_description
if not doc.get('xpath_description'):
doc['xpath_description'] = '""'
return super(ReportAppConfig, cls).wrap(doc)
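    # Illustrative sketch of the migration above: a legacy string description
    # becomes the xpath_description, while a legacy {lang: text} dict becomes
    # the localized_description; xpath_description always ends up non-empty.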
def report(self, domain):
if self._report is None:
from corehq.apps.userreports.models import get_report_config
self._report = get_report_config(self.report_id, domain)[0]
return self._report
class ReportModule(ModuleBase):
"""
    Module for user-configurable reports.
"""
module_type = 'report'
report_configs = SchemaListProperty(ReportAppConfig)
forms = []
_loaded = False
@property
@memoized
def reports(self):
from corehq.apps.userreports.models import get_report_configs
return get_report_configs([r.report_id for r in self.report_configs], self.get_app().domain)
@classmethod
def new_module(cls, name, lang):
module = ReportModule(
name={(lang or 'en'): name or ugettext("Reports")},
case_type='',
)
module.get_or_create_unique_id()
return module
def get_details(self):
from .suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
return ReportModuleSuiteHelper(self).get_details()
def get_custom_entries(self):
from .suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
return ReportModuleSuiteHelper(self).get_custom_entries()
def get_menus(self, supports_module_filter=False):
kwargs = {}
if supports_module_filter:
kwargs['relevant'] = interpolate_xpath(self.module_filter)
menu = suite_models.LocalizedMenu(
id=id_strings.menu_id(self),
menu_locale_id=id_strings.module_locale(self),
media_image=bool(len(self.all_image_paths())),
media_audio=bool(len(self.all_audio_paths())),
image_locale_id=id_strings.module_icon_locale(self),
audio_locale_id=id_strings.module_audio_locale(self),
**kwargs
)
menu.commands.extend([
suite_models.Command(id=id_strings.report_command(config.uuid))
for config in self.report_configs
])
yield menu
def check_report_validity(self):
"""
        Returns (is_valid, valid_report_configs).
        If any report doesn't exist, is_valid is False; otherwise it is True.
        valid_report_configs is a list of all report configs that refer to existing reports.
"""
try:
all_report_ids = [report._id for report in self.reports]
valid_report_configs = [report_config for report_config in self.report_configs
if report_config.report_id in all_report_ids]
is_valid = (len(valid_report_configs) == len(self.report_configs))
except ReportConfigurationNotFoundError:
valid_report_configs = [] # assuming that if one report is in a different domain, they all are
is_valid = False
return namedtuple('ReportConfigValidity', 'is_valid valid_report_configs')(
is_valid=is_valid,
valid_report_configs=valid_report_configs
)
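    # Example result (a sketch):
    #     ReportConfigValidity(is_valid=False,
    #                          valid_report_configs=[<configs whose reports still exist>])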
def validate_for_build(self):
errors = super(ReportModule, self).validate_for_build()
if not self.check_report_validity().is_valid:
errors.append({
'type': 'report config ref invalid',
'module': self.get_module_info()
})
return errors
class ShadowModule(ModuleBase, ModuleDetailsMixin):
"""
A module that acts as a shortcut to another module. This module has its own
settings (name, icon/audio, filter, etc.) and its own case list/detail, but
inherits case type and forms from its source module.
"""
module_type = 'shadow'
source_module_id = StringProperty()
forms = []
excluded_form_ids = SchemaListProperty()
case_details = SchemaProperty(DetailPair)
ref_details = SchemaProperty(DetailPair)
put_in_root = BooleanProperty(default=False)
case_list = SchemaProperty(CaseList)
referral_list = SchemaProperty(CaseList)
task_list = SchemaProperty(CaseList)
parent_select = SchemaProperty(ParentSelect)
get_forms = IndexedSchema.Getter('forms')
@classmethod
def wrap(cls, data):
data = cls.wrap_details(data)
return super(ShadowModule, cls).wrap(data)
@property
def source_module(self):
if self.source_module_id:
try:
return self._parent.get_module_by_unique_id(self.source_module_id,
error=_("Could not find source module for '{}'.").format(self.default_name()))
except ModuleNotFoundException:
pass
return None
@property
def case_type(self):
if not self.source_module:
return None
return self.source_module.case_type
@property
def requires(self):
if not self.source_module:
return 'none'
return self.source_module.requires
@property
def root_module_id(self):
if not self.source_module:
return None
return self.source_module.root_module_id
def get_suite_forms(self):
if not self.source_module:
return []
return [f for f in self.source_module.get_forms() if f.unique_id not in self.excluded_form_ids]
@parse_int([1])
def get_form(self, i):
return None
def requires_case_details(self):
if not self.source_module:
return False
return self.source_module.requires_case_details()
def get_case_types(self):
if not self.source_module:
return []
return self.source_module.get_case_types()
@memoized
def get_subcase_types(self):
if not self.source_module:
return []
return self.source_module.get_subcase_types()
@memoized
def all_forms_require_a_case(self):
if not self.source_module:
return []
return self.source_module.all_forms_require_a_case()
@classmethod
def new_module(cls, name, lang):
lang = lang or 'en'
detail = Detail(
columns=[DetailColumn(
format='plain',
header={(lang or 'en'): ugettext("Name")},
field='name',
model='case',
)]
)
module = ShadowModule(
name={(lang or 'en'): name or ugettext("Untitled Module")},
case_details=DetailPair(
short=Detail(detail.to_json()),
long=Detail(detail.to_json()),
),
)
module.get_or_create_unique_id()
return module
def validate_for_build(self):
errors = super(ShadowModule, self).validate_for_build()
errors += self.validate_details_for_build()
if not self.source_module:
errors.append({
'type': 'no source module id',
'module': self.get_module_info()
})
return errors
class LazyBlobDoc(BlobMixin):
"""LazyAttachmentDoc for blob db
Cache blobs in local memory (for this request)
and in django cache (for the next few requests)
and commit to couchdb.
See also `dimagi.utils.couch.lazy_attachment_doc.LazyAttachmentDoc`
Cache strategy:
- on fetch, check in local memory, then cache
- if both are a miss, fetch from couchdb and store in both
- after an attachment is committed to the blob db and the
      save has succeeded, save the attachment in the cache
"""
def __init__(self, *args, **kwargs):
super(LazyBlobDoc, self).__init__(*args, **kwargs)
self._LAZY_ATTACHMENTS = {}
# to cache fetched attachments
# these we do *not* send back down upon save
self._LAZY_ATTACHMENTS_CACHE = {}
@classmethod
def wrap(cls, data):
if "_attachments" in data:
data = data.copy()
attachments = data.pop("_attachments").copy()
if cls.migrating_blobs_from_couch:
# preserve stubs so couch attachments don't get deleted on save
stubs = {}
for name, value in list(attachments.items()):
if isinstance(value, dict) and "stub" in value:
stubs[name] = attachments.pop(name)
if stubs:
data["_attachments"] = stubs
else:
attachments = None
self = super(LazyBlobDoc, cls).wrap(data)
if attachments:
for name, attachment in attachments.items():
if isinstance(attachment, basestring):
info = {"content": attachment}
else:
raise ValueError("Unknown attachment format: {!r}"
.format(attachment))
self.lazy_put_attachment(name=name, **info)
return self
def __attachment_cache_key(self, name):
return u'lazy_attachment/{id}/{name}'.format(id=self.get_id, name=name)
def __set_cached_attachment(self, name, content, timeout=60*60*24):
cache.set(self.__attachment_cache_key(name), content, timeout=timeout)
self._LAZY_ATTACHMENTS_CACHE[name] = content
def __get_cached_attachment(self, name):
try:
# it has been fetched already during this request
content = self._LAZY_ATTACHMENTS_CACHE[name]
except KeyError:
content = cache.get(self.__attachment_cache_key(name))
if content is not None:
self._LAZY_ATTACHMENTS_CACHE[name] = content
return content
def put_attachment(self, content, name=None, *args, **kw):
cache.delete(self.__attachment_cache_key(name))
self._LAZY_ATTACHMENTS_CACHE.pop(name, None)
return super(LazyBlobDoc, self).put_attachment(content, name, *args, **kw)
def lazy_put_attachment(self, content, name=None, content_type=None,
content_length=None):
"""
Ensure the attachment is available through lazy_fetch_attachment
and that upon self.save(), the attachments are put to the doc as well
"""
self._LAZY_ATTACHMENTS[name] = {
'content': content,
'content_type': content_type,
'content_length': content_length,
}
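    # Usage sketch (the attachment name and content here are assumptions,
    # not part of this module):
    #     doc.lazy_put_attachment('<xml/>', name='form.xml',
    #                             content_type='text/xml')
    #     doc.save()  # save() commits staged attachments to the blob db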
def lazy_fetch_attachment(self, name):
# it has been put/lazy-put already during this request
if name in self._LAZY_ATTACHMENTS:
content = self._LAZY_ATTACHMENTS[name]['content']
else:
content = self.__get_cached_attachment(name)
if content is None:
try:
content = self.fetch_attachment(name)
except ResourceNotFound as e:
# django cache will pickle this exception for you
# but e.response isn't picklable
if hasattr(e, 'response'):
del e.response
content = e
self.__set_cached_attachment(name, content, timeout=60*5)
raise
else:
self.__set_cached_attachment(name, content)
if isinstance(content, ResourceNotFound):
raise content
return content
def lazy_list_attachments(self):
keys = set()
keys.update(getattr(self, '_LAZY_ATTACHMENTS', None) or {})
keys.update(self.blobs or {})
return keys
def save(self, **params):
def super_save():
super(LazyBlobDoc, self).save(**params)
if self._LAZY_ATTACHMENTS:
with self.atomic_blobs(super_save):
for name, info in self._LAZY_ATTACHMENTS.items():
if not info['content_type']:
info['content_type'] = ';'.join(filter(None, guess_type(name)))
super(LazyBlobDoc, self).put_attachment(name=name, **info)
# super_save() has succeeded by now
for name, info in self._LAZY_ATTACHMENTS.items():
self.__set_cached_attachment(name, info['content'])
self._LAZY_ATTACHMENTS.clear()
else:
super_save()
class VersionedDoc(LazyBlobDoc):
"""
A document that keeps an auto-incrementing version number, knows how to make copies of itself,
delete a copy of itself, and revert back to an earlier copy of itself.
"""
domain = StringProperty()
copy_of = StringProperty()
version = IntegerProperty()
short_url = StringProperty()
short_odk_url = StringProperty()
short_odk_media_url = StringProperty()
_meta_fields = ['_id', '_rev', 'domain', 'copy_of', 'version', 'short_url', 'short_odk_url', 'short_odk_media_url']
@property
def id(self):
return self._id
def save(self, response_json=None, increment_version=None, **params):
if increment_version is None:
increment_version = not self.copy_of
if increment_version:
self.version = self.version + 1 if self.version else 1
super(VersionedDoc, self).save(**params)
if response_json is not None:
if 'update' not in response_json:
response_json['update'] = {}
response_json['update']['app-version'] = self.version
def make_build(self):
assert self.get_id
assert self.copy_of is None
cls = self.__class__
copies = cls.view('app_manager/applications', key=[self.domain, self._id, self.version], include_docs=True, limit=1).all()
if copies:
copy = copies[0]
else:
copy = deepcopy(self.to_json())
bad_keys = ('_id', '_rev', '_attachments', 'external_blobs',
'short_url', 'short_odk_url', 'short_odk_media_url', 'recipients')
for bad_key in bad_keys:
if bad_key in copy:
del copy[bad_key]
copy = cls.wrap(copy)
copy['copy_of'] = self._id
copy.copy_attachments(self)
return copy
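    # Usage sketch (`app` is assumed to be a saved working copy, i.e. one
    # whose copy_of is None):
    #     build = app.make_build()
    #     build.save(increment_version=False)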
def copy_attachments(self, other, regexp=ATTACHMENT_REGEX):
for name in other.lazy_list_attachments() or {}:
if regexp is None or re.match(regexp, name):
self.lazy_put_attachment(other.lazy_fetch_attachment(name), name)
def make_reversion_to_copy(self, copy):
"""
Replaces couch doc with a copy of the backup ("copy").
        Returns another Application/RemoteApp referring to this
updated couch doc. The returned doc should be used in place of
the original doc, i.e. should be called as follows:
app = app.make_reversion_to_copy(copy)
app.save()
"""
if copy.copy_of != self._id:
raise VersioningError("%s is not a copy of %s" % (copy, self))
app = deepcopy(copy.to_json())
app['_rev'] = self._rev
app['_id'] = self._id
app['version'] = self.version
app['copy_of'] = None
app.pop('_attachments', None)
app.pop('external_blobs', None)
cls = self.__class__
app = cls.wrap(app)
app.copy_attachments(copy)
return app
def delete_copy(self, copy):
if copy.copy_of != self._id:
raise VersioningError("%s is not a copy of %s" % (copy, self))
copy.delete_app()
copy.save(increment_version=False)
def scrub_source(self, source):
"""
To be overridden.
Use this to scrub out anything
that should be shown in the
application source, such as ids, etc.
"""
return source
def export_json(self, dump_json=True):
source = deepcopy(self.to_json())
for field in self._meta_fields:
if field in source:
del source[field]
_attachments = {}
for name in self.lazy_list_attachments():
if re.match(ATTACHMENT_REGEX, name):
# FIXME loss of metadata (content type, etc.)
_attachments[name] = self.lazy_fetch_attachment(name)
# the '_attachments' value is a dict of `name: blob_content`
# pairs, and is part of the exported (serialized) app interface
source['_attachments'] = _attachments
source.pop("external_blobs", None)
source = self.scrub_source(source)
return json.dumps(source) if dump_json else source
@classmethod
def from_source(cls, source, domain):
for field in cls._meta_fields:
if field in source:
del source[field]
source['domain'] = domain
app = cls.wrap(source)
return app
def is_deleted(self):
return self.doc_type.endswith(DELETED_SUFFIX)
def unretire(self):
self.doc_type = self.get_doc_type()
self.save()
def get_doc_type(self):
if self.doc_type.endswith(DELETED_SUFFIX):
return self.doc_type[:-len(DELETED_SUFFIX)]
else:
return self.doc_type
def absolute_url_property(method):
"""
Helper for the various fully qualified application URLs
Turns a method returning an unqualified URL
into a property returning a fully qualified URL
(e.g., '/my_url/' => 'https://www.commcarehq.org/my_url/')
Expects `self.url_base` to be fully qualified url base
"""
@wraps(method)
def _inner(self):
return "%s%s" % (self.url_base, method(self))
return property(_inner)
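# Usage sketch (`example_url` is a hypothetical method, not part of this
# module):
#     class MyDoc(...):
#         @absolute_url_property
#         def example_url(self):
#             return '/my_url/'
#     # instance.example_url -> '<url_base>/my_url/'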
class BuildProfile(DocumentSchema):
name = StringProperty()
langs = StringListProperty()
def __eq__(self, other):
return self.langs == other.langs
def __ne__(self, other):
return not self.__eq__(other)
class MediaList(DocumentSchema):
media_refs = StringListProperty()
class ApplicationBase(VersionedDoc, SnapshotMixin,
CommCareFeatureSupportMixin,
CommentMixin):
"""
Abstract base class for Application and RemoteApp.
Contains methods for generating the various files and zipping them into CommCare.jar
See note at top of file for high-level overview.
"""
recipients = StringProperty(default="")
# this is the supported way of specifying which commcare build to use
build_spec = SchemaProperty(BuildSpec)
platform = StringProperty(
choices=["nokia/s40", "nokia/s60", "winmo", "generic"],
default="nokia/s40"
)
text_input = StringProperty(
choices=['roman', 'native', 'custom-keys', 'qwerty'],
default="roman"
)
# The following properties should only appear on saved builds
# built_with stores a record of CommCare build used in a saved app
built_with = SchemaProperty(BuildRecord)
build_signed = BooleanProperty(default=True)
built_on = DateTimeProperty(required=False)
build_comment = StringProperty()
comment_from = StringProperty()
build_broken = BooleanProperty(default=False)
# not used yet, but nice for tagging/debugging
# currently only canonical value is 'incomplete-build',
# for when build resources aren't found where they should be
build_broken_reason = StringProperty()
# watch out for a past bug:
# when reverting to a build that happens to be released
    # that got copied into the new app doc, and when new releases were made,
# they were automatically starred
    # AFAIK this is fixed in code, but may rear its ugly head in an as-yet-not-understood
# way for apps that already had this problem. Just keep an eye out
is_released = BooleanProperty(default=False)
# django-style salted hash of the admin password
admin_password = StringProperty()
# a=Alphanumeric, n=Numeric, x=Neither (not allowed)
admin_password_charset = StringProperty(choices=['a', 'n', 'x'], default='n')
langs = StringListProperty()
secure_submissions = BooleanProperty(default=False)
# metadata for data platform
amplifies_workers = StringProperty(
choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
default=AMPLIFIES_NOT_SET
)
amplifies_project = StringProperty(
choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
default=AMPLIFIES_NOT_SET
)
minimum_use_threshold = StringProperty(
default='15'
)
experienced_threshold = StringProperty(
default='3'
)
# exchange properties
cached_properties = DictProperty()
description = StringProperty()
deployment_date = DateTimeProperty()
phone_model = StringProperty()
user_type = StringProperty()
attribution_notes = StringProperty()
# always false for RemoteApp
case_sharing = BooleanProperty(default=False)
vellum_case_management = BooleanProperty(default=False)
build_profiles = SchemaDictProperty(BuildProfile)
# each language is a key and the value is a list of multimedia referenced in that language
media_language_map = SchemaDictProperty(MediaList)
use_j2me_endpoint = BooleanProperty(default=False)
# Whether or not the Application has had any forms submitted against it
has_submissions = BooleanProperty(default=False)
@classmethod
def wrap(cls, data):
should_save = False
# scrape for old conventions and get rid of them
if 'commcare_build' in data:
version, build_number = data['commcare_build'].split('/')
data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
del data['commcare_build']
if 'commcare_tag' in data:
version, build_number = current_builds.TAG_MAP[data['commcare_tag']]
data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
del data['commcare_tag']
        if "built_with" in data and isinstance(data['built_with'], basestring):
data['built_with'] = BuildSpec.from_string(data['built_with']).to_json()
if 'native_input' in data:
if 'text_input' not in data:
data['text_input'] = 'native' if data['native_input'] else 'roman'
del data['native_input']
if 'build_langs' in data:
if data['build_langs'] != data['langs'] and 'build_profiles' not in data:
data['build_profiles'] = {
uuid.uuid4().hex: dict(
name=', '.join(data['build_langs']),
langs=data['build_langs']
)
}
should_save = True
del data['build_langs']
if 'original_doc' in data:
data['copy_history'] = [data.pop('original_doc')]
should_save = True
data["description"] = data.get('description') or data.get('short_description')
self = super(ApplicationBase, cls).wrap(data)
if not self.build_spec or self.build_spec.is_null():
self.build_spec = get_default_build_spec()
if should_save:
self.save()
return self
def rename_lang(self, old_lang, new_lang):
validate_lang(new_lang)
def is_remote_app(self):
return False
def get_latest_app(self, released_only=True):
if released_only:
return get_app(self.domain, self.get_id, latest=True)
else:
return self.view('app_manager/applications',
startkey=[self.domain, self.get_id, {}],
endkey=[self.domain, self.get_id],
include_docs=True,
limit=1,
descending=True,
).first()
@memoized
def get_latest_saved(self):
"""
        Returns the latest released build if one exists, otherwise the latest
        saved build of any kind. (Very similar to get_latest_app.)
"""
doc = (get_latest_released_app_doc(self.domain, self._id) or
get_latest_build_doc(self.domain, self._id))
return self.__class__.wrap(doc) if doc else None
def set_admin_password(self, raw_password):
salt = os.urandom(5).encode('hex')
self.admin_password = make_password(raw_password, salt=salt)
if raw_password.isnumeric():
self.admin_password_charset = 'n'
elif raw_password.isalnum():
self.admin_password_charset = 'a'
else:
self.admin_password_charset = 'x'
def check_password_charset(self):
errors = []
if hasattr(self, 'profile'):
password_format = self.profile.get('properties', {}).get('password_format', 'n')
message = ('Your app requires {0} passwords '
'but the admin password is not {0}')
if password_format == 'n' and self.admin_password_charset in 'ax':
errors.append({'type': 'password_format',
'message': message.format('numeric')})
if password_format == 'a' and self.admin_password_charset in 'x':
errors.append({'type': 'password_format',
'message': message.format('alphanumeric')})
return errors
def get_build(self):
return self.build_spec.get_build()
@property
def build_version(self):
# `LooseVersion`s are smart!
# LooseVersion('2.12.0') > '2.2'
# (even though '2.12.0' < '2.2')
if self.build_spec.version:
return LooseVersion(self.build_spec.version)
@property
def commcare_minor_release(self):
"""This is mostly just for views"""
return '%d.%d' % self.build_spec.minor_release()
@property
def short_name(self):
return self.name if len(self.name) <= 12 else '%s..' % self.name[:10]
@property
def has_careplan_module(self):
return False
@property
def url_base(self):
custom_base_url = getattr(self, 'custom_base_url', None)
return custom_base_url or get_url_base()
@absolute_url_property
def post_url(self):
if self.secure_submissions:
url_name = 'receiver_secure_post_with_app_id'
else:
url_name = 'receiver_post_with_app_id'
return reverse(url_name, args=[self.domain, self.get_id])
@absolute_url_property
def key_server_url(self):
return reverse('key_server_url', args=[self.domain])
@absolute_url_property
def ota_restore_url(self):
return reverse('app_aware_restore', args=[self.domain, self._id])
@absolute_url_property
def form_record_url(self):
return '/a/%s/api/custom/pact_formdata/v1/' % self.domain
@absolute_url_property
def hq_profile_url(self):
# RemoteApp already has a property called "profile_url",
# Application.profile_url just points here to stop the conflict
# http://manage.dimagi.com/default.asp?227088#1149422
return "%s?latest=true" % (
reverse('download_profile', args=[self.domain, self._id])
)
@absolute_url_property
def media_profile_url(self):
return "%s?latest=true" % (
reverse('download_media_profile', args=[self.domain, self._id])
)
@property
def profile_loc(self):
return "jr://resource/profile.xml"
@absolute_url_property
def jar_url(self):
return reverse('download_jar', args=[self.domain, self._id])
def get_jar_path(self):
spec = {
'nokia/s40': 'Nokia/S40',
'nokia/s60': 'Nokia/S60',
'generic': 'Generic/Default',
'winmo': 'Native/WinMo'
}[self.platform]
if self.platform in ('nokia/s40', 'nokia/s60'):
spec += {
('native',): '-native-input',
('roman',): '-generic',
('custom-keys',): '-custom-keys',
('qwerty',): '-qwerty'
}[(self.text_input,)]
return spec
def get_jadjar(self):
return self.get_build().get_jadjar(self.get_jar_path(), self.use_j2me_endpoint)
def validate_fixtures(self):
if not domain_has_privilege(self.domain, privileges.LOOKUP_TABLES):
# remote apps don't support get_forms yet.
# for now they can circumvent the fixture limitation. sneaky bastards.
if hasattr(self, 'get_forms'):
for form in self.get_forms():
if form.has_fixtures:
raise PermissionDenied(_(
"Usage of lookup tables is not supported by your "
"current subscription. Please upgrade your "
"subscription before using this feature."
))
def validate_intents(self):
if domain_has_privilege(self.domain, privileges.CUSTOM_INTENTS):
return
if hasattr(self, 'get_forms'):
for form in self.get_forms():
intents = form.wrapped_xform().odk_intents
if intents:
if not domain_has_privilege(self.domain, privileges.TEMPLATED_INTENTS):
raise PermissionDenied(_(
"Usage of integrations is not supported by your "
"current subscription. Please upgrade your "
"subscription before using this feature."
))
else:
templates = next(app_callout_templates)
if len(set(intents) - set(t['id'] for t in templates)):
raise PermissionDenied(_(
"Usage of external integration is not supported by your "
"current subscription. Please upgrade your "
"subscription before using this feature."
))
def validate_jar_path(self):
build = self.get_build()
setting = commcare_settings.get_commcare_settings_lookup()['hq']['text_input']
value = self.text_input
setting_version = setting['since'].get(value)
if setting_version:
setting_version = tuple(map(int, setting_version.split('.')))
my_version = build.minor_release()
if my_version < setting_version:
i = setting['values'].index(value)
assert i != -1
name = _(setting['value_names'][i])
raise AppEditingError((
'%s Text Input is not supported '
'in CommCare versions before %s.%s. '
'(You are using %s.%s)'
) % ((name,) + setting_version + my_version))
@property
def advanced_app_builder(self):
properties = (self.profile or {}).get('properties', {})
return properties.get('advanced_app_builder', 'false') == 'true'
@property
def jad_settings(self):
settings = {
'JavaRosa-Admin-Password': self.admin_password,
'Profile': self.profile_loc,
'MIDlet-Jar-URL': self.jar_url,
#'MIDlet-Name': self.name,
            # 'Released-on' (e.g. 2011-Apr-11 20:45) is added in create_jadjar_from_build_files
'CommCare-Release': "true",
}
if self.build_version < '2.8':
settings['Build-Number'] = self.version
return settings
def create_build_files(self, save=False, build_profile_id=None):
built_on = datetime.datetime.utcnow()
all_files = self.create_all_files(build_profile_id)
if save:
self.date_created = built_on
self.built_on = built_on
self.built_with = BuildRecord(
version=self.build_spec.version,
build_number=self.version,
datetime=built_on,
)
for filepath in all_files:
self.lazy_put_attachment(all_files[filepath],
'files/%s' % filepath)
def create_jadjar_from_build_files(self, save=False):
self.validate_jar_path()
with CriticalSection(['create_jadjar_' + self._id]):
try:
return (
self.lazy_fetch_attachment('CommCare.jad'),
self.lazy_fetch_attachment('CommCare.jar'),
)
except (ResourceError, KeyError):
all_files = {
filename[len('files/'):]: self.lazy_fetch_attachment(filename)
for filename in self.blobs if filename.startswith('files/')
}
all_files = {
name: (contents if isinstance(contents, str) else contents.encode('utf-8'))
for name, contents in all_files.items()
}
release_date = self.built_with.datetime or datetime.datetime.utcnow()
jad_settings = {
'Released-on': release_date.strftime("%Y-%b-%d %H:%M"),
}
jad_settings.update(self.jad_settings)
jadjar = self.get_jadjar().pack(all_files, jad_settings)
if save:
self.lazy_put_attachment(jadjar.jad, 'CommCare.jad')
self.lazy_put_attachment(jadjar.jar, 'CommCare.jar')
self.built_with.signed = jadjar.signed
return jadjar.jad, jadjar.jar
def validate_app(self):
errors = []
errors.extend(self.check_password_charset())
try:
self.validate_fixtures()
self.validate_intents()
self.create_all_files()
except CaseXPathValidationError as cve:
errors.append({
'type': 'invalid case xpath reference',
'module': cve.module,
'form': cve.form,
})
except UserCaseXPathValidationError as ucve:
errors.append({
'type': 'invalid user case xpath reference',
'module': ucve.module,
'form': ucve.form,
})
except (AppEditingError, XFormValidationError, XFormException,
PermissionDenied, SuiteValidationError) as e:
errors.append({'type': 'error', 'message': unicode(e)})
except Exception as e:
if settings.DEBUG:
raise
# this is much less useful/actionable without a URL
# so make sure to include the request
logging.error('Unexpected error building app', exc_info=True,
extra={'request': view_utils.get_request()})
errors.append({'type': 'error', 'message': 'unexpected error: %s' % e})
return errors
@absolute_url_property
def odk_profile_url(self):
return reverse('download_odk_profile', args=[self.domain, self._id])
@absolute_url_property
def odk_media_profile_url(self):
return reverse('download_odk_media_profile', args=[self.domain, self._id])
@property
def odk_profile_display_url(self):
return self.short_odk_url or self.odk_profile_url
@property
def odk_media_profile_display_url(self):
return self.short_odk_media_url or self.odk_media_profile_url
def get_odk_qr_code(self, with_media=False, build_profile_id=None):
"""Returns a QR code, as a PNG to install on CC-ODK"""
try:
return self.lazy_fetch_attachment("qrcode.png")
except ResourceNotFound:
from pygooglechart import QRChart
HEIGHT = WIDTH = 250
code = QRChart(HEIGHT, WIDTH)
url = self.odk_profile_url if not with_media else self.odk_media_profile_url
if build_profile_id is not None:
url += '?profile={profile_id}'.format(profile_id=build_profile_id)
code.add_data(url)
# "Level L" error correction with a 0 pixel margin
code.set_ec('L', 0)
f, fname = tempfile.mkstemp()
code.download(fname)
os.close(f)
with open(fname, "rb") as f:
png_data = f.read()
self.lazy_put_attachment(png_data, "qrcode.png",
content_type="image/png")
return png_data
def generate_shortened_url(self, view_name, build_profile_id=None):
try:
if settings.BITLY_LOGIN:
if build_profile_id is not None:
long_url = "{}{}?profile={}".format(
self.url_base, reverse(view_name, args=[self.domain, self._id]), build_profile_id
)
else:
long_url = "{}{}".format(self.url_base, reverse(view_name, args=[self.domain, self._id]))
shortened_url = bitly.shorten(long_url)
else:
shortened_url = None
except Exception:
logging.exception("Problem creating bitly url for app %s. Do you have network?" % self.get_id)
else:
return shortened_url
def get_short_url(self, build_profile_id=None):
if not build_profile_id:
if not self.short_url:
self.short_url = self.generate_shortened_url('download_jad')
self.save()
return self.short_url
else:
return self.generate_shortened_url('download_jad', build_profile_id)
def get_short_odk_url(self, with_media=False, build_profile_id=None):
if not build_profile_id:
if with_media:
if not self.short_odk_media_url:
self.short_odk_media_url = self.generate_shortened_url('download_odk_media_profile')
self.save()
return self.short_odk_media_url
else:
if not self.short_odk_url:
self.short_odk_url = self.generate_shortened_url('download_odk_profile')
self.save()
return self.short_odk_url
else:
if with_media:
return self.generate_shortened_url('download_odk_media_profile', build_profile_id)
else:
return self.generate_shortened_url('download_odk_profile', build_profile_id)
def fetch_jar(self):
return self.get_jadjar().fetch_jar()
def make_build(self, comment=None, user_id=None, previous_version=None):
copy = super(ApplicationBase, self).make_build()
if not copy._id:
# I expect this always to be the case
# but check explicitly so as not to change the _id if it exists
copy._id = copy.get_db().server.next_uuid()
force_new_forms = False
if previous_version and self.build_profiles != previous_version.build_profiles:
force_new_forms = True
copy.set_form_versions(previous_version, force_new_forms)
copy.set_media_versions(previous_version)
copy.create_build_files(save=True)
        # since this is hard to put in a test,
        # assert here so that tests error if copy._id is ever None
assert copy._id
copy.build_comment = comment
copy.comment_from = user_id
if user_id:
user = CouchUser.get(user_id)
if not user.has_built_app:
user.has_built_app = True
user.save()
copy.is_released = False
if not copy.is_remote_app():
copy.update_mm_map()
return copy
def delete_app(self):
domain_has_apps.clear(self.domain)
self.doc_type += '-Deleted'
record = DeleteApplicationRecord(
domain=self.domain,
app_id=self.id,
datetime=datetime.datetime.utcnow()
)
record.save()
return record
def save(self, response_json=None, increment_version=None, **params):
if not self._rev and not domain_has_apps(self.domain):
domain_has_apps.clear(self.domain)
user = getattr(view_utils.get_request(), 'couch_user', None)
if user and user.days_since_created == 0:
track_workflow(user.get_email(), 'Saved the App Builder within first 24 hours')
super(ApplicationBase, self).save(
response_json=response_json, increment_version=increment_version, **params)
def set_form_versions(self, previous_version, force_new_version=False):
# by default doing nothing here is fine.
pass
def set_media_versions(self, previous_version):
pass
def update_mm_map(self):
if self.build_profiles and domain_has_privilege(self.domain, privileges.BUILD_PROFILES):
for lang in self.langs:
self.media_language_map[lang] = MediaList()
for form in self.get_forms():
xml = form.wrapped_xform()
for lang in self.langs:
media = []
for path in xml.all_media_references(lang):
if path is not None:
media.append(path)
map_item = self.multimedia_map.get(path)
                            # don't break if multimedia is missing
if map_item:
map_item.form_media = True
self.media_language_map[lang].media_refs.extend(media)
else:
self.media_language_map = {}
def get_build_langs(self, build_profile_id=None):
if build_profile_id is not None:
return self.build_profiles[build_profile_id].langs
else:
return self.langs
def validate_lang(lang):
if not re.match(r'^[a-z]{2,3}(-[a-z]*)?$', lang):
raise ValueError("Invalid Language")
def validate_property(property):
"""
Validate a case property name
>>> validate_property('parent/maternal-grandmother_fullName')
>>> validate_property('foo+bar')
Traceback (most recent call last):
...
ValueError: Invalid Property
"""
# this regex is also copied in propertyList.ejs
if not re.match(r'^[a-zA-Z][\w_-]*(/[a-zA-Z][\w_-]*)*$', property):
raise ValueError("Invalid Property")
def validate_detail_screen_field(field):
# If you change here, also change here:
# corehq/apps/app_manager/static/app_manager/js/detail-screen-config.js
field_re = r'^([a-zA-Z][\w_-]*:)*([a-zA-Z][\w_-]*/)*#?[a-zA-Z][\w_-]*$'
if not re.match(field_re, field):
raise ValueError("Invalid Sort Field")
class SavedAppBuild(ApplicationBase):
def to_saved_build_json(self, timezone):
data = super(SavedAppBuild, self).to_json().copy()
for key in ('modules', 'user_registration', 'external_blobs',
                    '_attachments', 'profile', 'translations',
'description', 'short_description'):
data.pop(key, None)
built_on_user_time = ServerTime(self.built_on).user_time(timezone)
data.update({
'id': self.id,
'built_on_date': built_on_user_time.ui_string(USER_DATE_FORMAT),
'built_on_time': built_on_user_time.ui_string(USER_TIME_FORMAT),
'menu_item_label': self.built_with.get_menu_item_label(),
'jar_path': self.get_jar_path(),
'short_name': self.short_name,
'enable_offline_install': self.enable_offline_install,
})
comment_from = data['comment_from']
if comment_from:
try:
comment_user = CouchUser.get(comment_from)
except ResourceNotFound:
data['comment_user_name'] = comment_from
else:
data['comment_user_name'] = comment_user.full_name
return data
class Application(ApplicationBase, TranslationMixin, HQMediaMixin):
"""
An Application that can be created entirely through the online interface
"""
modules = SchemaListProperty(ModuleBase)
name = StringProperty()
# profile's schema is {'features': {}, 'properties': {}, 'custom_properties': {}}
# ended up not using a schema because properties is a reserved word
profile = DictProperty()
use_custom_suite = BooleanProperty(default=False)
custom_base_url = StringProperty()
cloudcare_enabled = BooleanProperty(default=False)
translation_strategy = StringProperty(default='select-known',
choices=app_strings.CHOICES.keys())
commtrack_requisition_mode = StringProperty(choices=CT_REQUISITION_MODES)
auto_gps_capture = BooleanProperty(default=False)
date_created = DateTimeProperty()
created_from_template = StringProperty()
use_grid_menus = BooleanProperty(default=False)
grid_form_menus = StringProperty(default='none',
choices=['none', 'all', 'some'])
# legacy property; kept around to be able to identify (deprecated) v1 apps
application_version = StringProperty(default=APP_V2, choices=[APP_V1, APP_V2], required=False)
def assert_app_v2(self):
assert self.application_version == APP_V2
@property
@memoized
def commtrack_enabled(self):
if settings.UNIT_TESTING:
return False # override with .tests.util.commtrack_enabled
domain_obj = Domain.get_by_name(self.domain) if self.domain else None
return domain_obj.commtrack_enabled if domain_obj else False
@classmethod
def wrap(cls, data):
for module in data.get('modules', []):
for attr in ('case_label', 'referral_label'):
                if attr not in module:
module[attr] = {}
for lang in data['langs']:
if not module['case_label'].get(lang):
module['case_label'][lang] = commcare_translations.load_translations(lang).get('cchq.case', 'Cases')
if not module['referral_label'].get(lang):
module['referral_label'][lang] = commcare_translations.load_translations(lang).get('cchq.referral', 'Referrals')
data.pop('commtrack_enabled', None) # Remove me after migrating apps
self = super(Application, cls).wrap(data)
# make sure all form versions are None on working copies
if not self.copy_of:
for form in self.get_forms():
form.version = None
# weird edge case where multimedia_map gets set to null and causes issues
if self.multimedia_map is None:
self.multimedia_map = {}
return self
def save(self, *args, **kwargs):
super(Application, self).save(*args, **kwargs)
# Import loop if this is imported at the top
# TODO: revamp so signal_connections <- models <- signals
from corehq.apps.app_manager import signals
signals.app_post_save.send(Application, application=self)
def make_reversion_to_copy(self, copy):
app = super(Application, self).make_reversion_to_copy(copy)
for form in app.get_forms():
# reset the form's validation cache, since the form content is
# likely to have changed in the revert!
form.validation_cache = None
form.version = None
app.build_broken = False
return app
@property
def profile_url(self):
return self.hq_profile_url
@absolute_url_property
def suite_url(self):
return reverse('download_suite', args=[self.domain, self.get_id])
@property
def suite_loc(self):
if self.enable_relative_suite_path:
return './suite.xml'
else:
return "jr://resource/suite.xml"
@absolute_url_property
def media_suite_url(self):
return reverse('download_media_suite', args=[self.domain, self.get_id])
@property
def media_suite_loc(self):
if self.enable_relative_suite_path:
return "./media_suite.xml"
else:
return "jr://resource/media_suite.xml"
@property
def default_language(self):
return self.langs[0] if len(self.langs) > 0 else "en"
def fetch_xform(self, module_id=None, form_id=None, form=None, build_profile_id=None):
if not form:
form = self.get_module(module_id).get_form(form_id)
return form.validate_form().render_xform(build_profile_id).encode('utf-8')
def set_form_versions(self, previous_version, force_new_version=False):
"""
        Set the 'version' property on each form: to the current app version if the form
        is new or has changed since the last build, otherwise to the version from the last build.
"""
def _hash(val):
return hashlib.md5(val).hexdigest()
if previous_version:
for form_stuff in self.get_forms(bare=False):
filename = 'files/%s' % self.get_form_filename(**form_stuff)
form = form_stuff["form"]
if not force_new_version:
form_version = None
try:
previous_form = previous_version.get_form(form.unique_id)
# take the previous version's compiled form as-is
# (generation code may have changed since last build)
previous_source = previous_version.fetch_attachment(filename)
except (ResourceNotFound, FormNotFoundException):
pass
else:
previous_hash = _hash(previous_source)
                        # hack - temporarily set my version to the previous version
                        # so that the version itself isn't treated as a diff
previous_form_version = previous_form.get_version()
form.version = previous_form_version
my_hash = _hash(self.fetch_xform(form=form))
if previous_hash == my_hash:
form_version = previous_form_version
if form_version is None:
form.version = None
else:
form.version = form_version
else:
form.version = None
def set_media_versions(self, previous_version):
"""
Set the media version numbers for all media in the app to the current app version
if the media is new or has changed since the last build. Otherwise set it to the
version from the last build.
"""
# access to .multimedia_map is slow
prev_multimedia_map = previous_version.multimedia_map if previous_version else {}
for path, map_item in self.multimedia_map.iteritems():
prev_map_item = prev_multimedia_map.get(path, None)
if prev_map_item and prev_map_item.unique_id:
# Re-use the id so CommCare knows it's the same resource
map_item.unique_id = prev_map_item.unique_id
if (prev_map_item and prev_map_item.version
and prev_map_item.multimedia_id == map_item.multimedia_id):
map_item.version = prev_map_item.version
else:
map_item.version = self.version
def ensure_module_unique_ids(self, should_save=False):
"""
Creates unique_ids for modules that don't have unique_id attributes
should_save: the doc will be saved only if should_save is set to True
WARNING: If called on the same doc in different requests without saving,
this function will set different uuid each time,
likely causing unexpected behavior
"""
if any(not mod.unique_id for mod in self.modules):
for mod in self.modules:
mod.get_or_create_unique_id()
if should_save:
self.save()
def create_app_strings(self, lang, build_profile_id=None):
gen = app_strings.CHOICES[self.translation_strategy]
if lang == 'default':
return gen.create_default_app_strings(self, build_profile_id)
else:
return gen.create_app_strings(self, lang)
@property
def skip_validation(self):
properties = (self.profile or {}).get('properties', {})
return properties.get('cc-content-valid', 'yes')
@property
def jad_settings(self):
s = super(Application, self).jad_settings
s.update({
'Skip-Validation': self.skip_validation,
})
return s
def create_profile(self, is_odk=False, with_media=False,
template='app_manager/profile.xml', build_profile_id=None):
self__profile = self.profile
app_profile = defaultdict(dict)
for setting in commcare_settings.get_custom_commcare_settings():
setting_type = setting['type']
setting_id = setting['id']
if setting_type not in ('properties', 'features'):
setting_value = None
elif setting_id not in self__profile.get(setting_type, {}):
if 'commcare_default' in setting and setting['commcare_default'] != setting['default']:
setting_value = setting['default']
else:
setting_value = None
else:
setting_value = self__profile[setting_type][setting_id]
if setting_value:
app_profile[setting_type][setting_id] = {
'value': setting_value,
'force': setting.get('force', False)
}
# assert that it gets explicitly set once per loop
del setting_value
if self.case_sharing:
app_profile['properties']['server-tether'] = {
'force': True,
'value': 'sync',
}
logo_refs = [logo_name for logo_name in self.logo_refs if logo_name in ANDROID_LOGO_PROPERTY_MAPPING]
if logo_refs and domain_has_privilege(self.domain, privileges.COMMCARE_LOGO_UPLOADER):
for logo_name in logo_refs:
app_profile['properties'][ANDROID_LOGO_PROPERTY_MAPPING[logo_name]] = {
'value': self.logo_refs[logo_name]['path'],
}
if with_media:
profile_url = self.media_profile_url if not is_odk else (self.odk_media_profile_url + '?latest=true')
else:
profile_url = self.profile_url if not is_odk else (self.odk_profile_url + '?latest=true')
if toggles.CUSTOM_PROPERTIES.enabled(self.domain) and "custom_properties" in self__profile:
app_profile['custom_properties'].update(self__profile['custom_properties'])
locale = self.get_build_langs(build_profile_id)[0]
return render_to_string(template, {
'is_odk': is_odk,
'app': self,
'profile_url': profile_url,
'app_profile': app_profile,
'cc_user_domain': cc_user_domain(self.domain),
'include_media_suite': with_media,
'uniqueid': self.copy_of or self.id,
'name': self.name,
'descriptor': u"Profile File",
'build_profile_id': build_profile_id,
'locale': locale
}).encode('utf-8')
@property
def custom_suite(self):
try:
return self.lazy_fetch_attachment('custom_suite.xml')
except ResourceNotFound:
return ""
def set_custom_suite(self, value):
self.put_attachment(value, 'custom_suite.xml')
def create_suite(self, build_profile_id=None):
self.assert_app_v2()
return SuiteGenerator(self, build_profile_id).generate_suite()
def create_media_suite(self, build_profile_id=None):
return MediaSuiteGenerator(self, build_profile_id).generate_suite()
@classmethod
def get_form_filename(cls, type=None, form=None, module=None):
return 'modules-%s/forms-%s.xml' % (module.id, form.id)
def create_all_files(self, build_profile_id=None):
prefix = '' if not build_profile_id else build_profile_id + '/'
files = {
'{}profile.xml'.format(prefix): self.create_profile(is_odk=False, build_profile_id=build_profile_id),
'{}profile.ccpr'.format(prefix): self.create_profile(is_odk=True, build_profile_id=build_profile_id),
'{}media_profile.xml'.format(prefix):
self.create_profile(is_odk=False, with_media=True, build_profile_id=build_profile_id),
'{}media_profile.ccpr'.format(prefix):
self.create_profile(is_odk=True, with_media=True, build_profile_id=build_profile_id),
'{}suite.xml'.format(prefix): self.create_suite(build_profile_id),
'{}media_suite.xml'.format(prefix): self.create_media_suite(build_profile_id),
}
langs_for_build = self.get_build_langs(build_profile_id)
for lang in ['default'] + langs_for_build:
files["{prefix}{lang}/app_strings.txt".format(
prefix=prefix, lang=lang)] = self.create_app_strings(lang, build_profile_id)
for form_stuff in self.get_forms(bare=False):
filename = prefix + self.get_form_filename(**form_stuff)
form = form_stuff['form']
try:
files[filename] = self.fetch_xform(form=form, build_profile_id=build_profile_id)
except XFormException as e:
raise XFormException(_('Error in form "{}": {}').format(trans(form.name), unicode(e)))
return files
get_modules = IndexedSchema.Getter('modules')
@parse_int([1])
def get_module(self, i):
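        # note: `i % len(self.modules)` normalizes negative indices (e.g.
        # the get_module(-1) call in add_module) to a real module index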
try:
return self.modules[i].with_id(i % len(self.modules), self)
except IndexError:
raise ModuleNotFoundException()
def get_module_by_unique_id(self, unique_id, error=''):
def matches(module):
return module.get_or_create_unique_id() == unique_id
for obj in self.get_modules():
if matches(obj):
return obj
if not error:
error = _("Could not find '{unique_id}' in app '{app_id}'.").format(
app_id=self.id, unique_id=unique_id)
raise ModuleNotFoundException(error)
def get_forms(self, bare=True):
for module in self.get_modules():
for form in module.get_forms():
yield form if bare else {
'type': 'module_form',
'module': module,
'form': form
}
def get_form(self, unique_form_id, bare=True):
def matches(form):
return form.get_unique_id() == unique_form_id
for obj in self.get_forms(bare):
if matches(obj if bare else obj['form']):
return obj
raise FormNotFoundException(
("Form in app '%s' with unique id '%s' not found"
% (self.id, unique_form_id)))
def get_form_location(self, unique_form_id):
for m_index, module in enumerate(self.get_modules()):
for f_index, form in enumerate(module.get_forms()):
if unique_form_id == form.unique_id:
return m_index, f_index
raise KeyError("Form in app '%s' with unique id '%s' not found" % (self.id, unique_form_id))
@classmethod
def new_app(cls, domain, name, lang="en"):
app = cls(domain=domain, modules=[], name=name, langs=[lang],
date_created=datetime.datetime.utcnow(), vellum_case_management=True)
return app
def add_module(self, module):
self.modules.append(module)
return self.get_module(-1)
def delete_module(self, module_unique_id):
try:
module = self.get_module_by_unique_id(module_unique_id)
except ModuleNotFoundException:
return None
record = DeleteModuleRecord(
domain=self.domain,
app_id=self.id,
module_id=module.id,
module=module,
datetime=datetime.datetime.utcnow()
)
del self.modules[module.id]
record.save()
return record
def new_form(self, module_id, name, lang, attachment=""):
module = self.get_module(module_id)
return module.new_form(name, lang, attachment)
def delete_form(self, module_unique_id, form_unique_id):
try:
module = self.get_module_by_unique_id(module_unique_id)
form = self.get_form(form_unique_id)
except (ModuleNotFoundException, FormNotFoundException):
return None
record = DeleteFormRecord(
domain=self.domain,
app_id=self.id,
module_unique_id=module_unique_id,
form_id=form.id,
form=form,
datetime=datetime.datetime.utcnow(),
)
record.save()
try:
form.pre_delete_hook()
except NotImplementedError:
pass
del module['forms'][form.id]
return record
def rename_lang(self, old_lang, new_lang):
validate_lang(new_lang)
if old_lang == new_lang:
return
if new_lang in self.langs:
raise AppEditingError("Language %s already exists!" % new_lang)
for i,lang in enumerate(self.langs):
if lang == old_lang:
self.langs[i] = new_lang
for profile in self.build_profiles:
for i, lang in enumerate(profile.langs):
if lang == old_lang:
profile.langs[i] = new_lang
for module in self.get_modules():
module.rename_lang(old_lang, new_lang)
_rename_key(self.translations, old_lang, new_lang)
def rearrange_modules(self, i, j):
modules = self.modules
try:
modules.insert(i, modules.pop(j))
except IndexError:
raise RearrangeError()
self.modules = modules
def rearrange_forms(self, to_module_id, from_module_id, i, j):
"""
        If the case types of the two modules conflict,
        ConflictingCaseTypeError is raised,
        but the rearrangement (confusingly) goes through anyway.
        This is intentional.
"""
to_module = self.get_module(to_module_id)
from_module = self.get_module(from_module_id)
try:
from_module.forms[j].pre_move_hook(from_module, to_module)
except NotImplementedError:
pass
try:
form = from_module.forms.pop(j)
if toggles.APP_MANAGER_V2.enabled(self.domain):
if not to_module.is_surveys and i == 0:
# first form is the reg form
i = 1
if from_module.is_surveys != to_module.is_surveys:
if from_module.is_surveys:
form.requires = "case"
form.actions.update_case = UpdateCaseAction(
condition=FormActionCondition(type='always'))
else:
form.requires = "none"
form.actions.update_case = UpdateCaseAction(
condition=FormActionCondition(type='never'))
to_module.add_insert_form(from_module, form, index=i, with_source=True)
except IndexError:
raise RearrangeError()
if to_module.case_type != from_module.case_type \
and not toggles.APP_MANAGER_V2.enabled(self.domain):
raise ConflictingCaseTypeError()
def scrub_source(self, source):
return update_unique_ids(source)
def copy_form(self, module_id, form_id, to_module_id):
"""
        If the case types of the two modules conflict,
        ConflictingCaseTypeError is raised,
        but the copying (confusingly) goes through anyway.
        This is intentional.
"""
from_module = self.get_module(module_id)
form = from_module.get_form(form_id)
to_module = self.get_module(to_module_id)
self._copy_form(from_module, form, to_module, rename=True)
def _copy_form(self, from_module, form, to_module, *args, **kwargs):
if not form.source:
raise BlankXFormError()
if from_module['case_type'] != to_module['case_type']:
raise ConflictingCaseTypeError()
copy_source = deepcopy(form.to_json())
if 'unique_id' in copy_source:
del copy_source['unique_id']
if 'rename' in kwargs and kwargs['rename']:
for lang, name in copy_source['name'].iteritems():
with override(lang):
copy_source['name'][lang] = _('Copy of {name}').format(name=name)
copy_form = to_module.add_insert_form(from_module, FormBase.wrap(copy_source))
save_xform(self, copy_form, form.source)
@cached_property
def has_case_management(self):
for module in self.get_modules():
for form in module.get_forms():
if len(form.active_actions()) > 0:
return True
return False
@memoized
def case_type_exists(self, case_type):
return case_type in self.get_case_types()
@memoized
def get_case_types(self):
extra_types = set()
if is_usercase_in_use(self.domain):
extra_types.add(USERCASE_TYPE)
return set(chain(*[m.get_case_types() for m in self.get_modules()])) | extra_types
def has_media(self):
return len(self.multimedia_map) > 0
@memoized
def get_xmlns_map(self):
xmlns_map = defaultdict(list)
for form in self.get_forms():
xmlns_map[form.xmlns].append(form)
return xmlns_map
def get_form_by_xmlns(self, xmlns, log_missing=True):
if xmlns == "http://code.javarosa.org/devicereport":
return None
forms = self.get_xmlns_map()[xmlns]
if len(forms) != 1:
if log_missing or len(forms) > 1:
logging.error('App %s in domain %s has %s forms with xmlns %s' % (
self.get_id,
self.domain,
len(forms),
xmlns,
))
return None
else:
form, = forms
return form
def get_questions(self, xmlns):
form = self.get_form_by_xmlns(xmlns)
if not form:
return []
return form.get_questions(self.langs)
def check_subscription(self):
def app_uses_usercase(app):
return any(m.uses_usercase() for m in app.get_modules())
errors = []
if app_uses_usercase(self) and not domain_has_privilege(self.domain, privileges.USER_CASE):
errors.append({
'type': 'subscription',
'message': _('Your application is using User Case functionality. You can remove User Case '
'functionality by opening the User Case Management tab in a form that uses it, and '
'clicking "Remove User Case Properties".')
})
return errors
def validate_app(self):
xmlns_count = defaultdict(int)
errors = []
for lang in self.langs:
if not lang:
errors.append({'type': 'empty lang'})
if not self.modules:
errors.append({'type': "no modules"})
for module in self.get_modules():
errors.extend(module.validate_for_build())
for form in self.get_forms():
errors.extend(form.validate_for_build(validate_module=False))
# make sure that there aren't duplicate xmlns's
xmlns_count[form.xmlns] += 1
for xmlns in xmlns_count:
if xmlns_count[xmlns] > 1:
errors.append({'type': "duplicate xmlns", "xmlns": xmlns})
if any(not module.unique_id for module in self.get_modules()):
raise ModuleIdMissingException
modules_dict = {m.unique_id: m for m in self.get_modules()}
def _parent_select_fn(module):
if hasattr(module, 'parent_select') and module.parent_select.active:
return module.parent_select.module_id
if self._has_dependency_cycle(modules_dict, _parent_select_fn):
errors.append({'type': 'parent cycle'})
errors.extend(self._child_module_errors(modules_dict))
errors.extend(self.check_subscription())
if not errors:
errors = super(Application, self).validate_app()
return errors
def _has_dependency_cycle(self, modules, neighbour_id_fn):
"""
Detect dependency cycles given modules and the neighbour_id_fn
:param modules: A mapping of module unique_ids to Module objects
        :param neighbour_id_fn: function to get the neighbour module unique_id
:return: True if there is a cycle in the module relationship graph
"""
visited = set()
completed = set()
def cycle_helper(m):
if m.id in visited:
if m.id in completed:
return False
return True
visited.add(m.id)
parent = modules.get(neighbour_id_fn(m), None)
if parent is not None and cycle_helper(parent):
return True
completed.add(m.id)
return False
for module in modules.values():
if cycle_helper(module):
return True
return False
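    # Sketch: with modules {'a': A, 'b': B} where neighbour_id_fn maps
    # A -> 'b' and B -> 'a', the helper revisits A before completing it
    # and reports a cycle.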
def _child_module_errors(self, modules_dict):
module_errors = []
def _root_module_fn(module):
if hasattr(module, 'root_module_id'):
return module.root_module_id
if self._has_dependency_cycle(modules_dict, _root_module_fn):
module_errors.append({'type': 'root cycle'})
module_ids = set([m.unique_id for m in self.get_modules()])
root_ids = set([_root_module_fn(m) for m in self.get_modules() if _root_module_fn(m) is not None])
if not root_ids.issubset(module_ids):
module_errors.append({'type': 'unknown root'})
return module_errors
def get_profile_setting(self, s_type, s_id):
setting = self.profile.get(s_type, {}).get(s_id)
if setting is not None:
return setting
yaml_setting = commcare_settings.get_commcare_settings_lookup()[s_type][s_id]
for contingent in yaml_setting.get("contingent_default", []):
if check_condition(self, contingent["condition"]):
setting = contingent["value"]
if setting is not None:
return setting
if self.build_version < yaml_setting.get("since", "0"):
setting = yaml_setting.get("disabled_default", None)
if setting is not None:
return setting
return yaml_setting.get("default")
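    # Resolution order (as implemented above): explicit profile value,
    # then the first matching contingent default, then the disabled
    # default for builds older than the setting, then the plain default.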
@property
def has_careplan_module(self):
        return any(isinstance(module, CareplanModule) for module in self.modules)
@quickcache(['self.version'])
def get_case_metadata(self):
from corehq.apps.reports.formdetails.readable import AppCaseMetadata
builder = ParentCasePropertyBuilder(self)
case_relationships = builder.get_parent_type_map(self.get_case_types())
meta = AppCaseMetadata()
for case_type, relationships in case_relationships.items():
type_meta = meta.get_type(case_type)
type_meta.relationships = relationships
for module in self.get_modules():
for form in module.get_forms():
form.update_app_case_meta(meta)
seen_types = []
def get_children(case_type):
seen_types.append(case_type)
return [type_.name for type_ in meta.case_types if type_.relationships.get('parent') == case_type]
def get_hierarchy(case_type):
return {child: get_hierarchy(child) for child in get_children(case_type)}
roots = [type_ for type_ in meta.case_types if not type_.relationships]
for type_ in roots:
meta.type_hierarchy[type_.name] = get_hierarchy(type_.name)
for type_ in meta.case_types:
if type_.name not in seen_types:
meta.type_hierarchy[type_.name] = {}
type_.error = _("Error in case type hierarchy")
return meta
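    # The resulting meta.type_hierarchy is a nested dict keyed by case
    # type, e.g. (a sketch): {'household': {'pregnancy': {'child': {}}}}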
def get_subcase_types(self, case_type):
"""
Return the subcase types defined across an app for the given case type
"""
return {t for m in self.get_modules()
if m.case_type == case_type
for t in m.get_subcase_types()}
@memoized
def grid_display_for_some_modules(self):
return self.grid_menu_toggle_enabled() and self.grid_form_menus == 'some'
@memoized
def grid_display_for_all_modules(self):
return self.grid_menu_toggle_enabled() and self.grid_form_menus == 'all'
def grid_menu_toggle_enabled(self):
return toggles.GRID_MENUS.enabled(self.domain)
class RemoteApp(ApplicationBase):
"""
A wrapper for a url pointing to a suite or profile file. This allows you to
write all the files for an app by hand, and then give the url to app_manager
and let it package everything together for you.
"""
profile_url = StringProperty(default="http://")
name = StringProperty()
manage_urls = BooleanProperty(default=False)
questions_map = DictProperty(required=False)
def is_remote_app(self):
return True
@classmethod
def new_app(cls, domain, name, lang='en'):
app = cls(domain=domain, name=name, langs=[lang])
return app
def create_profile(self, is_odk=False, langs=None):
# we don't do odk for now anyway
return remote_app.make_remote_profile(self, langs)
def strip_location(self, location):
return remote_app.strip_location(self.profile_url, location)
def fetch_file(self, location):
location = self.strip_location(location)
url = urljoin(self.profile_url, location)
try:
content = urlopen(url).read()
except Exception:
raise AppEditingError('Unable to access resource url: "%s"' % url)
return location, content
def get_build_langs(self):
if self.build_profiles:
if len(self.build_profiles.keys()) > 1:
raise AppEditingError('More than one app profile for a remote app')
else:
# return first profile, generated as part of lazy migration
return self.build_profiles[self.build_profiles.keys()[0]].langs
else:
return self.langs
@classmethod
def get_locations(cls, suite):
for resource in suite.findall('*/resource'):
try:
loc = resource.findtext('location[@authority="local"]')
except Exception:
loc = resource.findtext('location[@authority="remote"]')
yield resource.getparent().tag, loc
@property
def SUITE_XPATH(self):
return 'suite/resource/location[@authority="local"]'
def create_all_files(self, build_profile_id=None):
langs_for_build = self.get_build_langs()
files = {
'profile.xml': self.create_profile(langs=langs_for_build),
}
tree = _parse_xml(files['profile.xml'])
def add_file_from_path(path, strict=False, transform=None):
added_files = []
# must find at least one
try:
tree.find(path).text
except (TypeError, AttributeError):
if strict:
raise AppEditingError("problem with file path reference!")
else:
return
for loc_node in tree.findall(path):
loc, file = self.fetch_file(loc_node.text)
if transform:
file = transform(file)
files[loc] = file
added_files.append(file)
return added_files
add_file_from_path('features/users/logo')
try:
suites = add_file_from_path(
self.SUITE_XPATH,
strict=True,
transform=(lambda suite:
remote_app.make_remote_suite(self, suite))
)
except AppEditingError:
raise AppEditingError(ugettext('Problem loading suite file from profile file. Is your profile file correct?'))
for suite in suites:
suite_xml = _parse_xml(suite)
for tag, location in self.get_locations(suite_xml):
location, data = self.fetch_file(location)
if tag == 'xform' and langs_for_build:
try:
xform = XForm(data)
except XFormException as e:
raise XFormException('In file %s: %s' % (location, e))
xform.exclude_languages(whitelist=langs_for_build)
data = xform.render()
files.update({location: data})
return files
def make_questions_map(self):
langs_for_build = self.get_build_langs()
if self.copy_of:
xmlns_map = {}
def fetch(location):
filepath = self.strip_location(location)
return self.fetch_attachment('files/%s' % filepath)
profile_xml = _parse_xml(fetch('profile.xml'))
suite_location = profile_xml.find(self.SUITE_XPATH).text
suite_xml = _parse_xml(fetch(suite_location))
for tag, location in self.get_locations(suite_xml):
if tag == 'xform':
xform = XForm(fetch(location))
xmlns = xform.data_node.tag_xmlns
questions = xform.get_questions(langs_for_build)
xmlns_map[xmlns] = questions
return xmlns_map
else:
return None
def get_questions(self, xmlns):
if not self.questions_map:
self.questions_map = self.make_questions_map()
if not self.questions_map:
return []
self.save()
questions = self.questions_map.get(xmlns, [])
return questions
str_to_cls = {
"Application": Application,
"Application-Deleted": Application,
"RemoteApp": RemoteApp,
"RemoteApp-Deleted": RemoteApp,
}
def import_app(app_id_or_source, domain, source_properties=None, validate_source_domain=None):
if isinstance(app_id_or_source, basestring):
app_id = app_id_or_source
source = get_app(None, app_id)
src_dom = source['domain']
if validate_source_domain:
validate_source_domain(src_dom)
source = source.export_json()
source = json.loads(source)
else:
cls = str_to_cls[app_id_or_source['doc_type']]
# Don't modify original app source
app = cls.wrap(deepcopy(app_id_or_source))
source = app.export_json(dump_json=False)
try:
attachments = source['_attachments']
except KeyError:
attachments = {}
finally:
source['_attachments'] = {}
if source_properties is not None:
for key, value in source_properties.iteritems():
source[key] = value
cls = str_to_cls[source['doc_type']]
# Allow the wrapper to update to the current default build_spec
if 'build_spec' in source:
del source['build_spec']
app = cls.from_source(source, domain)
app.date_created = datetime.datetime.utcnow()
app.cloudcare_enabled = domain_has_privilege(domain, privileges.CLOUDCARE)
with app.atomic_blobs():
for name, attachment in attachments.items():
if re.match(ATTACHMENT_REGEX, name):
app.put_attachment(attachment, name)
if not app.is_remote_app():
for _, m in app.get_media_objects():
if domain not in m.valid_domains:
m.valid_domains.append(domain)
m.save()
if not app.is_remote_app() and any(module.uses_usercase() for module in app.get_modules()):
from corehq.apps.app_manager.util import enable_usercase
enable_usercase(domain)
return app
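# Typical call shapes (illustrative; identifiers are hypothetical):
#     import_app(app_id, target_domain)                # copy an existing app by id
#     import_app(exported_source_dict, target_domain)  # import from exported JSON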
class DeleteApplicationRecord(DeleteRecord):
app_id = StringProperty()
def undo(self):
app = ApplicationBase.get(self.app_id)
app.doc_type = app.get_doc_type()
app.save(increment_version=False)
class DeleteModuleRecord(DeleteRecord):
app_id = StringProperty()
module_id = IntegerProperty()
module = SchemaProperty(ModuleBase)
def undo(self):
app = Application.get(self.app_id)
modules = app.modules
modules.insert(self.module_id, self.module)
app.modules = modules
app.save()
class DeleteFormRecord(DeleteRecord):
app_id = StringProperty()
module_id = IntegerProperty()
module_unique_id = StringProperty()
form_id = IntegerProperty()
form = SchemaProperty(FormBase)
def undo(self):
app = Application.get(self.app_id)
if self.module_unique_id is not None:
name = trans(self.form.name, app.default_language, include_lang=False)
module = app.get_module_by_unique_id(
self.module_unique_id,
error=_("Could not find form '{}'").format(name)
)
else:
module = app.modules[self.module_id]
forms = module.forms
forms.insert(self.form_id, self.form)
module.forms = forms
app.save()
class CareplanAppProperties(DocumentSchema):
name = StringProperty()
latest_release = StringProperty()
case_type = StringProperty()
goal_conf = DictProperty()
task_conf = DictProperty()
class CareplanConfig(Document):
domain = StringProperty()
app_configs = SchemaDictProperty(CareplanAppProperties)
@classmethod
def for_domain(cls, domain):
res = cache_core.cached_view(
cls.get_db(),
"by_domain_doc_type_date/view",
key=[domain, 'CareplanConfig', None],
reduce=False,
include_docs=True,
wrapper=cls.wrap)
if len(res) > 0:
result = res[0]
else:
result = None
return result
# backwards compatibility with suite-1.0.xml
FormBase.get_command_id = lambda self: id_strings.form_command(self)
FormBase.get_locale_id = lambda self: id_strings.form_locale(self)
ModuleBase.get_locale_id = lambda self: id_strings.module_locale(self)
ModuleBase.get_case_list_command_id = lambda self: id_strings.case_list_command(self)
ModuleBase.get_case_list_locale_id = lambda self: id_strings.case_list_locale(self)
Module.get_referral_list_command_id = lambda self: id_strings.referral_list_command(self)
Module.get_referral_list_locale_id = lambda self: id_strings.referral_list_locale(self)
| bsd-3-clause | -5,053,952,302,909,447,000 | 35.65539 | 132 | 0.582232 | false |
alphagov/notifications-api | app/utils.py | 1 | 5728 | from datetime import datetime, timedelta
import pytz
from flask import url_for
from notifications_utils.template import (
BroadcastMessageTemplate,
HTMLEmailTemplate,
LetterPrintTemplate,
SMSMessageTemplate,
)
from notifications_utils.timezones import convert_utc_to_bst
from sqlalchemy import func
DATETIME_FORMAT_NO_TIMEZONE = "%Y-%m-%d %H:%M:%S.%f"
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
DATE_FORMAT = "%Y-%m-%d"
local_timezone = pytz.timezone("Europe/London")
def pagination_links(pagination, endpoint, **kwargs):
if 'page' in kwargs:
kwargs.pop('page', None)
links = {}
if pagination.has_prev:
links['prev'] = url_for(endpoint, page=pagination.prev_num, **kwargs)
if pagination.has_next:
links['next'] = url_for(endpoint, page=pagination.next_num, **kwargs)
links['last'] = url_for(endpoint, page=pagination.pages, **kwargs)
return links
def url_with_token(data, url, config, base_url=None):
from notifications_utils.url_safe_token import generate_token
token = generate_token(data, config['SECRET_KEY'], config['DANGEROUS_SALT'])
base_url = (base_url or config['ADMIN_BASE_URL']) + url
return base_url + token
def get_template_instance(template, values):
from app.models import BROADCAST_TYPE, EMAIL_TYPE, LETTER_TYPE, SMS_TYPE
return {
SMS_TYPE: SMSMessageTemplate,
EMAIL_TYPE: HTMLEmailTemplate,
LETTER_TYPE: LetterPrintTemplate,
BROADCAST_TYPE: BroadcastMessageTemplate,
}[template['template_type']](template, values)
def get_london_midnight_in_utc(date):
"""
    This function converts a date's midnight in London time (BST, British Summer Time, when in effect) to UTC;
    the tzinfo is then removed from the datetime because the database stores the timestamps without timezone.
:param date: the day to calculate the London midnight in UTC for
:return: the datetime of London midnight in UTC, for example 2016-06-17 = 2016-06-16 23:00:00
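    Example (illustrative sketch; this date falls within British Summer Time):
        >>> from datetime import date
        >>> get_london_midnight_in_utc(date(2016, 6, 17))
        datetime.datetime(2016, 6, 16, 23, 0)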
"""
return local_timezone.localize(datetime.combine(date, datetime.min.time())).astimezone(
pytz.UTC).replace(
tzinfo=None)
def get_midnight_for_day_before(date):
day_before = date - timedelta(1)
return get_london_midnight_in_utc(day_before)
def get_london_month_from_utc_column(column):
"""
Where queries need to count notifications by month it needs to be
the month in BST (British Summer Time).
The database stores all timestamps as UTC without the timezone.
- First set the timezone on created_at to UTC
- then convert the timezone to BST (or Europe/London)
- lastly truncate the datetime to month with which we can group
queries
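    Usage sketch (the model and session names here are hypothetical):
        month = get_london_month_from_utc_column(Notification.created_at)
        db.session.query(month, func.count()).group_by(month)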
"""
return func.date_trunc(
"month",
func.timezone("Europe/London", func.timezone("UTC", column))
)
def get_public_notify_type_text(notify_type, plural=False):
from app.models import (
BROADCAST_TYPE,
PRECOMPILED_LETTER,
SMS_TYPE,
UPLOAD_DOCUMENT,
)
notify_type_text = notify_type
if notify_type == SMS_TYPE:
notify_type_text = 'text message'
elif notify_type == UPLOAD_DOCUMENT:
notify_type_text = 'document'
elif notify_type == PRECOMPILED_LETTER:
notify_type_text = 'precompiled letter'
elif notify_type == BROADCAST_TYPE:
notify_type_text = 'broadcast message'
return '{}{}'.format(notify_type_text, 's' if plural else '')
def midnight_n_days_ago(number_of_days):
"""
Returns midnight a number of days ago. Takes care of daylight savings etc.
"""
return get_london_midnight_in_utc(datetime.utcnow() - timedelta(days=number_of_days))
def escape_special_characters(string):
for special_character in ('\\', '_', '%', '/'):
string = string.replace(
special_character,
r'\{}'.format(special_character)
)
return string
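# For example (illustrative): escape_special_characters('100%_done') returns
# '100\%\_done', which is safe to embed in a SQL LIKE pattern.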
def email_address_is_nhs(email_address):
return email_address.lower().endswith((
'@nhs.uk', '@nhs.net', '.nhs.uk', '.nhs.net',
))
def get_notification_table_to_use(service, notification_type, process_day, has_delete_task_run):
"""
Work out what table will contain notification data for a service by looking up their data retention.
Make sure that when you run this you think about whether the delete task has run for that day! If it's run, the
notifications from that day will have moved to NotificationHistory. The delete tasks run between 4 and 5am every
morning.
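    For example (illustrative, assuming the default 7 days of retention and the
    delete task already run): a process_day exactly 7 days old still resolves to
    Notification, while an 8-day-old process_day resolves to NotificationHistory.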
"""
from app.models import Notification, NotificationHistory
data_retention = service.data_retention.get(notification_type)
days_of_retention = data_retention.days_of_retention if data_retention else 7
todays_bst_date = convert_utc_to_bst(datetime.utcnow()).date()
days_ago = todays_bst_date - process_day
if not has_delete_task_run:
# if the task hasn't run yet, we've got an extra day of data in the notification table so can go back an extra
# day before looking at NotificationHistory
days_of_retention += 1
return Notification if days_ago <= timedelta(days=days_of_retention) else NotificationHistory
def get_archived_db_column_value(column):
date = datetime.utcnow().strftime("%Y-%m-%d")
return f'_archived_{date}_{column}'
def get_dt_string_or_none(val):
return val.strftime(DATETIME_FORMAT) if val else None
def get_uuid_string_or_none(val):
return str(val) if val else None
def format_sequential_number(sequential_number):
return format(sequential_number, "x").zfill(8)
def get_reference_from_personalisation(personalisation):
if personalisation:
return personalisation.get("reference")
return None
| mit | -5,519,629,083,492,794,000 | 33.095238 | 118 | 0.68523 | false |
lbybee/march_madness | old/test_ranking.py | 1 | 2504 | import numpy as np
def genEstimates(rank_data, key):
"""takes in rank data from expandSeaData and returns a
list of matrices for each year"""
rank_values = rank_data[["team", key]].values
data_dict = {}
for row_i in rank_values:
data_dict[row_i[0]] = {}
for row_j in rank_values:
            # rank difference between the two teams drives the logistic estimate
            rank_diff = row_i[1] - row_j[1]
            data_dict[row_i[0]][row_j[0]] = 1 / (1.0 + pow(10, - rank_diff / 15.0))
return data_dict
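# Sketch of the logistic mapping above (illustrative numbers): a team rated
# 15 points higher than its opponent gets estimate 1 / (1 + 10 ** -1) ~= 0.909.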
def genAllSeasonsEstimates(rank_data, key):
"""takes in the rank data and generates the estimates for each
season"""
s_estimates = []
seasons = list(set(rank_data["season"]))
for s in seasons:
r_data = rank_data[rank_data["season"] == s]
s_estimates.append(genEstimates(r_data, key))
return s_estimates
def genAllEstimates(rank_data):
"""generates the estimates for each key"""
ranks = ["seed", "crank", "rating"]
for i in range(28):
ranks.append("orank_%d" % i)
r_estimates = []
for r in ranks:
        r_data = rank_data[["season", "team", r]]
        r_estimates.append(genAllSeasonsEstimates(r_data, r))
return r_estimates
def testEstimate(res_data, estimates):
"""tests that whether the estimates were right or not"""
res_data = res_data[["wteam", "lteam"]].values
log_loss = 0.0
j = 0
for row in res_data:
if row[0] in estimates and row[1] in estimates:
j += 1
if estimates[row[1]][row[0]] > estimates[row[0]][row[1]]:
log_loss += np.log(estimates[row[0]][row[1]])
else:
log_loss += np.log(1 - estimates[row[1]][row[0]])
if j != 0:
return - log_loss / j
else:
return 0
def testAllSeasons(res_data, seasons_l, estimates):
"""tests the estimates for each season"""
log_loss = 0.0
j = 0
for est, sea in zip(estimates[:-1], seasons_l[:-1]):
t_data = res_data[res_data["season"] == sea]
t_add = testEstimate(t_data, est)
print t_add
if t_add != 0:
j += 1
log_loss += t_add
return log_loss / j
def testAllEstimates(res_data, seasons_l, estimates_l):
"""tests each estimates for each season"""
log_loss_l = []
for est in estimates_l:
log_loss_l.append(testAllSeasons(res_data, seasons_l, est))
return log_loss_l
| gpl-2.0 | -6,628,350,248,022,735,000 | 28.458824 | 74 | 0.559505 | false |
erigones/esdc-ce | api/vm/qga/views.py | 1 | 1509 | from api.decorators import api_view, request_data
from api.vm.qga.api_views import VmQGA
__all__ = ('vm_qga',)
#: vm_status: PUT: running, stopping
@api_view(('PUT',))
@request_data() # get_vm() = IsVmOwner
def vm_qga(request, hostname_or_uuid, command, data=None):
"""
Run (:http:put:`PUT </vm/(hostname_or_uuid)/qga/(command)>`) a command via Qemu Guest Agent.
.. http:put:: /vm/(hostname_or_uuid)/qga/(command)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg command: **required** - QGA command. Available commands are:
* ``fsfreeze`` ``<status|freeze|thaw>``
* ``info``
* ``ping``
* ``sync``
* ``reboot``
* ``poweroff``
* ``get-time``
* ``set-time`` ``[epoch time in nanoseconds]``
:type command: string
:arg data.params: List of command parameters
:type data.params: array
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid command
:status 423: Node is not operational / VM is not operational
:status 501: Operation not supported
"""
return VmQGA(request, hostname_or_uuid, command, data).put()
| apache-2.0 | 8,276,019,273,834,272,000 | 30.4375 | 96 | 0.555335 | false |
gnulinooks/sympy | sympy/geometry/line.py | 1 | 19520 | from sympy.core.basic import Basic, S, C
from sympy.simplify import simplify
from sympy.geometry.exceptions import GeometryError
from entity import GeometryEntity
from point import Point
class LinearEntity(GeometryEntity):
"""
A linear entity (line, ray, segment, etc) in space.
This is an abstract class and is not meant to be instantiated.
Subclasses should implement the following methods:
__eq__
__contains__
"""
def __new__(cls, p1, p2, **kwargs):
if not isinstance(p1, Point) or not isinstance(p2, Point):
raise TypeError("%s.__new__ requires Point instances" % cls.__name__)
if p1 == p2:
raise RuntimeError("%s.__new__ requires two distinct points" % cls.__name__)
return GeometryEntity.__new__(cls, p1, p2, **kwargs)
@property
def p1(self):
"""One of the defining points of a linear entity."""
return self.__getitem__(0)
@property
def p2(self):
"""One of the defining points of a linear entity."""
return self.__getitem__(1)
@property
def coefficients(self):
"""The coefficients (a,b,c) for equation ax+by+c=0"""
return (self.p1[1]-self.p2[1],
self.p2[0]-self.p1[0],
self.p1[0]*self.p2[1] - self.p1[1]*self.p2[0])
def is_concurrent(*lines):
"""
Returns True if the set of linear entities are concurrent, False
otherwise. Two or more linear entities are concurrent if they all
intersect at a single point.
Description of Method Used:
===========================
Simply take the first two lines and find their intersection.
If there is no intersection, then the first two lines were
        parallel and had no intersection so concurrency is impossible
        amongst the whole set. Otherwise, check to see if the
        intersection point of the first two lines is a member of
        the rest of the lines. If so, the lines are concurrent.
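        Example (illustrative):
            >>> l1 = Line(Point(0, 0), Point(1, 1))
            >>> l2 = Line(Point(0, 1), Point(1, 0))
            >>> l3 = Line(Point(0, 1), Point(1, 2))
            >>> Line.is_concurrent(l1, l2)
            True
            >>> Line.is_concurrent(l1, l3)
            False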
"""
_lines = lines
lines = GeometryEntity.extract_entities(lines)
# Concurrency requires intersection at a single point; One linear
# entity cannot be concurrent.
if len(lines) <= 1:
return False
try:
# Get the intersection (if parallel)
p = GeometryEntity.do_intersection(lines[0], lines[1])
if len(p) == 0: return False
# Make sure the intersection is on every linear entity
for line in lines[2:]:
if p[0] not in line:
return False
return True
except AttributeError:
return False
def is_parallel(l1, l2):
"""Returns True if l1 and l2 are parallel, False otherwise"""
try:
a1,b1,c1 = l1.coefficients
a2,b2,c2 = l2.coefficients
return bool(simplify(a1*b2 - b1*a2) == 0)
except AttributeError:
return False
def is_perpendicular(l1, l2):
"""Returns True if l1 and l2 are perpendicular, False otherwise"""
try:
a1,b1,c1 = l1.coefficients
a2,b2,c2 = l2.coefficients
return bool(simplify(a1*a2 + b1*b2) == 0)
except AttributeError:
return False
def angle_between(l1, l2):
"""
Returns an angle formed between the two linear entities.
Description of Method Used:
===========================
From the dot product of vectors v1 and v2 it is known that:
dot(v1, v2) = |v1|*|v2|*cos(A)
where A is the angle formed between the two vectors. We can
get the directional vectors of the two lines and readily
find the angle between the two using the above formula.
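        Example (illustrative):
            >>> l1 = Line(Point(0, 0), Point(1, 0))
            >>> l2 = Line(Point(0, 0), Point(1, 1))
            >>> LinearEntity.angle_between(l1, l2)
            pi/4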
"""
v1 = l1.p2 - l1.p1
v2 = l2.p2 - l2.p1
return C.acos( (v1[0]*v2[0]+v1[1]*v2[1]) / (abs(v1)*abs(v2)) )
def parallel_line(self, p):
"""
Returns a new Line which is parallel to this linear entity and passes
through the specified point.
"""
d = self.p1 - self.p2
return Line(p, p + d)
def perpendicular_line(self, p):
"""
Returns a new Line which is perpendicular to this linear entity and
passes through the specified point.
"""
d1,d2 = self.p1 - self.p2
if d2 == 0: # If an horizontal line
if p[1] == self.p1[1]: # if p is on this linear entity
p2 = Point(p[0], p[1] + 1)
return Line(p, p2)
else:
p2 = Point(p[0], self.p1[1])
return Line(p, p2)
else:
p2 = Point(p[0] - d2, p[1] + d1)
return Line(p, p2)
def perpendicular_segment(self, p):
"""
Returns a new Segment which connects p to a point on this linear
entity and is also perpendicular to this line. Returns p itself
if p is on this linear entity.
"""
if p in self:
return p
pl = self.perpendicular_line(p)
p2 = GeometryEntity.do_intersection(self, pl)[0]
return Segment(p, p2)
@property
def slope(self):
"""
The slope of this linear entity, or infinity if vertical.
"""
d1,d2 = self.p1 - self.p2
if d1 == 0:
return S.Infinity
return simplify(d2/d1)
@property
def points(self):
"""The two points used to define this linear entity."""
return (self.p1, self.p2)
def projection(self, o):
"""
Project a point, line, ray, or segment onto this linear entity.
If projection cannot be performed then a GeometryError is raised.
Notes:
======
- A projection involves taking the two points that define
the linear entity and projecting those points onto a
Line and then reforming the linear entity using these
projections.
- A point P is projected onto a line L by finding the point
on L that is closest to P. This is done by creating a
perpendicular line through P and L and finding its
intersection with L.
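        Example (illustrative):
            >>> l = Line(Point(0, 0), Point(1, 1))
            >>> l.projection(Point(1, 0))
            Point(1/2, 1/2)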
"""
tline = Line(self.p1, self.p2)
def project(p):
"""Project a point onto the line representing self."""
if p in tline: return p
l1 = tline.perpendicular_line(p)
return tline.intersection(l1)[0]
projected = None
if isinstance(o, Point):
return project(o)
elif isinstance(o, LinearEntity):
n_p1 = project(o.p1)
n_p2 = project(o.p2)
if n_p1 == n_p2:
projected = n_p1
else:
projected = o.__class__(n_p1, n_p2)
# Didn't know how to project so raise an error
if projected is None:
n1 = self.__class__.__name__
n2 = o.__class__.__name__
raise GeometryError("Do not know how to project %s onto %s" % (n2, n1))
return GeometryEntity.do_intersection(self, projected)[0]
def intersection(self, o):
if isinstance(o, Point):
if o in self:
return [o]
else:
return []
elif isinstance(o, LinearEntity):
a1,b1,c1 = self.coefficients
a2,b2,c2 = o.coefficients
t = simplify(a1*b2 - a2*b1)
if t == 0: # are parallel?
if isinstance(self, Line):
if o.p1 in self:
return [o]
return []
elif isinstance(o, Line):
if self.p1 in o:
return [self]
return []
elif isinstance(self, Ray):
if isinstance(o, Ray):
# case 1, rays in the same direction
if self.xdirection == o.xdirection:
if self.source[0] < o.source[0]:
return [o]
return [self]
# case 2, rays in the opposite directions
else:
if o.source in self:
if self.source == o.source:
return [self.source]
return [Segment(o.source, self.source)]
return []
elif isinstance(o, Segment):
if o.p1 in self:
if o.p2 in self:
return [o]
return [Segment(o.p1, self.source)]
elif o.p2 in self:
return [Segment(o.p2, self.source)]
return []
elif isinstance(self, Segment):
if isinstance(o, Ray):
return o.intersection(self)
elif isinstance(o, Segment):
# A reminder that the points of Segments are ordered
# in such a way that the following works. See
# Segment.__new__ for details on the ordering.
if self.p1 not in o:
if self.p2 not in o:
# Neither of the endpoints are in o so either
# o is contained in this segment or it isn't
if o in self:
return [self]
return []
else:
# p1 not in o but p2 is. Either there is a
# segment as an intersection, or they only
# intersect at an endpoint
if self.p2 == o.p1:
return [o.p1]
return [Segment(o.p1, self.p2)]
elif self.p2 not in o:
# p2 not in o but p1 is. Either there is a
# segment as an intersection, or they only
# intersect at an endpoint
if self.p1 == o.p2:
return [o.p2]
return [Segment(o.p2, self.p1)]
# Both points of self in o so the whole segment
# is in o
return [self]
# Unknown linear entity
return []
# Not parallel, so find the point of intersection
px = simplify((b1*c2 - c1*b2) / t)
py = simplify((a2*c1 - a1*c2) / t)
inter = Point(px, py)
if inter in self and inter in o:
return [inter]
return []
raise NotImplementedError()
def random_point(self):
"""Returns a random point on this Ray."""
from random import randint
from sys import maxint
# The lower and upper
lower, upper = -maxint-1, maxint
if self.slope is S.Infinity:
if isinstance(self, Ray):
if self.ydirection is S.Infinity:
lower = self.p1[1]
else:
upper = self.p1[1]
elif isinstance(self, Segment):
lower = self.p1[1]
upper = self.p2[1]
x = self.p1[0]
y = randint(lower, upper)
else:
if isinstance(self, Ray):
if self.xdirection is S.Infinity:
lower = self.p1[0]
else:
upper = self.p1[0]
elif isinstance(self, Segment):
lower = self.p1[0]
upper = self.p2[0]
a,b,c = self.coefficients
x = randint(lower, upper)
y = simplify( (-c - a*x) / b )
return Point(x, y)
def __eq__(self, other):
raise NotImplementedError()
def __contains__(self, other):
raise NotImplementedError()
class Line(LinearEntity):
"""A line in space."""
def arbitrary_point(self, parameter_name='t'):
"""Returns a symbolic point that is on this line."""
t = C.Symbol(parameter_name, real=True)
x = simplify(self.p1[0] + t*(self.p2[0] - self.p1[0]))
y = simplify(self.p1[1] + t*(self.p2[1] - self.p1[1]))
return Point(x, y)
def plot_interval(self, parameter_name='t'):
"""Returns the plot interval for the default geometric plot of line"""
t = C.Symbol(parameter_name, real=True)
return [t, -5, 5]
def equation(self, xaxis_name='x', yaxis_name='y'):
"""
Returns the equation for this line. Optional parameters xaxis_name
and yaxis_name can be used to specify the names of the symbols used
for the equation.
"""
x = C.Symbol(xaxis_name, real=True)
y = C.Symbol(yaxis_name, real=True)
a,b,c = self.coefficients
return simplify(a*x + b*y + c)
def __contains__(self, o):
"""Return True if o is on this Line, or False otherwise."""
if isinstance(o, Line):
return self.__eq__(o)
elif isinstance(o, Point):
x = C.Symbol('x', real=True)
y = C.Symbol('y', real=True)
r = self.equation().subs({x: o[0], y: o[1]})
x = simplify(r)
return simplify(x) == 0
else:
return False
def __eq__(self, other):
"""Return True if other is equal to this Line, or False otherwise."""
if not isinstance(other, Line): return False
return Point.is_collinear(self.p1, self.p2, other.p1, other.p2)
class Ray(LinearEntity):
"""A ray in space."""
@property
def source(self):
"""The point from which the ray eminates."""
return self.p1
@property
def xdirection(self):
"""
The x direction of the ray. Positive infinity if the ray points in
the positive x direction, negative infinity if the ray points
in the negative x direction, or 0 if the ray is vertical.
"""
if self.p1[0] < self.p2[0]:
return S.Infinity
elif self.p1[0] == self.p2[0]:
return S.Zero
else:
return S.NegativeInfinity
@property
def ydirection(self):
"""
The y direction of the ray. Positive infinity if the ray points in
the positive y direction, negative infinity if the ray points
in the negative y direction, or 0 if the ray is horizontal.
"""
if self.p1[1] < self.p2[1]:
return S.Infinity
elif self.p1[1] == self.p2[1]:
return S.Zero
else:
return S.NegativeInfinity
def __eq__(self, other):
"""Return True if other is equal to this Ray, or False otherwise."""
if not isinstance(other, Ray):
return False
return ((self.source == other.source) and (other.p2 in self))
def __contains__(self, o):
"""Return True if o is on this Ray, or False otherwise."""
if isinstance(o, Ray):
            return Point.is_collinear(self.p1, self.p2, o.p1, o.p2) \
                   and (self.xdirection == o.xdirection) \
                   and (self.ydirection == o.ydirection)
elif isinstance(o, Segment):
return ((o.p1 in self) and (o.p2 in self))
elif isinstance(o, Point):
if Point.is_collinear(self.p1, self.p2, o):
if (not self.p1[0].atoms(C.Symbol)) and (not self.p1[1].atoms(C.Symbol)) \
and (not self.p2[0].atoms(C.Symbol)) and (not self.p2[1].atoms(C.Symbol)):
if self.xdirection is S.Infinity:
return o[0] >= self.source[0]
elif self.xdirection is S.NegativeInfinity:
return o[0] <= self.source[0]
elif self.ydirection is S.Infinity:
return o[1] >= self.source[1]
return o[1] <= self.source[1]
else:
# There are symbols lying around, so assume that o
# is contained in this ray (for now)
return True
else:
# Points are not collinear, so the rays are not parallel
                # and hence it is impossible for self to contain o
return False
# No other known entity can be contained in a Ray
return False
class Segment(LinearEntity):
"""An undirected line segment in space."""
def __new__(cls, p1, p2, **kwargs):
# Reorder the two points under the following ordering:
# if p1[0] != p2[0] then p1[0] < p2[0]
# if p1[0] == p2[0] then p1[1] < p2[1]
if p1[0] > p2[0]:
p1, p2 = p2, p1
        elif p1[0] == p2[0] and p1[1] > p2[1]:
p1, p2 = p2, p1
return LinearEntity.__new__(cls, p1, p2, **kwargs)
def arbitrary_point(self, parameter_name='t'):
"""Returns a symbolic point that is on this line segment."""
t = C.Symbol(parameter_name, real=True)
x = simplify(self.p1[0] + t*(self.p2[0] - self.p1[0]))
y = simplify(self.p1[1] + t*(self.p2[1] - self.p1[1]))
return Point(x, y)
def plot_interval(self, parameter_name='t'):
t = C.Symbol(parameter_name, real=True)
return [t, 0, 1]
def perpendicular_bisector(self, p=None):
"""
Returns the perpendicular bisector of this segment. If no point is
specified or the point specified is not on the bisector then the
bisector is returned as a Line. Otherwise a Segment is returned
that joins the point specified and the intersection of the bisector
and the segment.
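        Example (illustrative):
            >>> s = Segment(Point(0, 0), Point(2, 0))
            >>> s.perpendicular_bisector()
            Line(Point(1, 0), Point(1, 1))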
"""
l = LinearEntity.perpendicular_line(self, self.midpoint)
if p is None or p not in l:
return l
else:
return Segment(self.midpoint, p)
@property
def length(self):
"""The length of the segment."""
return Point.distance(self.p1, self.p2)
@property
def midpoint(self):
"""The midpoint of the segment."""
return Point.midpoint(self.p1, self.p2)
def __eq__(self, other):
"""Return True if other is equal to this Line, or False otherwise."""
if not isinstance(other, Segment):
return False
return ((self.p1 == other.p1) and (self.p2 == other.p2))
def __contains__(self, o):
"""Return True if o is on this Segment, or False otherwise."""
if isinstance(o, Segment):
return ((o.p1 in self) and (o.p2 in self))
elif isinstance(o, Point):
if Point.is_collinear(self.p1, self.p2, o):
x1,x2 = self.p1[0], self.p2[0]
                if not (x1.atoms(C.Symbol) or x2.atoms(C.Symbol)):
return (min(x1,x2) <= o[0]) and (o[0] <= max(x1,x2))
else:
return True
else:
return False
# No other known entity can be contained in a Ray
return False
| bsd-3-clause | -1,037,525,842,769,122,200 | 36.323136 | 98 | 0.509631 | false |
HewlettPackard/oneview-ansible | library/oneview_id_pools_ipv4_range.py | 1 | 6647 | #!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2021) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: oneview_id_pools_ipv4_range
short_description: Manage OneView ID pools IPV4 Range resources.
description:
- Provides an interface to manage ID pools IPV4 Range resources. Can create, update, or delete.
version_added: "2.3"
requirements:
- "python >= 2.7.9"
- "hpeOneView >= 6.0.0"
- "ansible >= 2.9"
author: "Thiago Miotto (@tmiotto)"
options:
state:
description:
- Indicates the desired state for the ID pools IPV4 Range resource.
C(present) will ensure data properties are compliant with OneView.
C(absent) will remove the resource from OneView, if it exists.
choices: ['present', 'absent']
data:
description:
- List with ID pools IPV4 Range properties.
required: true
extends_documentation_fragment:
- oneview
- oneview.validateetag
'''
EXAMPLES = '''
- name: Ensure that ID pools IPV4 Range is present using the default configuration
oneview_id_pools_ipv4_range:
config: "{{ config_file_path }}"
state: present
data:
name: 'Test ID pools IPV4 Range'
- name: Ensure that ID pools IPV4 Range is absent
oneview_id_pools_ipv4_range:
config: "{{ config_file_path }}"
state: absent
data:
name: 'ID pools IPV4 Range'
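- name: Update the ID pools IPV4 Range collector (illustrative example; the uri is hypothetical)
  oneview_id_pools_ipv4_range:
    config: "{{ config_file_path }}"
    state: present
    data:
      uri: '/rest/id-pools/ipv4/ranges/sample-range-id'
      update_collector: true
      idList: ['10.1.1.1', '10.1.1.2']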
'''
RETURN = '''
id_pools_ipv4_range:
description: Has the facts about the OneView ID pools IPV4 Ranges.
returned: On state 'present'. Can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModule
class IdPoolsIpv4RangeModule(OneViewModule):
MSG_CREATED = 'ID pools IPV4 Range created successfully.'
MSG_UPDATED = 'ID pools IPV4 Range updated successfully.'
MSG_DELETED = 'ID pools IPV4 Range deleted successfully.'
MSG_ALREADY_PRESENT = 'ID pools IPV4 Range is already present.'
MSG_ALREADY_ABSENT = 'ID pools IPV4 Range is already absent.'
RESOURCE_FACT_NAME = 'id_pools_ipv4_range'
def __init__(self):
additional_arg_spec = dict(data=dict(required=True, type='dict'),
state=dict(
required=True,
choices=['present', 'absent']))
super(IdPoolsIpv4RangeModule, self).__init__(additional_arg_spec=additional_arg_spec,
validate_etag_support=True)
self.resource_client = self.oneview_client.id_pools_ipv4_ranges
def execute_module(self):
self.current_resource = None
# If Range URI is provided then it sets the resource client
if self.data.get('uri'):
self.current_resource = self.resource_client.get_by_uri(self.data.get('uri'))
# Do preliminary check before creating a new range
elif self.data.get('subnetUri') and self.data.get('name'):
subnet = self.oneview_client.id_pools_ipv4_subnets.get_by_uri(self.data.get('subnetUri'))
for range_uri in subnet.data['rangeUris']:
maybe_resource = self.resource_client.get_by_uri(range_uri)
if maybe_resource.data['name'] == self.data['name']:
self.current_resource = maybe_resource
if self.state == 'present':
return self._present()
elif self.state == 'absent':
return self.resource_absent()
def _present(self):
# If no resource was found during get operation, it creates new one
if not self.current_resource:
response = self.resource_present("id_pools_ipv4_range")
else:
# setting current resource for _update_resource
# Enabled can be True, False or None. Using not found default to false for comparison purposes.
enabled = self.data.pop('enabled', 'not_given')
# sets update_collector/update_allocator if Given to True.
update_collector = self.data.pop('update_collector', False)
update_allocator = self.data.pop('update_allocator', False)
id_list = self.data.pop('idList', False)
count = self.data.pop('count', False)
# In case newName is given it sets it correctly
if self.data.get('newName'):
self.data['name'] = self.data.pop('newName')
# It Performs the update operation
response = self.resource_present("id_pools_ipv4_range")
# Checks enabled status in latest data and performas accordingly
if enabled != 'not_given' and enabled != self.current_resource.data.get('enabled'):
response['msg'] = self.MSG_UPDATED
response['changed'] = True
response['ansible_facts']['id_pools_ipv4_range'] = \
self.resource_client.enable(dict(enabled=enabled, type='Range'), self.current_resource.data['uri'])
self.data['enabled'] = enabled
return response
elif update_collector:
response['msg'] = self.MSG_UPDATED
response['changed'] = True
self.data['idList'] = id_list
response['ansible_facts']['id_pools_ipv4_range'] = \
self.resource_client.update_collector(dict(idList=id_list), self.data.get('uri'))
return response
elif update_allocator:
self.data['idList'] = id_list
self.data['count'] = count
response['msg'] = self.MSG_UPDATED
response['changed'] = True
response['ansible_facts']['id_pools_ipv4_range'] = \
self.resource_client.update_allocator(dict(idList=id_list, count=count), self.data.get('uri'))
return response
def main():
IdPoolsIpv4RangeModule().run()
if __name__ == '__main__':
main()
| apache-2.0 | 579,202,707,146,734,800 | 39.779141 | 119 | 0.613811 | false |
nick-huang-cc/GraffitiSpaceTT | UnderstandStudyPython/awesome-python3-webapp/www/config_override.py | 1 | 1354 | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
#Copyright (c) 1986 Nick Wong.
#Copyright (c) 2016-2026 TP-NEW Corp.
# License: TP-NEW (www.tp-new.com)
__author__ = "Nick Wong"
"""
如果要部署到服务器时,通常需要修改数据库的host等信息,直接修改config_default.py不是一个好办法,
更好的方法是编写一个config_override.py,用来覆盖某些默认设置
"""
# Passwords and similar credentials are stored locally, so read the custom
# properties file first to obtain them:
#import properties
# import sys
# sys.path.append('E:\\GitHub\\GraffitiSpaceTT\\UnderstandStudyPython')
import properties
valueProperties = properties.getValue(object)
sys_address = valueProperties.get('service_sys_ip') # server address
# sys_port = valueProperties.get('service_sys_mysql_port') # server MySQL port
# sys_user = valueProperties.get('service_sys_admin_user') # system login user
# sys_password = valueProperties.get('service_sys_admin_password') # system login user's password
# mysql_user = valueProperties.get('service_mysql_ordinary_user') # MySQL user
# mysql_password = valueProperties.get('service_mysql_ordinary_password') # MySQL user's password
# mysql_database = valueProperties.get('service_mysql_database') # database name
configs = {
'db': {
'host': sys_address
}
}
| agpl-3.0 | -1,793,919,373,691,236,600 | 32.272727 | 84 | 0.709472 | false |
volk0ff/fred | fred/settings.py | 1 | 2229 | """
Django settings for fred project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'hbvld(uz$xaze5)kw$&$*%wqwo%v)=im^3&p5)@!=@)i8kl4rn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*',
'www.breadandcircuits.org',
'127.0.0.1:8001']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'home',
'search',
'graph',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'fred.urls'
WSGI_APPLICATION = 'fred.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
BASE_DIR,
)
#Templates
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
| mit | 7,103,168,443,666,669,000 | 21.979381 | 71 | 0.696276 | false |
lfalvarez/votai | medianaranja2/forms.py | 1 | 12510 | # coding=utf-8
from django import forms
from popular_proposal.models import (PopularProposal
)
from elections.models import Area, QuestionCategory, Election
from django.conf import settings
from formtools.wizard.views import SessionWizardView
from medianaranja2.proposals_getter import ProposalsGetter, ProposalsGetterByReadingGroup
from django.shortcuts import render
from medianaranja2.calculator import Calculator
from constance import config
from organization_profiles.models import OrganizationTemplate
from django.views.generic.base import TemplateView
from django.core.cache import cache
from django.utils.safestring import mark_safe
from django.db.models import Q
from medianaranja2.grouped_multiple_choice_fields import GroupedModelMultiChoiceField
from medianaranja2.candidate_proposals_matrix_generator import OrganizationMatrixCreator
from django.forms import ModelForm
class CategoryMultipleChoiceField(forms.ModelMultipleChoiceField):
template_name = 'django/forms/widgets/checkbox_select.html'
option_template_name = 'django/forms/widgets/checkbox_option.html'
def label_from_instance(self, obj):
return obj.name
class PositionChoiceField(forms.ModelChoiceField):
def label_from_instance(self, obj):
return obj.label
class ProposalModelMultipleChoiceField(GroupedModelMultiChoiceField):
def label_from_instance(self, obj):
return mark_safe( obj.get_one_liner() )
area_field = forms.ModelChoiceField(label=u"¿En qué comuna votas?",
help_text=u"Si quieres conocer con qué candidatura al Congreso eres más compatible, elige la comuna en la que votas. Si sólo te interesa tu media naranja presidencial, elige “no aplica”.",
empty_label=u"NO APLICA",
required=False,
queryset=Area.objects.filter(classification__in=settings.FILTERABLE_AREAS_TYPE).order_by('name'))
categories_field = CategoryMultipleChoiceField(label=u"De estos temas, ¿cuáles son los que te parecen más importantes para el país?",
queryset=QuestionCategory.objects.none(),
widget=forms.CheckboxSelectMultiple(),)
class SetupForm(forms.Form):
def __init__(self, *args, **kwargs):
should_use_categories = kwargs.pop('should_use_categories', True)
super(SetupForm, self).__init__(*args, **kwargs)
if should_use_categories:
self.fields['categories'] = categories_field
if settings.SECOND_ROUND_ELECTION is None:
self.fields['area'] = area_field
if 'categories' in self.fields:
self.fields['categories'].queryset = QuestionCategory.objects.all().order_by('-name')
else:
self.election = Election.objects.get(slug=settings.SECOND_ROUND_ELECTION)
if 'categories' in self.fields:
self.fields['categories'].queryset = self.election.categories.order_by('-name')
def clean(self):
cleaned_data = super(SetupForm, self).clean()
if settings.SECOND_ROUND_ELECTION is not None:
cleaned_data['element_selector'] = Election.objects.get(slug=settings.SECOND_ROUND_ELECTION)
else:
if cleaned_data['area'] is None:
cleaned_data['area'] = Area.objects.get(slug=config.DEFAULT_AREA)
if 'area' in cleaned_data.keys():
cleaned_data['element_selector'] = cleaned_data['area']
return cleaned_data
class QuestionsForm(forms.Form):
topic_fields = []
def __init__(self, *args, **kwargs):
categories = kwargs.pop('categories')
super(QuestionsForm, self).__init__(*args, **kwargs)
self.set_fields(categories)
def set_fields(self, categories):
self.categories = categories
for category in self.categories:
for topic in category.topics.order_by('id'):
field = PositionChoiceField(label=topic.label,
empty_label=None,
queryset=topic.positions,
widget=forms.RadioSelect
)
self.fields[topic.slug] = field
self.topic_fields.append(topic.slug)
def clean(self):
cleaned_data = super(QuestionsForm, self).clean()
r = {"positions": []}
for topic in cleaned_data:
if topic in self.topic_fields:
r['positions'].append(cleaned_data[topic])
else:
r[topic] = cleaned_data[topic]
return r
class ProposalsForm(forms.Form):
proposals = ProposalModelMultipleChoiceField(queryset=PopularProposal.objects.none(),
group_by_field='clasification',
widget=forms.CheckboxSelectMultiple(attrs={'class': 'proposal_option'}))
def __init__(self, *args, **kwargs):
self.proposals = kwargs.pop('proposals')
element_selector = kwargs.pop('element_selector')
super(ProposalsForm, self).__init__(*args, **kwargs)
proposals_qs_cache_key = 'proposals_qs_' + str(element_selector.id)
if cache.get(proposals_qs_cache_key) is not None:
self.fields['proposals'].queryset = cache.get(proposals_qs_cache_key)
return
self.proposals = self.proposals[:config.MEDIA_NARANJA_MAX_NUM_PR]
qs = PopularProposal.objects.filter(id__in=[p.id for p in self.proposals]).order_by('clasification')
cache.set(proposals_qs_cache_key, qs)
self.fields['proposals'].queryset = qs
class MediaNaranjaException(Exception):
pass
class MediaNaranjaWizardFormBase(SessionWizardView):
template_name = 'medianaranja2/paso_default.html'
done_template_name = 'medianaranja2/resultado.html'
calculator_class = Calculator
calculator_extra_kwargs = {}
def get_proposal_class(self):
if config.ESTRATEGIA_SELECCION_PROPUESTAS == 'reading_group':
return ProposalsGetterByReadingGroup
return ProposalsGetter
def get_proposal_getter_kwargs(self):
return {}
def get_proposal_getter(self):
return self.get_proposal_class()(**self.get_proposal_getter_kwargs())
def get_organization_templates(self, proposals):
if settings.RECOMMENDED_ORGS_FROM_CACHE:
c = OrganizationMatrixCreator()
return c.get_organizations(proposals)
else:
is_creator_of_this_proposals_filter = Q(organization__proposals__in=proposals)
is_liker_of_this_proposals = Q(organization__likes__proposal__in=proposals)
organization_templates = OrganizationTemplate.objects.filter(is_creator_of_this_proposals_filter|is_liker_of_this_proposals).distinct()
return organization_templates
def done(self, form_list, **kwargs):
cleaned_data = self.get_all_cleaned_data()
results = []
has_parent = True
element_selector = self.get_element_selector_from_cleaned_data(cleaned_data)
elections = self.get_proposal_getter().get_elections(element_selector)
proposals = cleaned_data.get('proposals', [])
positions = cleaned_data.get('positions', [])
for election in elections:
calculator = self.calculator_class(election, positions, proposals, **self.calculator_extra_kwargs)
results.append(calculator.get_result())
if settings.ORGANIZATIONS_IN_12_RESULT:
organization_templates = self.get_organization_templates(proposals)
else:
organization_templates = []
return render(self.request, self.done_template_name, {
'results': results,
'organizations': organization_templates
})
def get_template_names(self):
return [self.templates[self.steps.current]]
def post(self, *args, **kwargs):
try:
return super(MediaNaranjaWizardFormBase, self).post(*args, **kwargs)
except MediaNaranjaException:
self.storage.reset()
self.storage.current_step = self.steps.first
return self.render(self.get_form())
def get_categories_form_kwargs(self, cleaned_data):
return {'categories': list(cleaned_data['categories'])}
def get_element_selector_from_cleaned_data(self, cleaned_data):
if 'element_selector' not in cleaned_data:
return Area.objects.get(slug=config.DEFAULT_AREA)
return cleaned_data['element_selector']
def get_proposals_form_kwargs(self, cleaned_data):
proposal_getter_kwargs = self.get_proposal_getter_kwargs()
getter = self.get_proposal_class()(**proposal_getter_kwargs)
element_selector = self.get_element_selector_from_cleaned_data(cleaned_data)
proposals = getter.get_all_proposals(element_selector)
return {'proposals': proposals, 'element_selector': element_selector}
def get_kwargs_from_step_number(self, number, cleaned_data):
func_name = self.steps_and_functions.get(number, None)
if func_name is None:
return {}
func = getattr(self, func_name, None)
return func(cleaned_data)
def get_form_kwargs(self, step):
step = int(step)
cleaned_data = {}
if step:
cleaned_data = self.get_cleaned_data_for_step(str(0))
if cleaned_data is None:
raise MediaNaranjaException()
return self.get_kwargs_from_step_number(step, cleaned_data)
class MediaNaranjaWizardForm(MediaNaranjaWizardFormBase):
form_list = [SetupForm, QuestionsForm, ProposalsForm]
steps_and_functions = {
1: 'get_categories_form_kwargs',
2: 'get_proposals_form_kwargs'
}
templates = {"0": "medianaranja2/paso_0_setup.html",
"1": "medianaranja2/paso_1_preguntas_y_respuestas.html",
"2": "medianaranja2/paso_2_proposals_list.html"}
class MediaNaranjaNoQuestionsWizardForm(MediaNaranjaWizardFormBase):
form_list = [SetupForm, ProposalsForm]
steps_and_functions = {
1: 'get_proposals_form_kwargs'
}
templates = {"0": "medianaranja2/paso_0_setup.html",
"1": "medianaranja2/paso_2_proposals_list.html"}
def get_form_kwargs(self, step):
kwargs = super(MediaNaranjaNoQuestionsWizardForm, self).get_form_kwargs(step)
if step == '0':
kwargs['should_use_categories'] = False
return kwargs
class MediaNaranjaOnlyProposals(MediaNaranjaWizardFormBase):
form_list = [ProposalsForm, ]
steps_and_functions = {
0: 'get_proposals_form_kwargs'
}
templates = {"0": "medianaranja2/paso_2_proposals_list.html"}
class MediaNaranjaResultONLYFORDEBUG(TemplateView):# pragma: no cover
template_name = 'medianaranja2/resultado.html'
def get_context_data(self, **kwargs):
context = super(MediaNaranjaResultONLYFORDEBUG, self).get_context_data(**kwargs)
from elections.models import Candidate, Election
from organization_profiles.models import OrganizationTemplate
templates = OrganizationTemplate.objects.all()[:3]
context['organizations'] = templates
e1 = Election.objects.exclude(candidates__isnull=True)[0]
context['results'] = [
{'election': e1,
'candidates': [{'value': 2.0, 'candidate': e1.candidates.all()[0]},
{'value': 1.0, 'candidate': e1.candidates.all()[1]},
{'value': 0.5, 'candidate': e1.candidates.all()[2]}]}]
return context
from medianaranja2.models import SharedResult
class ShareForm(ModelForm):
object_id = forms.CharField()
percentage = forms.FloatField(required=False)
class Meta:
model = SharedResult
fields = ['object_id', 'percentage']
def __init__(self, *args, **kwargs):
self.content_type = kwargs.pop('content_type')
super(ShareForm, self).__init__(*args, **kwargs)
def save(self, commit=True):
instance = super(ShareForm, self).save(commit=False)
instance.content_type = self.content_type
instance.data = self.cleaned_data
if commit:
instance.save()
return instance
| gpl-3.0 | -2,809,643,480,602,538,500 | 40.936242 | 224 | 0.641114 | false |
sserrot/champion_relationships | venv/Lib/site-packages/ipywidgets/widgets/__init__.py | 1 | 1512 | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from .widget import Widget, CallbackDispatcher, register, widget_serialization
from .domwidget import DOMWidget
from .valuewidget import ValueWidget
from .trait_types import Color, Datetime, NumberFormat
from .widget_core import CoreWidget
from .widget_bool import Checkbox, ToggleButton, Valid
from .widget_button import Button, ButtonStyle
from .widget_box import Box, HBox, VBox, GridBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider, FloatLogSlider
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider, Play, SliderStyle
from .widget_color import ColorPicker
from .widget_date import DatePicker
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, ToggleButtonsStyle, Dropdown, Select, SelectionSlider, SelectMultiple, SelectionRangeSlider
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, HTMLMath, Label, Text, Textarea, Password, Combobox
from .widget_controller import Controller
from .interaction import interact, interactive, fixed, interact_manual, interactive_output
from .widget_link import jslink, jsdlink
from .widget_layout import Layout
from .widget_media import Image, Video, Audio
from .widget_style import Style
from .widget_templates import TwoByTwoLayout, AppLayout, GridspecLayout
from .widget_upload import FileUpload
| mit | -6,107,679,430,006,972,000 | 51.137931 | 150 | 0.829365 | false |
sreidy/roboticsclub.org | api/urls.py | 2 | 1047 | from rest_framework import routers, serializers, viewsets
from .views import *
from django.conf.urls import include, patterns, url
router = routers.DefaultRouter()
router.register(r'api_requests', APIRequestViewSet)
router.register(r'webcams', WebcamViewSet)
router.register(r'datetime', DateTimeViewSet, base_name="datetime")
router.register(r'users', RoboUserViewSet)
router.register(r'officers', OfficerViewSet)
router.register(r'projects', ProjectViewSet)
router.register(r'channels', ChannelViewSet, base_name="channels")
router.register(r'calendar', CalendarViewSet, base_name="calendar")
router.register(r'sponsors', SponsorViewSet)
router.register(r'social_medias', SocialMediaViewSet)
router.register(r'machines', MachineViewSet)
router.register(r'faq', CategoryViewSet)
router.register(r'tshirts', TShirtViewSet)
router.register(r'posters', PosterViewSet)
router.register(r'upcs', UPCItemViewSet, base_name="upcs")
urlpatterns = router.urls + [
url(r'^magnetic/$', MagneticView.as_view()),
url(r'^rfid/$', RFIDView.as_view()),
]
| mit | -4,484,347,834,260,364,000 | 37.777778 | 67 | 0.78319 | false |
PeRDy/django-audit-tools | audit_tools/tests/audit/views/api/test_mixins.py | 1 | 5328 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from collections import OrderedDict
from unittest import TestCase
from django.core.exceptions import ImproperlyConfigured
from mock import patch, MagicMock, call
from audit_tools.audit.views.api.mixins import AjaxFormMixin
class AjaxFormMixinTestCase(TestCase):
def setUp(self):
self.mixin = AjaxFormMixin()
@patch.object(AjaxFormMixin, 'initial_data')
def test_get_initial_data(self, initial_data_mock):
self.mixin.get_initial_data()
self.assertEqual(initial_data_mock.copy.call_count, 1)
def test_get_form_class(self):
form_class = MagicMock()
self.mixin.form_class['GET'] = form_class
request = MagicMock()
request.method = 'GET'
self.mixin.request = request
result = self.mixin.get_form_class()
self.assertEqual(result, form_class)
@patch.object(AjaxFormMixin, 'get_form_kwargs', return_value={})
def test_get_form(self, get_form_kwargs_mock):
form_class = MagicMock()
form = MagicMock()
form_class.return_value = form
result = self.mixin.get_form(form_class)
self.assertEqual(form_class.call_count, 1)
self.assertEqual(result, form)
def test_get_form_without_class(self):
result = self.mixin.get_form(None)
self.assertIsNone(result)
@patch.object(AjaxFormMixin, 'get_initial_data', return_value={})
def test_get_form_kwargs_post(self, get_initial_data_mock):
request = MagicMock()
request.method = 'POST'
request.POST = 'foo'
request.FILES = 'bar'
self.mixin.request = request
result = self.mixin.get_form_kwargs()
self.assertIn('initial', result)
self.assertIn('data', result)
self.assertIn('files', result)
@patch.object(AjaxFormMixin, 'get_initial_data', return_value={})
def test_get_form_kwargs_put(self, get_initial_data_mock):
request = MagicMock()
request.method = 'PUT'
request.POST = 'foo'
request.FILES = 'bar'
self.mixin.request = request
result = self.mixin.get_form_kwargs()
self.assertIn('initial', result)
self.assertIn('data', result)
self.assertIn('files', result)
@patch.object(AjaxFormMixin, 'get_initial_data', return_value={})
def test_get_form_kwargs_patch(self, get_initial_data_mock):
request = MagicMock()
request.method = 'PATCH'
request.POST = 'foo'
request.FILES = 'bar'
self.mixin.request = request
result = self.mixin.get_form_kwargs()
self.assertIn('initial', result)
self.assertIn('data', result)
self.assertIn('files', result)
@patch.object(AjaxFormMixin, 'get_initial_data', return_value={})
def test_get_form_kwargs_get(self, get_initial_data_mock):
request = MagicMock()
request.method = 'GET'
request.POST = 'foo'
request.FILES = 'bar'
self.mixin.request = request
result = self.mixin.get_form_kwargs()
self.assertIn('initial', result)
self.assertIn('data', result)
def test_get_context_data(self):
kwargs = {'foo': 1, 'bar': 2}
result = self.mixin.get_context_data(**kwargs)
self.assertDictEqual(kwargs, result)
def test_get_success_url(self):
url = 'http://www.foo.bar'
self.mixin.success_url = url
result = self.mixin.get_success_url()
self.assertEqual(url, result)
def test_get_success_url_fail(self):
self.mixin.success_url = None
self.assertRaises(ImproperlyConfigured, self.mixin.get_success_url)
@patch.object(AjaxFormMixin, 'error_response')
def test_form_invalid(self, error_response_mock):
form = MagicMock()
errors = OrderedDict()
errors['foo'] = 'foobar'
errors['bar'] = 'barfoo'
form.errors = errors
self.mixin.form_invalid(form)
expected_call = call('Error in fields: foo, bar')
self.assertEqual(error_response_mock.call_count, 1)
self.assertEqual(error_response_mock.call_args, expected_call)
@patch('audit_tools.audit.views.api.mixins.json')
@patch.object(AjaxFormMixin, 'response_class')
def test_error_response(self, response_class_mock, json_mock):
msg = 'foo'
kwargs = {'foo': 'bar'}
expected_context = '{"status": 400, "general_message": "foo", "success": false}'
json_mock.dumps.return_value = expected_context
expected_kwargs = {'foo': 'bar', 'content_type': 'application/json', 'status': 400}
self.mixin.error_response(msg, **kwargs)
context_result = response_class_mock.call_args[0][0]
kwargs_result = response_class_mock.call_args[1]
self.assertEqual(expected_context, context_result)
self.assertDictEqual(expected_kwargs, kwargs_result)
def test_order_query(self):
self.mixin.order_by = 'id'
queryset = MagicMock()
queryset.order_by.return_value = queryset
self.mixin.queryset = queryset
self.mixin.order_query()
self.assertEqual(self.mixin.queryset, queryset)
self.assertEqual(queryset.order_by.call_count, 1)
def tearDown(self):
pass
| gpl-2.0 | 8,098,287,858,996,121,000 | 30.904192 | 91 | 0.632695 | false |
bastibl/gnuradio | gr-digital/python/digital/qa_ofdm_frame_equalizer_vcvc.py | 1 | 16372 | #!/usr/bin/env python
# Copyright 2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from __future__ import division
import numpy
from gnuradio import gr, gr_unittest, digital, blocks
import pmt
class qa_ofdm_frame_equalizer_vcvc (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
self.tsb_key = "tsb_key"
def tearDown (self):
self.tb = None
def test_001_simple (self):
""" Very simple functionality testing:
- static equalizer
- init channel state with all ones
- transmit all ones
- make sure we rx all ones
- Tag check: put in frame length tag and one other random tag,
make sure they're propagated
"""
fft_len = 8
equalizer = digital.ofdm_equalizer_static(fft_len)
n_syms = 3
tx_data = (1,) * fft_len * n_syms
chan_tag = gr.tag_t()
chan_tag.offset = 0
chan_tag.key = "ofdm_sync_chan_taps"
chan_tag.value = pmt.init_c32vector(fft_len, (1,) * fft_len)
random_tag = gr.tag_t()
random_tag.offset = 1
random_tag.key = "foo"
random_tag.value = pmt.from_long(42)
src = blocks.vector_source_c(tx_data, False, fft_len, (chan_tag, random_tag))
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, self.tsb_key)
sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, n_syms, self.tsb_key),
eq,
sink
)
self.tb.run ()
# Check data
self.assertEqual(tx_data, sink.data()[0])
# Check tags
tag_dict = dict()
for tag in sink.tags():
ptag = gr.tag_to_python(tag)
tag_dict[ptag.key] = ptag.value
expected_dict = {
'foo': 42
}
self.assertEqual(tag_dict, expected_dict)
def test_001b_simple_skip_nothing (self):
"""
Same as before, but put a skip-header in there
"""
fft_len = 8
equalizer = digital.ofdm_equalizer_static(fft_len, symbols_skipped=1)
n_syms = 3
tx_data = (1,) * fft_len * n_syms
chan_tag = gr.tag_t()
chan_tag.offset = 0
chan_tag.key = "ofdm_sync_chan_taps"
chan_tag.value = pmt.init_c32vector(fft_len, (1,) * fft_len)
src = blocks.vector_source_c(tx_data, False, fft_len, (chan_tag,))
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, self.tsb_key)
sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, n_syms, self.tsb_key),
eq,
sink
)
self.tb.run ()
# Check data
self.assertEqual(tx_data, sink.data()[0])
def test_001c_carrier_offset_no_cp (self):
"""
Same as before, but put a carrier offset in there
"""
fft_len = 8
cp_len = 0
n_syms = 1
carr_offset = 1
occupied_carriers = ((-2, -1, 1, 2),)
tx_data = (
0, 0, 0, -1j, -1j, 0, -1j, -1j,
)
# The rx'd signal is shifted
rx_expected = (0, 0, 1, 1, 0, 1, 1, 0) * n_syms
equalizer = digital.ofdm_equalizer_static(fft_len, occupied_carriers)
chan_tag = gr.tag_t()
chan_tag.offset = 0
chan_tag.key = "ofdm_sync_chan_taps"
# Note: this is shifted to the correct position!
chan_tag.value = pmt.init_c32vector(fft_len, (0, 0, -1j, -1j, 0, -1j, -1j, 0))
offset_tag = gr.tag_t()
offset_tag.offset = 0
offset_tag.key = "ofdm_sync_carr_offset"
offset_tag.value = pmt.from_long(carr_offset)
src = blocks.vector_source_c(tx_data, False, fft_len, (chan_tag, offset_tag))
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), cp_len, self.tsb_key)
sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, n_syms, self.tsb_key),
eq,
sink
)
self.tb.run ()
# Check data
self.assertComplexTuplesAlmostEqual(rx_expected, sink.data()[0], places=4)
def test_001c_carrier_offset_cp (self):
"""
Same as before, but put a carrier offset in there and a CP
"""
fft_len = 8
cp_len = 2
n_syms = 3
# cp_len/fft_len == 1/4, therefore, the phase is rotated by
# carr_offset * \pi/2 in every symbol
occupied_carriers = ((-2, -1, 1, 2),)
carr_offset = -1
tx_data = (
0,-1j,-1j, 0,-1j,-1j, 0, 0,
0, -1, -1, 0, -1, -1, 0, 0,
0, 1j, 1j, 0, 1j, 1j, 0, 0,
)
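        # With carr_offset == -1, each successive symbol above is the previous
        # one multiplied by exp(-1j*pi/2), i.e. rotated by carr_offset * pi/2.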
# Rx'd signal is corrected
rx_expected = (0, 0, 1, 1, 0, 1, 1, 0) * n_syms
equalizer = digital.ofdm_equalizer_static(fft_len, occupied_carriers)
chan_tag = gr.tag_t()
chan_tag.offset = 0
chan_tag.key = "ofdm_sync_chan_taps"
chan_tag.value = pmt.init_c32vector(fft_len, (0, 0, 1, 1, 0, 1, 1, 0))
offset_tag = gr.tag_t()
offset_tag.offset = 0
offset_tag.key = "ofdm_sync_carr_offset"
offset_tag.value = pmt.from_long(carr_offset)
src = blocks.vector_source_c(tx_data, False, fft_len, (chan_tag, offset_tag))
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), cp_len, self.tsb_key)
sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, n_syms, self.tsb_key),
eq,
sink
)
self.tb.run ()
# Check data
self.assertComplexTuplesAlmostEqual(rx_expected, sink.data()[0], places=4)
def test_002_static (self):
"""
- Add a simple channel
- Make symbols QPSK
"""
fft_len = 8
# 4 5 6 7 0 1 2 3
tx_data = [-1, -1, 1, 2, -1, 3, 0, -1, # 0
-1, -1, 0, 2, -1, 2, 0, -1, # 8
-1, -1, 3, 0, -1, 1, 0, -1, # 16 (Pilot symbols)
-1, -1, 1, 1, -1, 0, 2, -1] # 24
cnst = digital.constellation_qpsk()
tx_signal = [cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data]
occupied_carriers = ((1, 2, 6, 7),)
pilot_carriers = ((), (), (1, 2, 6, 7), ())
pilot_symbols = (
[], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], []
)
equalizer = digital.ofdm_equalizer_static(fft_len, occupied_carriers, pilot_carriers, pilot_symbols)
channel = [
0, 0, 1, 1, 0, 1, 1, 0,
0, 0, 1, 1, 0, 1, 1, 0, # These coefficients will be rotated slightly (but less than \pi/2)
0, 0, 1j, 1j, 0, 1j, 1j, 0, # Go crazy here!
0, 0, 1j, 1j, 0, 1j, 1j, 0
]
for idx in range(fft_len, 2*fft_len):
channel[idx] = channel[idx-fft_len] * numpy.exp(1j * .1 * numpy.pi * (numpy.random.rand()-.5))
chan_tag = gr.tag_t()
chan_tag.offset = 0
chan_tag.key = "ofdm_sync_chan_taps"
chan_tag.value = pmt.init_c32vector(fft_len, channel[:fft_len])
src = blocks.vector_source_c(numpy.multiply(tx_signal, channel), False, fft_len, (chan_tag,))
sink = blocks.tsb_vector_sink_c(vlen=fft_len, tsb_key=self.tsb_key)
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, self.tsb_key, True)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, len(tx_data) // fft_len, self.tsb_key),
eq,
sink
)
self.tb.run ()
rx_data = [cnst.decision_maker_v((x,)) if x != 0 else -1 for x in sink.data()[0]]
# Check data
self.assertEqual(tx_data, rx_data)
# Check tags
tag_dict = dict()
for tag in sink.tags():
ptag = gr.tag_to_python(tag)
tag_dict[ptag.key] = ptag.value
if ptag.key == 'ofdm_sync_chan_taps':
tag_dict[ptag.key] = list(pmt.c32vector_elements(tag.value))
else:
tag_dict[ptag.key] = pmt.to_python(tag.value)
expected_dict = {
'ofdm_sync_chan_taps': channel[-fft_len:]
}
self.assertEqual(tag_dict, expected_dict)
def test_002_static_wo_tags (self):
""" Same as before, but the input stream has no tag.
We specify the frame size in the constructor.
We also specify a tag key, so the output stream *should* have
a TSB tag.
"""
fft_len = 8
n_syms = 4
# 4 5 6 7 0 1 2 3
tx_data = [-1, -1, 1, 2, -1, 3, 0, -1, # 0
-1, -1, 0, 2, -1, 2, 0, -1, # 8
-1, -1, 3, 0, -1, 1, 0, -1, # 16 (Pilot symbols)
-1, -1, 1, 1, -1, 0, 2, -1] # 24
cnst = digital.constellation_qpsk()
tx_signal = [cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data]
occupied_carriers = ((1, 2, 6, 7),)
pilot_carriers = ((), (), (1, 2, 6, 7), ())
pilot_symbols = (
[], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], []
)
equalizer = digital.ofdm_equalizer_static(fft_len, occupied_carriers, pilot_carriers, pilot_symbols)
channel = [
0, 0, 1, 1, 0, 1, 1, 0,
0, 0, 1, 1, 0, 1, 1, 0, # These coefficients will be rotated slightly (below)...
0, 0, 1j, 1j, 0, 1j, 1j, 0, # Go crazy here!
0, 0, 1j, 1j, 0, 1j, 1j, 0 # ...and again here.
]
for idx in range(fft_len, 2*fft_len):
channel[idx] = channel[idx-fft_len] * numpy.exp(1j * .1 * numpy.pi * (numpy.random.rand()-.5))
idx2 = idx+2*fft_len
channel[idx2] = channel[idx2] * numpy.exp(1j * 0 * numpy.pi * (numpy.random.rand()-.5))
src = blocks.vector_source_c(numpy.multiply(tx_signal, channel), False, fft_len)
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, self.tsb_key, False, n_syms)
sink = blocks.tsb_vector_sink_c(vlen=fft_len, tsb_key=self.tsb_key)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, len(tx_data) // fft_len, self.tsb_key),
eq,
sink
)
self.tb.run ()
rx_data = [cnst.decision_maker_v((x,)) if x != 0 else -1 for x in sink.data()[0]]
self.assertEqual(tx_data, rx_data)
# Check TSB Functionality
packets = sink.data()
self.assertEqual(len(packets), 1)
self.assertEqual(len(packets[0]), len(tx_data))
    def test_002_static_wo_tsb (self):
        """ Same static-channel test, but with an empty tag key and a plain
        vector sink, so no TSB tag is produced. (The distinct name matters:
        a second def with the same name would silently replace the test above.)
        """
fft_len = 8
# 4 5 6 7 0 1 2 3
tx_data = [-1, -1, 1, 2, -1, 3, 0, -1, # 0
-1, -1, 0, 2, -1, 2, 0, -1, # 8
-1, -1, 3, 0, -1, 1, 0, -1, # 16 (Pilot symbols)
-1, -1, 1, 1, -1, 0, 2, -1] # 24
cnst = digital.constellation_qpsk()
tx_signal = [cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data]
occupied_carriers = ((1, 2, 6, 7),)
pilot_carriers = ((), (), (1, 2, 6, 7), ())
pilot_symbols = (
[], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], []
)
equalizer = digital.ofdm_equalizer_static(fft_len, occupied_carriers, pilot_carriers, pilot_symbols)
channel = [
0, 0, 1, 1, 0, 1, 1, 0,
0, 0, 1, 1, 0, 1, 1, 0, # These coefficients will be rotated slightly...
0, 0, 1j, 1j, 0, 1j, 1j, 0, # Go crazy here!
0, 0, 1j, 1j, 0, 1j, 1j, 0 # ...and again here.
]
for idx in range(fft_len, 2*fft_len):
channel[idx] = channel[idx-fft_len] * numpy.exp(1j * .1 * numpy.pi * (numpy.random.rand()-.5))
idx2 = idx+2*fft_len
channel[idx2] = channel[idx2] * numpy.exp(1j * 0 * numpy.pi * (numpy.random.rand()-.5))
src = blocks.vector_source_c(numpy.multiply(tx_signal, channel), False, fft_len)
sink = blocks.vector_sink_c(fft_len)
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, "", False, 4)
self.tb.connect(src, eq, sink)
self.tb.run ()
rx_data = [cnst.decision_maker_v((x,)) if x != 0 else -1 for x in sink.data()]
self.assertEqual(tx_data, rx_data)
def test_002_simpledfe (self):
""" Use the simple DFE equalizer. """
fft_len = 8
# 4 5 6 7 0 1 2 3
tx_data = [-1, -1, 1, 2, -1, 3, 0, -1, # 0
-1, -1, 0, 2, -1, 2, 0, -1, # 8
-1, -1, 3, 0, -1, 1, 0, -1, # 16 (Pilot symbols)
-1, -1, 1, 1, -1, 0, 2, -1] # 24
cnst = digital.constellation_qpsk()
tx_signal = [cnst.map_to_points_v(x)[0] if x != -1 else 0 for x in tx_data]
occupied_carriers = ((1, 2, 6, 7),)
pilot_carriers = ((), (), (1, 2, 6, 7), ())
pilot_symbols = (
[], [], [cnst.map_to_points_v(x)[0] for x in (1, 0, 3, 0)], []
)
equalizer = digital.ofdm_equalizer_simpledfe(
fft_len, cnst.base(), occupied_carriers, pilot_carriers, pilot_symbols, 0, 0.01
)
channel = [
0, 0, 1, 1, 0, 1, 1, 0,
0, 0, 1, 1, 0, 1, 1, 0, # These coefficients will be rotated slightly...
0, 0, 1j, 1j, 0, 1j, 1j, 0, # Go crazy here!
0, 0, 1j, 1j, 0, 1j, 1j, 0 # ...and again here.
]
for idx in range(fft_len, 2*fft_len):
channel[idx] = channel[idx-fft_len] * numpy.exp(1j * .1 * numpy.pi * (numpy.random.rand()-.5))
idx2 = idx+2*fft_len
channel[idx2] = channel[idx2] * numpy.exp(1j * 0 * numpy.pi * (numpy.random.rand()-.5))
chan_tag = gr.tag_t()
chan_tag.offset = 0
chan_tag.key = "ofdm_sync_chan_taps"
chan_tag.value = pmt.init_c32vector(fft_len, channel[:fft_len])
src = blocks.vector_source_c(numpy.multiply(tx_signal, channel), False, fft_len, (chan_tag,))
eq = digital.ofdm_frame_equalizer_vcvc(equalizer.base(), 0, self.tsb_key, True)
sink = blocks.tsb_vector_sink_c(fft_len, tsb_key=self.tsb_key)
self.tb.connect(
src,
blocks.stream_to_tagged_stream(gr.sizeof_gr_complex, fft_len, len(tx_data) // fft_len, self.tsb_key),
eq,
sink
)
self.tb.run ()
rx_data = [cnst.decision_maker_v((x,)) if x != 0 else -1 for x in sink.data()[0]]
self.assertEqual(tx_data, rx_data)
self.assertEqual(len(sink.tags()), 1)
tag = sink.tags()[0]
self.assertEqual(tag.key, "ofdm_sync_chan_taps")
self.assertComplexTuplesAlmostEqual(list(pmt.c32vector_elements(tag.value)), channel[-fft_len:], places=1)
if __name__ == '__main__':
gr_unittest.run(qa_ofdm_frame_equalizer_vcvc, "qa_ofdm_frame_equalizer_vcvc.xml")
| gpl-3.0 | 3,725,604,465,256,258,600 | 42.084211 | 117 | 0.524127 | false |
google/feedloader | appengine/uploader/shoptimizer_client.py | 1 | 9956 | # coding=utf-8
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client to send product data to the Shoptimizer (optimization) API and parse the results."""
import json
import logging
from typing import Any, Dict
import requests
import constants
_CONFIG_FILE_PATH = 'config/shoptimizer_config.json'
_ERROR_MSG_TEMPLATE = ('Request for batch #%d with operation %s encountered an '
'error: %s. Error: %s')
_METADATA_SERVER_TOKEN_URL = 'http://metadata/computeMetadata/v1/instance/service-accounts/default/identity?audience='
class ShoptimizerClient(object):
"""Client to send product data to the Shoptimizer (optimization) API and parse the results."""
def __init__(self, batch_number: int, operation: constants.Operation):
"""Inits ShoptimizerClient.
Args:
batch_number: The number that identifies this batch.
operation: The operation to be performed on this batch (upsert, delete,
prevent_expiring).
"""
self._batch_number = batch_number
self._operation = operation
self._optimization_params = _load_optimization_params(
self._batch_number, self._operation)
self._config_params = _load_config_params()
def shoptimize(self, batch: constants.Batch) -> constants.Batch:
"""Optimizes a batch of product data by sending it to the Shoptimizer (optimization) API.
Args:
batch: The batch of product data to be optimized.
Returns:
The optimized batch of product data if no errors encountered,
or the original batch of product data otherwise.
"""
if not self._is_input_valid(batch):
return batch
try:
response_dict = self._send_to_shoptimizer(batch)
except (TypeError, requests.exceptions.RequestException, ValueError):
return batch
# Checks for some top-level failure in response
# (received response in correct format without exceptions,
# but something went wrong in Shoptimizer)
if response_dict.get('error-msg', ''):
logging.error(_ERROR_MSG_TEMPLATE, self._batch_number,
self._operation.value,
'Encountered an error in the Shoptimizer API response',
response_dict['error-msg'])
return batch
self._log_results(response_dict)
return response_dict.get('optimized-data', batch)
def _is_input_valid(self, batch: constants.Batch) -> bool:
"""Checks input parameters are valid.
Args:
batch: The batch of product data to be optimized.
Returns:
True if the input is valid, False otherwise.
"""
if not constants.SHOPTIMIZER_BASE_URL:
logging.warning(
_ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
'Shoptimizer API URL is not set. '
'Check the SHOPTIMIZER_URL environment variable is correctly set', '')
return False
if not batch:
logging.warning(_ERROR_MSG_TEMPLATE, self._batch_number,
self._operation.value,
'Batch was empty. Shoptimizer API not called', '')
return False
if not self._optimization_params:
      logging.info(
          _ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
          'Optimization parameters were empty. Shoptimizer API not called', '')
return False
if 'true' not in [
val.lower() for val in self._optimization_params.values()
]:
      logging.info(
          _ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
          'No optimization parameter was set to true. Shoptimizer API not called.',
          '')
return False
return True
def _send_to_shoptimizer(self, batch) -> Dict[str, Any]:
"""Logs errors returned by individual Shoptimizer API optimizers.
Args:
batch: The batch of product data to be optimized.
Returns:
A dictionary containing the results of the Shoptimizer API call.
"""
try:
batch_as_json = json.dumps(batch)
except TypeError as type_error:
logging.exception(
_ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
'Failed to convert batch to JSON. Shoptimizer API not called',
type_error)
raise
try:
jwt = self._get_jwt()
except requests.exceptions.RequestException:
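      # _get_jwt() has already logged the failure details; just propagate it.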
raise
try:
headers = {
'Authorization': f'bearer {jwt}',
'Content-Type': 'application/json'
}
request_params = {}
request_params.update(self._optimization_params)
request_params.update(self._config_params)
response = requests.request(
'POST',
constants.SHOPTIMIZER_ENDPOINT,
data=batch_as_json,
headers=headers,
params=request_params)
response.raise_for_status()
response_dict = json.loads(response.text)
except requests.exceptions.RequestException as request_exception:
logging.exception(
_ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
'Did not receive a successful response from the Shoptimizer API',
request_exception)
raise
except ValueError as value_error:
logging.exception(
_ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
'Failed to deserialize JSON returned from Shoptimizer API',
value_error)
raise
return response_dict
def _get_jwt(self) -> str:
"""Retrieves a JSON web token from the Google metadata server for Cloud Run authentication.
Returns:
A JSON web token that can be used for Cloud Run authentication.
"""
try:
token_request_url = _METADATA_SERVER_TOKEN_URL + constants.SHOPTIMIZER_BASE_URL
token_request_headers = {'Metadata-Flavor': 'Google'}
# Fetches the token
response = requests.get(token_request_url, headers=token_request_headers)
response.raise_for_status()
jwt = response.content.decode('utf-8')
except requests.exceptions.RequestException as request_exception:
logging.exception(
_ERROR_MSG_TEMPLATE, self._batch_number, self._operation.value,
'Failed get an authentication JWT. Shoptimizer API not called',
request_exception)
raise
return jwt
def _log_results(self, response_dict: Dict[str, Any]) -> None:
"""Logs the results of the call to the Shoptimizer API.
Args:
response_dict: The results of the call to the Shoptimizer API.
"""
optimization_results = response_dict.get('optimization-results', '')
plugin_results = response_dict.get('plugin-results', '')
self._log_optimizer_error_msgs(optimization_results)
self._log_optimizer_error_msgs(plugin_results)
logging.info(
'Shoptimizer API finished running for batch #%d with operation %s. '
'Optimizer Results: %s | Plugin Results: %s', self._batch_number,
self._operation.value, optimization_results, plugin_results)
def _log_optimizer_error_msgs(
self, shoptimizer_results: Dict[str, Dict[str, Any]]) -> None:
"""Logs errors returned by individual Shoptimizer API optimizers.
Args:
shoptimizer_results: The results of each individual optimizer returned
from the Shoptimizer API.
"""
if not shoptimizer_results:
return
for optimizer_name, optimizer_results in shoptimizer_results.items():
if optimizer_results.get('result', '') == 'failure':
logging.error(
'Request for batch #%d with operation %s encountered an error when '
'running optimizer %s. Error: %s', self._batch_number,
self._operation.value, optimizer_name,
optimizer_results.get('error_msg', '(error_msg missing)'))
def _load_optimization_params(batch_number: int,
operation: constants.Operation) -> Dict[str, str]:
"""Loads optimization parameters for the Shoptimizer API.
Args:
batch_number: The number that identifies this batch.
operation: The operation to be performed on this batch (upsert, delete,
prevent_expiring).
Returns:
The optimization parameters for the Shoptimizer API.
"""
try:
with open(_CONFIG_FILE_PATH) as shoptimizer_config:
optimization_params = json.loads(shoptimizer_config.read())
except OSError as os_error:
logging.exception(
_ERROR_MSG_TEMPLATE, batch_number, operation.value,
'Failed to read the shoptimizer config. '
'Check config/shoptimizer_config.json exists and has read permissions. '
'Shoptimizer API not called', os_error)
raise
except ValueError as value_error:
logging.exception(
_ERROR_MSG_TEMPLATE, batch_number, operation.value,
'Failed to read the shoptimizer config. '
'Check config/shoptimizer_config.json is valid JSON. '
'Shoptimizer API not called', value_error)
raise
return optimization_params
def _load_config_params() -> Dict[str, str]:
"""Loads configuration parameters for the Shoptimizer API.
The configuration parameters include attributes listed below:
- lang: The code of the language.
- country: The code of the country.
- currency: The code of the currency.
Returns:
The configuration parameters for the Shoptimizer API.
"""
config_params = {
'lang': constants.CONTENT_LANGUAGE,
'country': constants.TARGET_COUNTRY,
'currency': constants.TARGET_CURRENCY,
}
return config_params
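# --- Illustrative usage (a sketch added for clarity; not part of the module) ---
# Assumes `constants.Operation.UPSERT` exists and that a batch is a dict shaped
# like a Content API custombatch payload; both are assumptions for the example.
#
#   client = ShoptimizerClient(batch_number=1,
#                              operation=constants.Operation.UPSERT)
#   optimized_batch = client.shoptimize({'entries': []})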
| apache-2.0 | -8,745,425,183,561,239,000 | 34.304965 | 118 | 0.67045 | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/pyatspi/state.py | 1 | 6076 | #Copyright (C) 2008 Codethink Ltd
#copyright: Copyright (c) 2005, 2007 IBM Corporation
#This library is free software; you can redistribute it and/or
#modify it under the terms of the GNU Lesser General Public
#License version 2 as published by the Free Software Foundation.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU Lesser General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#Portions of this code originally licensed and copyright (c) 2005, 2007
#IBM Corporation under the BSD license, available at
#U{http://www.opensource.org/licenses/bsd-license.php}
#authors: Peter Parente, Mark Doffman
from gi.repository import Atspi
from gi.repository import GObject
from enum import Enum as _Enum
#------------------------------------------------------------------------------
class StateType(_Enum):
_enum_lookup = {
0:'STATE_INVALID',
1:'STATE_ACTIVE',
2:'STATE_ARMED',
3:'STATE_BUSY',
4:'STATE_CHECKED',
5:'STATE_COLLAPSED',
6:'STATE_DEFUNCT',
7:'STATE_EDITABLE',
8:'STATE_ENABLED',
9:'STATE_EXPANDABLE',
10:'STATE_EXPANDED',
11:'STATE_FOCUSABLE',
12:'STATE_FOCUSED',
13:'STATE_HAS_TOOLTIP',
14:'STATE_HORIZONTAL',
15:'STATE_ICONIFIED',
16:'STATE_MODAL',
17:'STATE_MULTI_LINE',
18:'STATE_MULTISELECTABLE',
19:'STATE_OPAQUE',
20:'STATE_PRESSED',
21:'STATE_RESIZABLE',
22:'STATE_SELECTABLE',
23:'STATE_SELECTED',
24:'STATE_SENSITIVE',
25:'STATE_SHOWING',
26:'STATE_SINGLE_LINE',
27:'STATE_STALE',
28:'STATE_TRANSIENT',
29:'STATE_VERTICAL',
30:'STATE_VISIBLE',
31:'STATE_MANAGES_DESCENDANTS',
32:'STATE_INDETERMINATE',
33:'STATE_REQUIRED',
34:'STATE_TRUNCATED',
35:'STATE_ANIMATED',
36:'STATE_INVALID_ENTRY',
37:'STATE_SUPPORTS_AUTOCOMPLETION',
38:'STATE_SELECTABLE_TEXT',
39:'STATE_IS_DEFAULT',
40:'STATE_VISITED',
41:'STATE_LAST_DEFINED',
}
#------------------------------------------------------------------------------
STATE_ACTIVE = StateType(1)
STATE_ANIMATED = StateType(35)
STATE_ARMED = StateType(2)
STATE_BUSY = StateType(3)
STATE_CHECKED = StateType(4)
STATE_COLLAPSED = StateType(5)
STATE_DEFUNCT = StateType(6)
STATE_EDITABLE = StateType(7)
STATE_ENABLED = StateType(8)
STATE_EXPANDABLE = StateType(9)
STATE_EXPANDED = StateType(10)
STATE_FOCUSABLE = StateType(11)
STATE_FOCUSED = StateType(12)
STATE_HAS_TOOLTIP = StateType(13)
STATE_HORIZONTAL = StateType(14)
STATE_ICONIFIED = StateType(15)
STATE_INDETERMINATE = StateType(32)
STATE_INVALID = StateType(0)
STATE_INVALID_ENTRY = StateType(36)
STATE_IS_DEFAULT = StateType(39)
STATE_LAST_DEFINED = StateType(41)
STATE_MANAGES_DESCENDANTS = StateType(31)
STATE_MODAL = StateType(16)
STATE_MULTISELECTABLE = StateType(18)
STATE_MULTI_LINE = StateType(17)
STATE_OPAQUE = StateType(19)
STATE_PRESSED = StateType(20)
STATE_REQUIRED = StateType(33)
STATE_RESIZABLE = StateType(21)
STATE_SELECTABLE = StateType(22)
STATE_SELECTABLE_TEXT = StateType(38)
STATE_SELECTED = StateType(23)
STATE_SENSITIVE = StateType(24)
STATE_SHOWING = StateType(25)
STATE_SINGLE_LINE = StateType(26)
STATE_STALE = StateType(27)
STATE_SUPPORTS_AUTOCOMPLETION = StateType(37)
STATE_TRANSIENT = StateType(28)
STATE_TRUNCATED = StateType(34)
STATE_VERTICAL = StateType(29)
STATE_VISIBLE = StateType(30)
STATE_VISITED = StateType(40)
#------------------------------------------------------------------------------
# Build a dictionary mapping state values to names based on the prefix of the enum constants.
STATE_VALUE_TO_NAME = dict(((value, name[6:].lower().replace('_', ' '))
for name, value
in globals().items()
if name.startswith('STATE_')))
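# For example, STATE_VALUE_TO_NAME[STATE_HAS_TOOLTIP] == 'has tooltip'.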
#------------------------------------------------------------------------------
def _marshal_state_set(bitfield):
"""
The D-Bus protocol has a stateset object passed
as a 64bit bitfield. The Bits are passed as two 32bit
integers.
This function marshals the D-Bus message into a
StateSet object that corresponds to these states.
"""
(lower, upper) = bitfield
states = []
pos = 0
while (lower):
if (1L)&lower:
states.append(StateType(pos))
pos+=1
lower >>= 1
pos = 32
while (upper):
if (1L)&upper:
states.append(StateType(pos))
pos+=1
upper >>= 1
return StateSet(*states)
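# Added illustration (not in the original module): bit n of `lower` maps to
# StateType(n) and bit n of `upper` to StateType(n + 32), so for example
# _marshal_state_set((0b10010, 0)) yields a StateSet containing
# STATE_ACTIVE (1) and STATE_CHECKED (4).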
#------------------------------------------------------------------------------
def stateset_init(self, *states):
GObject.GObject.__init__(self)
map(self.add, states)
# TODO: Probably remove this hack for 2.2, since BGO#646581 is fixed
def StateSet_getStates(self):
ret = []
for i in range(0, 64):
if (self.states & (1 << i)):
ret.append(Atspi.StateType(i))
return ret
StateSet = Atspi.StateSet
StateSet.getStates = StateSet_getStates
StateSet.isEmpty = StateSet.is_empty
StateSet.raw = lambda x: x
StateSet.unref = lambda x: None
StateSet.__init__ = stateset_init
| gpl-3.0 | -7,356,553,817,032,895,000 | 33.134831 | 93 | 0.563858 | false |
Cadene/pretrained-models.pytorch | pretrainedmodels/models/inceptionv4.py | 1 | 11479 | from __future__ import print_function, division, absolute_import
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
import os
import sys
__all__ = ['InceptionV4', 'inceptionv4']
pretrained_settings = {
'inceptionv4': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/inceptionv4-8e4777a0.pth',
'input_space': 'RGB',
'input_size': [3, 299, 299],
'input_range': [0, 1],
'mean': [0.5, 0.5, 0.5],
'std': [0.5, 0.5, 0.5],
'num_classes': 1000
},
'imagenet+background': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/inceptionv4-8e4777a0.pth',
'input_space': 'RGB',
'input_size': [3, 299, 299],
'input_range': [0, 1],
'mean': [0.5, 0.5, 0.5],
'std': [0.5, 0.5, 0.5],
'num_classes': 1001
}
}
}
class BasicConv2d(nn.Module):
def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
super(BasicConv2d, self).__init__()
self.conv = nn.Conv2d(in_planes, out_planes,
kernel_size=kernel_size, stride=stride,
padding=padding, bias=False) # verify bias false
self.bn = nn.BatchNorm2d(out_planes,
eps=0.001, # value found in tensorflow
momentum=0.1, # default pytorch value
affine=True)
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
x = self.relu(x)
return x
class Mixed_3a(nn.Module):
def __init__(self):
super(Mixed_3a, self).__init__()
self.maxpool = nn.MaxPool2d(3, stride=2)
self.conv = BasicConv2d(64, 96, kernel_size=3, stride=2)
def forward(self, x):
x0 = self.maxpool(x)
x1 = self.conv(x)
out = torch.cat((x0, x1), 1)
return out
class Mixed_4a(nn.Module):
def __init__(self):
super(Mixed_4a, self).__init__()
self.branch0 = nn.Sequential(
BasicConv2d(160, 64, kernel_size=1, stride=1),
BasicConv2d(64, 96, kernel_size=3, stride=1)
)
self.branch1 = nn.Sequential(
BasicConv2d(160, 64, kernel_size=1, stride=1),
BasicConv2d(64, 64, kernel_size=(1,7), stride=1, padding=(0,3)),
BasicConv2d(64, 64, kernel_size=(7,1), stride=1, padding=(3,0)),
BasicConv2d(64, 96, kernel_size=(3,3), stride=1)
)
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
out = torch.cat((x0, x1), 1)
return out
class Mixed_5a(nn.Module):
def __init__(self):
super(Mixed_5a, self).__init__()
self.conv = BasicConv2d(192, 192, kernel_size=3, stride=2)
self.maxpool = nn.MaxPool2d(3, stride=2)
def forward(self, x):
x0 = self.conv(x)
x1 = self.maxpool(x)
out = torch.cat((x0, x1), 1)
return out
class Inception_A(nn.Module):
def __init__(self):
super(Inception_A, self).__init__()
self.branch0 = BasicConv2d(384, 96, kernel_size=1, stride=1)
self.branch1 = nn.Sequential(
BasicConv2d(384, 64, kernel_size=1, stride=1),
BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1)
)
self.branch2 = nn.Sequential(
BasicConv2d(384, 64, kernel_size=1, stride=1),
BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1),
BasicConv2d(96, 96, kernel_size=3, stride=1, padding=1)
)
self.branch3 = nn.Sequential(
nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),
BasicConv2d(384, 96, kernel_size=1, stride=1)
)
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
x2 = self.branch2(x)
x3 = self.branch3(x)
out = torch.cat((x0, x1, x2, x3), 1)
return out
class Reduction_A(nn.Module):
def __init__(self):
super(Reduction_A, self).__init__()
self.branch0 = BasicConv2d(384, 384, kernel_size=3, stride=2)
self.branch1 = nn.Sequential(
BasicConv2d(384, 192, kernel_size=1, stride=1),
BasicConv2d(192, 224, kernel_size=3, stride=1, padding=1),
BasicConv2d(224, 256, kernel_size=3, stride=2)
)
self.branch2 = nn.MaxPool2d(3, stride=2)
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
x2 = self.branch2(x)
out = torch.cat((x0, x1, x2), 1)
return out
class Inception_B(nn.Module):
def __init__(self):
super(Inception_B, self).__init__()
self.branch0 = BasicConv2d(1024, 384, kernel_size=1, stride=1)
self.branch1 = nn.Sequential(
BasicConv2d(1024, 192, kernel_size=1, stride=1),
BasicConv2d(192, 224, kernel_size=(1,7), stride=1, padding=(0,3)),
BasicConv2d(224, 256, kernel_size=(7,1), stride=1, padding=(3,0))
)
self.branch2 = nn.Sequential(
BasicConv2d(1024, 192, kernel_size=1, stride=1),
BasicConv2d(192, 192, kernel_size=(7,1), stride=1, padding=(3,0)),
BasicConv2d(192, 224, kernel_size=(1,7), stride=1, padding=(0,3)),
BasicConv2d(224, 224, kernel_size=(7,1), stride=1, padding=(3,0)),
BasicConv2d(224, 256, kernel_size=(1,7), stride=1, padding=(0,3))
)
self.branch3 = nn.Sequential(
nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),
BasicConv2d(1024, 128, kernel_size=1, stride=1)
)
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
x2 = self.branch2(x)
x3 = self.branch3(x)
out = torch.cat((x0, x1, x2, x3), 1)
return out
class Reduction_B(nn.Module):
def __init__(self):
super(Reduction_B, self).__init__()
self.branch0 = nn.Sequential(
BasicConv2d(1024, 192, kernel_size=1, stride=1),
BasicConv2d(192, 192, kernel_size=3, stride=2)
)
self.branch1 = nn.Sequential(
BasicConv2d(1024, 256, kernel_size=1, stride=1),
BasicConv2d(256, 256, kernel_size=(1,7), stride=1, padding=(0,3)),
BasicConv2d(256, 320, kernel_size=(7,1), stride=1, padding=(3,0)),
BasicConv2d(320, 320, kernel_size=3, stride=2)
)
self.branch2 = nn.MaxPool2d(3, stride=2)
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
x2 = self.branch2(x)
out = torch.cat((x0, x1, x2), 1)
return out
class Inception_C(nn.Module):
def __init__(self):
super(Inception_C, self).__init__()
self.branch0 = BasicConv2d(1536, 256, kernel_size=1, stride=1)
self.branch1_0 = BasicConv2d(1536, 384, kernel_size=1, stride=1)
self.branch1_1a = BasicConv2d(384, 256, kernel_size=(1,3), stride=1, padding=(0,1))
self.branch1_1b = BasicConv2d(384, 256, kernel_size=(3,1), stride=1, padding=(1,0))
self.branch2_0 = BasicConv2d(1536, 384, kernel_size=1, stride=1)
self.branch2_1 = BasicConv2d(384, 448, kernel_size=(3,1), stride=1, padding=(1,0))
self.branch2_2 = BasicConv2d(448, 512, kernel_size=(1,3), stride=1, padding=(0,1))
self.branch2_3a = BasicConv2d(512, 256, kernel_size=(1,3), stride=1, padding=(0,1))
self.branch2_3b = BasicConv2d(512, 256, kernel_size=(3,1), stride=1, padding=(1,0))
self.branch3 = nn.Sequential(
nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),
BasicConv2d(1536, 256, kernel_size=1, stride=1)
)
def forward(self, x):
x0 = self.branch0(x)
x1_0 = self.branch1_0(x)
x1_1a = self.branch1_1a(x1_0)
x1_1b = self.branch1_1b(x1_0)
x1 = torch.cat((x1_1a, x1_1b), 1)
x2_0 = self.branch2_0(x)
x2_1 = self.branch2_1(x2_0)
x2_2 = self.branch2_2(x2_1)
x2_3a = self.branch2_3a(x2_2)
x2_3b = self.branch2_3b(x2_2)
x2 = torch.cat((x2_3a, x2_3b), 1)
x3 = self.branch3(x)
out = torch.cat((x0, x1, x2, x3), 1)
return out
class InceptionV4(nn.Module):
def __init__(self, num_classes=1001):
super(InceptionV4, self).__init__()
        # Special attributes
self.input_space = None
self.input_size = (299, 299, 3)
self.mean = None
self.std = None
# Modules
self.features = nn.Sequential(
BasicConv2d(3, 32, kernel_size=3, stride=2),
BasicConv2d(32, 32, kernel_size=3, stride=1),
BasicConv2d(32, 64, kernel_size=3, stride=1, padding=1),
Mixed_3a(),
Mixed_4a(),
Mixed_5a(),
Inception_A(),
Inception_A(),
Inception_A(),
Inception_A(),
Reduction_A(), # Mixed_6a
Inception_B(),
Inception_B(),
Inception_B(),
Inception_B(),
Inception_B(),
Inception_B(),
Inception_B(),
Reduction_B(), # Mixed_7a
Inception_C(),
Inception_C(),
Inception_C()
)
self.last_linear = nn.Linear(1536, num_classes)
def logits(self, features):
#Allows image of any size to be processed
adaptiveAvgPoolWidth = features.shape[2]
x = F.avg_pool2d(features, kernel_size=adaptiveAvgPoolWidth)
x = x.view(x.size(0), -1)
x = self.last_linear(x)
return x
def forward(self, input):
x = self.features(input)
x = self.logits(x)
return x
def inceptionv4(num_classes=1000, pretrained='imagenet'):
if pretrained:
settings = pretrained_settings['inceptionv4'][pretrained]
assert num_classes == settings['num_classes'], \
"num_classes should be {}, but is {}".format(settings['num_classes'], num_classes)
        # both 'imagenet' & 'imagenet+background' are loaded from the same parameters
model = InceptionV4(num_classes=1001)
model.load_state_dict(model_zoo.load_url(settings['url']))
if pretrained == 'imagenet':
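            # Drop the leading "background" class (index 0) so the 1001-way
            # checkpoint maps onto the standard 1000 ImageNet classes.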
new_last_linear = nn.Linear(1536, 1000)
new_last_linear.weight.data = model.last_linear.weight.data[1:]
new_last_linear.bias.data = model.last_linear.bias.data[1:]
model.last_linear = new_last_linear
model.input_space = settings['input_space']
model.input_size = settings['input_size']
model.input_range = settings['input_range']
model.mean = settings['mean']
model.std = settings['std']
else:
model = InceptionV4(num_classes=num_classes)
return model
'''
TEST
Run this code with:
```
cd $HOME/pretrained-models.pytorch
python -m pretrainedmodels.inceptionv4
```
'''
if __name__ == '__main__':
assert inceptionv4(num_classes=10, pretrained=None)
print('success')
assert inceptionv4(num_classes=1000, pretrained='imagenet')
print('success')
assert inceptionv4(num_classes=1001, pretrained='imagenet+background')
print('success')
# fail
assert inceptionv4(num_classes=1001, pretrained='imagenet')
| bsd-3-clause | 6,074,634,826,971,776,000 | 31.064246 | 94 | 0.554055 | false |
xuru/pyvisdk | pyvisdk/do/cluster_vm_host_rule_info.py | 1 | 1827 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ClusterVmHostRuleInfo(vim, *args, **kwargs):
    '''A ClusterVmHostRuleInfo object identifies virtual machines and host groups
    that determine virtual machine placement. The virtual machines and hosts
    referenced by a VM-Host rule must be in the same cluster.
    A VM-Host rule identifies the following groups:
    * A virtual machine group (ClusterVmGroup).
    * Two host groups - an affine host group and an anti-affine host group
      (ClusterHostGroup). At least one of the groups must contain one or more
      hosts.
    ClusterVmHostRuleInfo stores only the names of the relevant virtual machine
    and host groups. The group contents are stored in the virtual machine and
    host group objects. When you modify a VM-Host rule, only the fields that are
    specified are set.'''
obj = vim.client.factory.create('ns0:ClusterVmHostRuleInfo')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
        raise IndexError('Expected at least 1 argument, got: %d' % len(args))
required = [ ]
optional = [ 'affineHostGroupName', 'antiAffineHostGroupName', 'vmGroupName', 'enabled',
'inCompliance', 'key', 'mandatory', 'name', 'status', 'userCreated',
'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
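# Illustrative call (the argument values below are assumptions for the example,
# not part of the original module):
#   rule = ClusterVmHostRuleInfo(vim, name='web-on-primary',
#                                vmGroupName='web-vms',
#                                affineHostGroupName='primary-hosts',
#                                enabled=True, mandatory=False)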
| mit | 8,006,430,162,297,734,000 | 41.511628 | 124 | 0.665572 | false |
Alignak-monitoring-contrib/alignak-backend | test/test_config.py | 1 | 3245 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test checks backend configuration endpoint
"""
from __future__ import print_function
import os
import json
import time
import shlex
from random import randint
import subprocess
import requests
import requests_mock
import unittest2
from bson.objectid import ObjectId
from alignak_backend import manifest
class TestConfig(unittest2.TestCase):
"""This class test backend configuration endpoint"""
maxDiff = None
@classmethod
def setUpClass(cls):
"""This method:
* deletes mongodb database
* starts the backend with uwsgi
* logs in the backend and get the token
* gets the default realm and admn user
:return: None
"""
# Set test mode for Alignak backend
os.environ['ALIGNAK_BACKEND_TEST'] = '1'
os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'] = 'alignak-backend-test'
os.environ['ALIGNAK_BACKEND_CONFIGURATION_FILE'] = './cfg/settings/settings.json'
# Delete used mongo DBs
exit_code = subprocess.call(
shlex.split(
'mongo %s --eval "db.dropDatabase()"' % os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'])
)
assert exit_code == 0
cls.p = subprocess.Popen(['uwsgi', '--plugin', 'python', '-w', 'alignak_backend.app:app',
'--socket', '0.0.0.0:5000',
'--protocol=http', '--enable-threads', '--pidfile',
'/tmp/uwsgi.pid', '--logto=/tmp/alignak_backend.log'])
time.sleep(3)
cls.endpoint = 'http://127.0.0.1:5000'
headers = {'Content-Type': 'application/json'}
params = {'username': 'admin', 'password': 'admin', 'action': 'generate'}
# get token
response = requests.post(cls.endpoint + '/login', json=params, headers=headers)
resp = response.json()
cls.token = resp['token']
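        # The backend token is used as the basic-auth username (empty password).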
cls.auth = requests.auth.HTTPBasicAuth(cls.token, '')
# Get default realm
response = requests.get(cls.endpoint + '/realm', auth=cls.auth)
resp = response.json()
cls.realm_all = resp['_items'][0]['_id']
# Get admin user
response = requests.get(cls.endpoint + '/user', {"name": "admin"}, auth=cls.auth)
resp = response.json()
cls.user_admin = resp['_items'][0]['_id']
@classmethod
def tearDownClass(cls):
"""Kill uwsgi
:return: None
"""
subprocess.call(['uwsgi', '--stop', '/tmp/uwsgi.pid'])
time.sleep(2)
# os.unlink("/tmp/alignak_backend.log")
def test_config_endpoint(self):
"""Get backend configuration
:return: None
"""
# Get backend configuration
response = requests.get(self.endpoint + '/backendconfig')
resp = response.json()
assert resp == {u'PAGINATION_DEFAULT': 50, u'PAGINATION_LIMIT': 5000}
def test_version_endpoint(self):
"""Get backend version
:return: None
"""
# Get backend version
response = requests.get(self.endpoint + '/version')
resp = response.json()
assert resp == {u'version': manifest['version']}
| agpl-3.0 | 7,079,667,960,244,954,000 | 30.813725 | 99 | 0.580586 | false |
jeremiah-c-leary/vhdl-style-guide | vsg/tests/generate/test_rule_007.py | 1 | 1243 |
import os
import unittest
from vsg.rules import generate
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_007_test_input.vhd'))
dIndentMap = utils.read_indent_file()
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_007_test_input.fixed.vhd'), lExpected)
class test_generate_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
self.oFile.set_indent_map(dIndentMap)
def test_rule_007(self):
oRule = generate.rule_007()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'generate')
self.assertEqual(oRule.identifier, '007')
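        # Line numbers in the VHDL input expected to violate the rule.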
lExpected = [45, 54, 65]
oRule.analyze(self.oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_007(self):
oRule = generate.rule_007()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
| gpl-3.0 | -8,039,394,553,296,809,000 | 24.895833 | 106 | 0.679807 | false |
SamyCookie/python-ant | demos/ant.core/10-weight.py | 1 | 3095 | """
Building on demo-03, this script implements an event callback used to process
the incoming data.
"""
from __future__ import print_function
import sys
import time
from ant.core import driver
from ant.core import node
from ant.core import event
from ant.core import message
from ant.core.constants import *
from config import *
NETKEY = '\xB9\xA5\x21\xFB\xBD\x72\xC3\x45'
command_id = 0x46
send_times = 2
pg_num = 1
DP_PAYLOAD = bytearray([command_id, 0xFF, 0xFF, 0, 0, send_times, pg_num, 1])
#DP_PAYLOAD = bytearray([255, 255, 0, 0, send_times, pg_num, 1])
CHANNEL = 1 #TODO: not really, channel is set much later
pay = DP_PAYLOAD
p1 = message.ChannelAcknowledgedDataMessage(number=CHANNEL,data=pay)
pay[6] = 2
p2 = message.ChannelAcknowledgedDataMessage(number=CHANNEL,data=pay)
pay[6] = 3
p3 = message.ChannelAcknowledgedDataMessage(number=CHANNEL,data=pay)
pay[6] = 4
p4 = message.ChannelAcknowledgedDataMessage(number=CHANNEL,data=pay)
RSP = bytearray([0xFF, 0x3A])
class RsMessage(message.ChannelMessage):
type = 0x63
def __init__(self, number=0x00):
super(RsMessage, self).__init__(number=number, payload=RSP)
rs = RsMessage(0)
RECV = 0
class WeightListener(event.EventCallback):
def process(self, msg, _channel):
global RECV
if isinstance(msg, message.ChannelBroadcastDataMessage):
# print('R%04X: ' % RECV, *('%02X' % ord(byte) for byte in msg.payload))
data = str(msg.payload)
print('%04X' % RECV, *('%02X' % ord(byte) for byte in data))
# print [map(ord, msg.payload)]
page_number = msg.payload[1]
RECV += 1
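            # Placeholder dispatch: data pages 1-4 of the weight-scale profile
            # are recognised here but intentionally left undecoded in this demo.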
if page_number == 1:
pass
elif page_number == 2:
pass
elif page_number == 3:
pass
elif page_number == 4:
pass
def delete_channel(channel):
channel.close()
channel.unassign()
def reset_channel(antnode, channel=None):
if channel:
delete_channel(channel)
channel = antnode.getFreeChannel()
channel.name = 'C:WGT'
channel.assign(net, CHANNEL_TYPE_TWOWAY_RECEIVE)
channel.setID(119, 0, 0)
    channel.period = 0x2000  # or maybe 0x0020 ???
channel.frequency = 0x39
rs.channelNumber = channel.number
channel.node.evm.writeMessage(rs)
channel.searchTimeout = TIMEOUT_NEVER
channel.open()
channel.registerCallback(WeightListener())
return channel
# Initialize
#LOG=None
#DEBUG=False
stick = driver.USB1Driver(SERIAL, log=LOG, debug=DEBUG)
antnode = node.Node(stick)
antnode.start()
# Setup channel
net = node.Network(name='N:ANT+', key=NETKEY)
antnode.setNetworkKey(0, net)
channel = reset_channel(antnode)
restart = int(time.time())
# Wait
print("Listening for weight scale events ...")
while True:
time.sleep(0.1)
# Restart channel every 3 seconds
now = int(time.time())
if (now % 3 == 0) and (now != restart):
channel = reset_channel(antnode, channel)
RECV = 0
restart = now
# Shutdown
delete_channel(channel)
antnode.stop()
| mit | 1,154,412,797,407,415,800 | 24.791667 | 83 | 0.652666 | false |
rkspsm/fixed-grid-image-viewer | main.py | 1 | 9870 | #! /usr/bin/env python3
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtMultimedia import *
from PyQt5.QtMultimediaWidgets import *
from math import *
from hashlib import sha256
import sys, os
import traceback
_args = sys.argv
class Stuff :
width = 800
height = 600
scale_factor = 0.01
next_k = {Qt.Key_D, Qt.Key_6, Qt.Key_Y}
prev_k = {Qt.Key_A, Qt.Key_4, Qt.Key_E}
refresh = {Qt.Key_X}
pan_toggle = {Qt.Key_Z, Qt.Key_W, Qt.Key_2}
remove_lines_button = {Qt.Key_C, Qt.Key_Home, Qt.Key_9}
pick_line_color = {Qt.Key_Q}
inc_play_rate = {Qt.Key_Up}
dec_play_rate = {Qt.Key_Down}
res_play_rate = {Qt.Key_F}
seek_f = {Qt.Key_Right}
seek_b = {Qt.Key_Left}
seek_0 = {Qt.Key_R}
play_pause = {Qt.Key_Space}
overlays = ['grid.png']
overlay_toggle = {Qt.Key_S, Qt.Key_5}
seek_t = 2 # seconds
zoom_button = Qt.MiddleButton
pan_button = Qt.LeftButton
pick_color_button = Qt.RightButton
@staticmethod
def isImage (f) :
return f.endswith ('.jpg') or f.endswith ('.png') or f.endswith ('.jpeg')
@staticmethod
def isMovie (f) :
return f.endswith ('.mkv') or f.endswith ('.avi') or f.endswith ('.mp4')
@staticmethod
def dist (p1, p2) :
dx = p1[0] - p2[0]
dy = p1[1] - p2[1]
return sqrt (dx*dx + dy*dy)
@staticmethod
def tscale (t) :
return (t.m11 (), t.m22 ())
@staticmethod
def string_of_rect (r) :
return f'rect({r.x()}, {r.y()}, {r.width()}, {r.height()}'
class GfxView (QGraphicsView) :
def setMHandlers (self, mp, mm, mr) :
self.mp = mp
self.mm = mm
self.mr = mr
def setKHandlers (self, kp, kr) :
self.kp = kp
self.kr = kr
def mousePressEvent (self, e) :
self.mp (e)
def mouseReleaseEvent (self, e) :
self.mr (e)
def mouseMoveEvent (self, e) :
self.mm (e)
def keyPressEvent (self, e) :
self.kp (e)
def keyReleaseEvent (self, e) :
self.kr (e)
def sizeHint (self) :
return QSize (Stuff.width, Stuff.height)
class App (QApplication) :
def __init__ (self) :
QApplication.__init__ (self, _args)
self.args = _args[1:]
self.scene = QGraphicsScene ()
self.scene.setSceneRect (0, 0, Stuff.width, Stuff.height)
self.gv = GfxView (self.scene)
self.gv.setHorizontalScrollBarPolicy (Qt.ScrollBarAlwaysOff)
self.gv.setVerticalScrollBarPolicy (Qt.ScrollBarAlwaysOff)
self.gv.show ()
self.err = ''
try :
self.imgdir = self.args[0]
assert (os.path.isdir (self.imgdir))
self.setup ()
except :
traceback.print_exc ()
self.err = 'usage: <prog> <imgdir>'
self.exit (1)
def getFiles (self) :
files = os.listdir (self.imgdir)
files = [os.path.join (self.imgdir, x) for x in files]
files = [x for x in files if os.path.isfile (x)]
files = [x for x in files if Stuff.isImage (x) or Stuff.isMovie (x)]
if len (files) == 0 :
raise Exception ('no images in the dir')
files = list (sorted (files))
return files
def setup (self) :
self.isMedia = False
self.playratepow = 0
self.pan_on = True
self.files = self.getFiles ()
self.index = 0
self.savedTransforms = dict ()
self.lineColor = QColor (0, 0, 0)
self.lines = []
self.player = QMediaPlayer ()
self.overlayItems = [self.scene.addPixmap (QPixmap (x)) for x in Stuff.overlays]
for i, item in enumerate (self.overlayItems) :
item.setZValue (10 + i)
item.setVisible (False)
try :
skip = int (self.args[1])
except :
skip = 0
self.filesOrIndexUpdated (True, skip)
self.m_init ()
self.k_init ()
def removeLines (self) :
for line in self.lines :
self.scene.removeItem (line)
self.lines = []
def playrateUpdated (self) :
pos = self.player.position ()
self.player.setPlaybackRate (pow (2, self.playratepow))
self.player.setPosition (pos)
def getseekt (self) :
factor = pow (2, self.playratepow)
return Stuff.seek_t * factor * 1000
def filesOrIndexUpdated (self, isFirst = False, skip = 0) :
self.isMedia = False
if not isFirst :
self.player.stop ()
skip = 0
self.savedTransforms[self.lastDigest] = QTransform (self.imgItem.transform ())
self.scene.removeItem (self.imgItem)
self.index += skip
self.index = 0 if self.index >= len (self.files) else self.index
f = self.files[self.index]
s = sha256 ()
if Stuff.isImage (f) :
with open (self.files[self.index], 'rb') as handle :
s.update (handle.read ())
else :
s.update (f.encode ('utf-8'))
d = s.digest ()
if Stuff.isImage (f) :
img = QPixmap (self.files[self.index])
self.imgItem = self.scene.addPixmap (img)
wrat = img.width () / Stuff.width
hrat = img.height () / Stuff.height
else :
self.playratepow = 0
self.mediaContent = QMediaContent (QUrl.fromLocalFile (f))
self.player.setMedia (self.mediaContent)
self.player.setMuted (True)
self.imgItem = QGraphicsVideoItem ()
self.player.setVideoOutput (self.imgItem)
self.scene.addItem (self.imgItem)
self.player.play ()
self.isMedia = True
wrat = 1
hrat = 1
rat = wrat if wrat > hrat else hrat
if d in self.savedTransforms :
self.curt = self.savedTransforms[d]
else :
self.curt = QTransform (self.imgItem.transform ()).scale (1 / rat, 1 / rat)
self.imgItem.setTransform (self.curt)
self.lastDigest = d
self.removeLines ()
def m_init (self) :
self.gv.setMHandlers (self.mp, self.mm, self.mr)
self.zoom_origin = None
self.noscale = True
pass
def mp (self, e) :
if e.button () == Stuff.zoom_button or e.button () == Stuff.pan_button :
self.zoom_origin = (e.x (), e.y ())
self.curt = QTransform (self.imgItem.transform ())
if e.button () == Stuff.pan_button :
self.noscale = True
self.linePt = self.gv.mapToScene (QPoint (e.x (), e.y ()))
else :
self.noscale = False
def mr (self, e) :
self.zoom_origin = None
if e.button () == Stuff.pick_color_button :
self.lineColor = QColorDialog.getColor ()
def zoi (self) :
pt = QPoint (self.zoom_origin[0], self.zoom_origin[1])
pts = self.gv.mapToScene (pt)
pti = self.imgItem.mapFromScene (pts)
return pti
def mm (self, e) :
if self.zoom_origin is None :
return
pt = (e.x (), e.y ())
#d = Stuff.dist (pt, self.zoom_origin)
dx = pt[0] - self.zoom_origin[0]
dy = pt[1] - self.zoom_origin[1]
if self.noscale :
if not self.pan_on :
newPt = self.gv.mapToScene (QPoint (e.x (), e.y ()))
line = self.scene.addLine (QLineF (self.linePt, newPt), QPen (self.lineColor, 2))
line.setZValue (500)
self.lines.append (line)
self.linePt = newPt
return
scale = self.curt.m11 ()
self.tempt = QTransform (self.curt).translate (dx / scale, dy / scale)
self.imgItem.setTransform (self.tempt)
else :
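      # Zoom about the cursor: rescale around the item origin, then measure how
      # far the point under the cursor drifted and translate it back into place.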
scale = 1 + dx * Stuff.scale_factor
#self.tempt = QTransform (self.curt).scale (scale, scale)
z1 = self.zoi ()
self.tempt = QTransform (self.curt).translate (- self.curt.dx (), - self.curt.dy ()).scale (scale, scale).translate (self.curt.dx (), self.curt.dy ())
self.imgItem.setTransform (self.tempt)
z2 = self.zoi ()
dx = z2.x () - z1.x ()
dy = z2.y () - z1.y ()
self.tempt.translate (dx, dy)
self.imgItem.setTransform (self.tempt)
def k_init (self) :
self.gv.setKHandlers (self.kp, self.kr)
def kp (self, e) :
pass
def kr (self, e) :
if e.key () in Stuff.next_k :
self.index += 1
self.filesOrIndexUpdated ()
elif e.key () in Stuff.prev_k :
self.index -= 1
self.filesOrIndexUpdated ()
elif e.key () in Stuff.overlay_toggle :
for item in self.overlayItems :
item.setVisible (not item.isVisible ())
elif e.key () in Stuff.refresh :
newFiles = self.getFiles ()
curFile = self.files[self.index]
if curFile in newFiles :
newIndex = newFiles.index (curFile)
else :
newIndex = self.index
self.files = newFiles
self.index = newIndex
elif e.key () in Stuff.pan_toggle :
self.pan_on = not self.pan_on
elif e.key () in Stuff.remove_lines_button :
self.removeLines ()
elif e.key () in Stuff.pick_line_color :
self.lineColor = QColorDialog.getColor ()
elif self.isMedia and e.key () in Stuff.inc_play_rate :
self.playratepow += 1
self.playrateUpdated ()
elif self.isMedia and e.key () in Stuff.dec_play_rate :
self.playratepow -= 1
self.playrateUpdated ()
elif self.isMedia and e.key () in Stuff.res_play_rate :
self.playratepow = 0
self.playrateUpdated ()
elif self.isMedia and e.key () in Stuff.seek_f :
t = self.getseekt ()
pos = self.player.position ()
pos += t
pos = 0 if pos < 0 else pos
self.player.setPosition (pos)
elif self.isMedia and e.key () in Stuff.seek_b :
t = self.getseekt ()
pos = self.player.position ()
pos -= t
pos = 0 if pos < 0 else pos
self.player.setPosition (pos)
elif self.isMedia and e.key () in Stuff.seek_0 :
self.player.setPosition (0)
self.player.play ()
elif self.isMedia and e.key () in Stuff.play_pause :
state = self.player.state ()
if state == QMediaPlayer.StoppedState :
self.player.setPosition (0)
self.player.play ()
elif state == QMediaPlayer.PlayingState :
self.player.pause ()
elif state == QMediaPlayer.PausedState :
self.player.play ()
def go (self) :
if self.err != '' :
print (self.err)
sys.exit (1)
else :
sys.exit (self.exec_ ())
App ().go ()
| agpl-3.0 | 5,272,151,468,709,609,000 | 26.115385 | 156 | 0.60233 | false |
xme1226/horizon | openstack_dashboard/dashboards/admin/instances/tables.py | 1 | 7326 | # Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import tables
from horizon.utils import filters
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances \
import tables as project_tables
from openstack_dashboard import policy
class AdminEditInstance(project_tables.EditInstance):
url = "horizon:admin:instances:update"
class MigrateInstance(policy.PolicyTargetMixin, tables.BatchAction):
name = "migrate"
classes = ("btn-migrate", "btn-danger")
policy_rules = (("compute", "compute_extension:admin_actions:migrate"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Migrate Instance",
u"Migrate Instances",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled migration (pending confirmation) of Instance",
u"Scheduled migration (pending confirmation) of Instances",
count
)
def allowed(self, request, instance):
return ((instance.status in project_tables.ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not project_tables.is_deleting(instance))
def action(self, request, obj_id):
api.nova.server_migrate(request, obj_id)
class LiveMigrateInstance(policy.PolicyTargetMixin,
tables.LinkAction):
name = "live_migrate"
verbose_name = _("Live Migrate Instance")
url = "horizon:admin:instances:live_migrate"
classes = ("ajax-modal", "btn-migrate", "btn-danger")
policy_rules = (
("compute", "compute_extension:admin_actions:migrateLive"),)
def allowed(self, request, instance):
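        # Unlike cold migration above, live migration requires a running
        # (ACTIVE) instance; SHUTOFF instances cannot be live-migrated.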
return ((instance.status in project_tables.ACTIVE_STATES)
and not project_tables.is_deleting(instance))
class AdminUpdateRow(project_tables.UpdateRow):
def get_data(self, request, instance_id):
instance = super(AdminUpdateRow, self).get_data(request, instance_id)
tenant = api.keystone.tenant_get(request,
instance.tenant_id,
admin=True)
instance.tenant_name = getattr(tenant, "name", None)
return instance
class AdminInstanceFilterAction(tables.FilterAction):
# Change default name of 'filter' to distinguish this one from the
# project instances table filter, since this is used as part of the
# session property used for persisting the filter.
name = "filter_admin_instances"
filter_type = "server"
filter_choices = (('project', _("Project"), True),
('host', _("Host ="), True),
('name', _("Name"), True),
('ip', _("IPv4 Address ="), True),
('ip6', _("IPv6 Address ="), True),
('status', _("Status ="), True),
('image', _("Image ID ="), True),
('flavor', _("Flavor ID ="), True))
class AdminInstancesTable(tables.DataTable):
TASK_STATUS_CHOICES = (
(None, True),
("none", True)
)
STATUS_CHOICES = (
("active", True),
("shutoff", True),
("suspended", True),
("paused", True),
("error", False),
("rescue", True),
("shelved_offloaded", True),
)
tenant = tables.Column("tenant_name", verbose_name=_("Project"))
# NOTE(gabriel): Commenting out the user column because all we have
# is an ID, and correlating that at production scale using our current
# techniques isn't practical. It can be added back in when we have names
# returned in a practical manner by the API.
# user = tables.Column("user_id", verbose_name=_("User"))
host = tables.Column("OS-EXT-SRV-ATTR:host",
verbose_name=_("Host"),
classes=('nowrap-col',))
name = tables.Column("name",
link=("horizon:admin:instances:detail"),
verbose_name=_("Name"))
image_name = tables.Column("image_name",
verbose_name=_("Image Name"))
ip = tables.Column(project_tables.get_ips,
verbose_name=_("IP Address"),
attrs={'data-type': "ip"})
size = tables.Column(project_tables.get_size,
verbose_name=_("Size"),
classes=('nowrap-col',),
attrs={'data-type': 'size'})
status = tables.Column(
"status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES,
display_choices=project_tables.STATUS_DISPLAY_CHOICES)
task = tables.Column("OS-EXT-STS:task_state",
verbose_name=_("Task"),
filters=(title, filters.replace_underscores),
status=True,
status_choices=TASK_STATUS_CHOICES,
display_choices=project_tables.TASK_DISPLAY_CHOICES)
state = tables.Column(project_tables.get_power_state,
filters=(title, filters.replace_underscores),
verbose_name=_("Power State"))
created = tables.Column("created",
verbose_name=_("Time since created"),
filters=(filters.parse_isotime,
filters.timesince_sortable),
attrs={'data-type': 'timesince'})
class Meta:
name = "instances"
verbose_name = _("Instances")
status_columns = ["status", "task"]
table_actions = (project_tables.TerminateInstance,
AdminInstanceFilterAction)
row_class = AdminUpdateRow
row_actions = (project_tables.ConfirmResize,
project_tables.RevertResize,
AdminEditInstance,
project_tables.ConsoleLink,
project_tables.LogLink,
project_tables.CreateSnapshot,
project_tables.TogglePause,
project_tables.ToggleSuspend,
MigrateInstance,
LiveMigrateInstance,
project_tables.SoftRebootInstance,
project_tables.RebootInstance,
project_tables.TerminateInstance)
| apache-2.0 | -1,049,580,477,358,358,000 | 39.927374 | 78 | 0.578078 | false |
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_06_01/operations/public_ip_addresses_operations.py | 1 | 33216 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class PublicIPAddressesOperations(object):
"""PublicIPAddressesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def _delete_initial(
self, resource_group_name, public_ip_address_name, custom_headers=None, raw=False, **operation_config):
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, public_ip_address_name, custom_headers=None, raw=False, **operation_config):
"""Deletes the specified public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
        :param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, public_ip_address_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Gets the specified public IP address in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
        :param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: PublicIPAddress or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2017_06_01.models.PublicIPAddress or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def _create_or_update_initial(
self, resource_group_name, public_ip_address_name, parameters, custom_headers=None, raw=False, **operation_config):
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'PublicIPAddress')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', response)
if response.status_code == 201:
deserialized = self._deserialize('PublicIPAddress', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, public_ip_address_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates or updates a static or dynamic public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param parameters: Parameters supplied to the create or update public
IP address operation.
:type parameters:
~azure.mgmt.network.v2017_06_01.models.PublicIPAddress
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns
PublicIPAddress or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_06_01.models.PublicIPAddress]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = self._deserialize('PublicIPAddress', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
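    # Illustrative usage sketch (not part of the generated code). Assuming a
    # NetworkManagementClient from azure.mgmt.network that exposes this class
    # as client.public_ip_addresses; resource group and address names are
    # hypothetical:
    #
    #     poller = client.public_ip_addresses.create_or_update(
    #         'example-rg', 'example-ip',
    #         {'location': 'westus', 'public_ip_allocation_method': 'Static'})
    #     public_ip = poller.result()  # poller blocks until provisioning ends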
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Gets all the public IP addresses in a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of PublicIPAddress
:rtype:
~azure.mgmt.network.v2017_06_01.models.PublicIPAddressPaged[~azure.mgmt.network.v2017_06_01.models.PublicIPAddress]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPAddresses'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
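    # Usage sketch (same assumed client wiring as above): the returned
    # PublicIPAddressPaged object fetches pages lazily during iteration, e.g.
    #
    #     for ip in client.public_ip_addresses.list_all():
    #         print(ip.name, ip.ip_address)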
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets all public IP addresses in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of PublicIPAddress
:rtype:
~azure.mgmt.network.v2017_06_01.models.PublicIPAddressPaged[~azure.mgmt.network.v2017_06_01.models.PublicIPAddress]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list_virtual_machine_scale_set_public_ip_addresses(
self, resource_group_name, virtual_machine_scale_set_name, custom_headers=None, raw=False, **operation_config):
"""Gets information about all public IP addresses on a virtual machine
scale set level.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine
scale set.
:type virtual_machine_scale_set_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of PublicIPAddress
:rtype:
~azure.mgmt.network.v2017_06_01.models.PublicIPAddressPaged[~azure.mgmt.network.v2017_06_01.models.PublicIPAddress]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-03-30"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/publicipaddresses'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list_virtual_machine_scale_set_vm_public_ip_addresses(
self, resource_group_name, virtual_machine_scale_set_name, virtualmachine_index, network_interface_name, ip_configuration_name, custom_headers=None, raw=False, **operation_config):
"""Gets information about all public IP addresses in a virtual machine IP
configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine
scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The network interface name.
:type network_interface_name: str
:param ip_configuration_name: The IP configuration name.
:type ip_configuration_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of PublicIPAddress
:rtype:
~azure.mgmt.network.v2017_06_01.models.PublicIPAddressPaged[~azure.mgmt.network.v2017_06_01.models.PublicIPAddress]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-03-30"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PublicIPAddressPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def get_virtual_machine_scale_set_public_ip_address(
self, resource_group_name, virtual_machine_scale_set_name, virtualmachine_index, network_interface_name, ip_configuration_name, public_ip_address_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Get the specified public IP address in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine
scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the IP configuration.
:type ip_configuration_name: str
:param public_ip_address_name: The name of the public IP Address.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: PublicIPAddress or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2017_06_01.models.PublicIPAddress or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-03-30"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
| mit | 4,068,427,091,507,885,600 | 46.451429 | 321 | 0.640595 | false |
onepercentclub/onepercentclub-site | apps/homepage/tests/test_api.py | 1 | 3550 | from datetime import timedelta
from bluebottle.bb_projects.models import ProjectPhase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.factory_models.utils import LanguageFactory
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
from django.utils.text import slugify
from onepercentclub.tests.factory_models.fundraiser_factories import FundRaiserFactory
from onepercentclub.tests.factory_models.project_factories import OnePercentProjectFactory
from onepercentclub.tests.utils import OnePercentTestCase
from rest_framework import status
from apps.campaigns.models import Campaign
from apps.fund.models import Donation, DonationStatuses, Order
class HomepageTestCase(OnePercentTestCase):
""" Test that the homepage doesn't error out if no/a campaign is available """
def setUp(self):
self.init_projects()
# Create and activate user.
self.user = BlueBottleUserFactory.create(email='[email protected]', primary_language='en')
title = u'Mobile payments for everyone 2!'
language = LanguageFactory.create(code='en')
self.project = OnePercentProjectFactory.create(title=title, slug=slugify(title), amount_asked=100000, owner=self.user)
self.project.status = ProjectPhase.objects.get(slug='campaign')
self.project.is_campaign = True
self.project.money_donated = 0
self.project.language = language
self.project.save()
self.homepage_url = '/api/homepage/en'
# def test_homepage_without_campaign(self):
# response = self.client.get(self.homepage_url)
# self.assertEquals(response.status_code, status.HTTP_200_OK)
#
# self.assertEqual(None, response.data['campaign'])
#
# project = response.data['projects'][0]
# self.assertTrue(project['is_campaign'])
def test_homepage_with_campaign(self):
now = timezone.now()
start, end = now - timedelta(hours=8), now + timedelta(weeks=1)
Campaign.objects.create(start=start, end=end, title='FooBarCaMpAIgN', target=100000)
# make a donation before the campaign starts
order = Order.objects.create(user=self.user, order_number=1)
Donation.objects.create(amount=1000, user=self.user, project=self.project,
status=DonationStatuses.paid, order=order, ready=now-timedelta(days=1))
# and a couple of donations in campaign, for a total amount of 2000+3000+4000 cents = 90 euros
for i in range(1,4):
amount = (i+1)*1000
Donation.objects.create(amount=amount, user=self.user, project=self.project,
status=DonationStatuses.paid, order=order, ready=now+timedelta(days=i))
# and one after the campaign
Donation.objects.create(amount=5000, user=self.user, project=self.project,
status=DonationStatuses.paid, order=order, ready=now+timedelta(weeks=2))
self.project_with_fundraiser = OnePercentProjectFactory.create(amount_asked=50000)
self.project_with_fundraiser.is_campaign = True
self.project_with_fundraiser.save()
self.fundraiser = FundRaiserFactory.create(owner=self.user, project=self.project_with_fundraiser)
response = self.client.get(self.homepage_url)
self.assertNotEqual(None, response.data['campaign'])
self.assertEqual(response.data['campaign']['amount_donated'], '90.00')
| bsd-3-clause | -6,083,391,665,746,222,000 | 45.103896 | 126 | 0.70338 | false |
xinwu/bosi-1 | bosi/bosi.py | 1 | 18445 | import argparse
import datetime
import lib.constants as const
import os
import Queue
import random
import subprocess32 as subprocess
import threading
import time
import yaml
from collections import OrderedDict
from lib.environment import Environment
from lib.helper import Helper
from lib.util import safe_print
# queue to store all controller nodes
controller_node_q = Queue.Queue()
# queue to store all nodes
node_q = Queue.Queue()
# copy the node_q to this when original list is created
certify_node_q = Queue.Queue()
verify_node_q = Queue.Queue()
support_node_q = Queue.Queue()
# keep track of verified nodes
node_pass = {}
node_fail = {}
# result dict
node_dict = {}
time_dict = {}
def worker_upgrade_node(q):
while True:
node = q.get()
# copy ivs pkg to node
Helper.copy_pkg_scripts_to_remote(node)
# deploy node
safe_print("Start to deploy %(fqdn)s\n" %
{'fqdn': node.fqdn})
start_time = datetime.datetime.now()
Helper.run_command_on_remote(node,
(r'''sudo bash %(dst_dir)s/%(hostname)s_upgrade.sh''' %
{'dst_dir': node.dst_dir,
'hostname': node.hostname,
'log': node.log}))
end_time = datetime.datetime.now()
# parse setup log
diff = Helper.timedelta_total_seconds(end_time - start_time)
node.set_time_diff(diff)
node = Helper.update_last_log(node)
node_dict[node.fqdn] = node
time_dict[node.fqdn] = diff
safe_print("Finish upgrading %(fqdn)s, cost time: %(diff).2f\n" %
{'fqdn': node.fqdn, 'diff': node.time_diff})
q.task_done()
def worker_setup_node(q):
while True:
node = q.get()
# copy ivs pkg to node
Helper.copy_pkg_scripts_to_remote(node)
# deploy node
safe_print("Start to deploy %(fqdn)s\n" %
{'fqdn': node.fqdn})
if node.cleanup and node.role == const.ROLE_NEUTRON_SERVER:
Helper.run_command_on_remote(node,
(r'''sudo bash %(dst_dir)s/%(hostname)s_ospurge.sh''' %
{'dst_dir': node.dst_dir,
'hostname': node.hostname,
'log': node.log}))
# a random delay to smooth apt-get/yum
delay = random.random() * 10.0
time.sleep(delay)
start_time = datetime.datetime.now()
Helper.run_command_on_remote(node,
(r'''sudo bash %(dst_dir)s/%(hostname)s.sh''' %
{'dst_dir': node.dst_dir,
'hostname': node.hostname,
'log': node.log}))
end_time = datetime.datetime.now()
# parse setup log
diff = Helper.timedelta_total_seconds(end_time - start_time)
node.set_time_diff(diff)
node = Helper.update_last_log(node)
node_dict[node.fqdn] = node
time_dict[node.fqdn] = diff
# when deploying T5 on UBUNTU, reboot compute nodes
Helper.reboot_if_necessary(node)
safe_print("Finish deploying %(fqdn)s, cost time: %(diff).2f\n" %
{'fqdn': node.fqdn, 'diff': node.time_diff})
q.task_done()
def certify_node_setup(q):
while True:
node = q.get()
if node.certificate_dir:
if not os.path.isfile("%s/ca.cert" % node.certificate_dir):
safe_print("Missing ca.cert in %s\n" % node.certificate_dir)
break
Helper.certify_node(node)
q.task_done()
def support_node_setup(q):
while True:
node = q.get()
Helper.support_node(node)
q.task_done()
def verify_node_setup(q):
while True:
node = q.get()
all_service_status = 'Service status for node: ' + node.fqdn
# check services are running and IVS version is correct
if node.deploy_dhcp_agent:
dhcp_status = Helper.check_os_service_status(
node, "neutron-dhcp-agent")
all_service_status = (all_service_status +
' | DHCP Agent ' + dhcp_status)
metadata_status = Helper.check_os_service_status(
node, "neutron-metadata-agent")
all_service_status = (all_service_status +
' | Metadata Agent ' + metadata_status)
if node.deploy_l3_agent and node.deploy_mode == const.T5:
l3_status = Helper.check_os_service_status(
node, "neutron-l3-agent")
all_service_status = (all_service_status +
' | L3 Agent ' + l3_status)
# for T5 deployment, check LLDP service status on compute nodes
if node.deploy_mode == const.T5 and node.role != const.ROLE_NEUTRON_SERVER:
lldp_status = Helper.check_os_service_status(node, "send_lldp")
all_service_status = (all_service_status +
' | LLDP Service ' + lldp_status)
# for T6 deployment, check IVS status and version too
if node.deploy_mode == const.T6:
# check ivs status and version
ivs_status = Helper.check_os_service_status(node, "ivs")
if ivs_status == ':-)':
# ivs is OK. check version
ivs_version = Helper.check_ivs_version(node)
all_service_status = (all_service_status +
' | IVS version ' + ivs_version)
else:
# ivs not OK
all_service_status = (all_service_status +
' | IVS ' + ivs_status)
# check neutron-bsn-agent status
bsn_agent_status = Helper.check_os_service_status(
node, "neutron-bsn-agent")
all_service_status = (all_service_status +
' | BSN Agent ' + bsn_agent_status)
# after forming the complete string, put in respective list
if ":-(" not in all_service_status:
node_pass[node.fqdn] = all_service_status
else:
node_fail[node.fqdn] = all_service_status
q.task_done()
def upgrade_bcf(node_dic):
for hostname, node in node_dic.iteritems():
if node.skip:
safe_print("skip node %(fqdn)s due to %(error)s\n" %
{'fqdn': node.fqdn, 'error': node.error})
continue
if node.tag != node.env_tag:
safe_print("skip node %(fqdn)s due to mismatched tag\n" %
{'fqdn': node.fqdn})
continue
if node.os == const.CENTOS:
Helper.generate_upgrade_scripts_for_centos(node)
elif node.os == const.UBUNTU:
Helper.generate_upgrade_scripts_for_ubuntu(node)
elif node.os == const.REDHAT:
Helper.generate_upgrade_scripts_for_redhat(node)
node_q.put(node)
with open(const.LOG_FILE, "a") as log_file:
for hostname, node in node_dic.iteritems():
log_file.write(str(node))
# Use multiple threads to setup compute nodes
for i in range(const.MAX_WORKERS):
t = threading.Thread(target=worker_upgrade_node, args=(node_q,))
t.daemon = True
t.start()
node_q.join()
sorted_time_dict = OrderedDict(sorted(time_dict.items(),
key=lambda x: x[1]))
for fqdn, h_time in sorted_time_dict.items():
safe_print("node: %(fqdn)s, time: %(time).2f\n" %
{'fqdn': fqdn, 'time': h_time})
safe_print("Big Cloud Fabric deployment finished! "
"Check %(log)s on each node for details.\n" %
{'log': const.LOG_FILE})
def deploy_bcf(config, mode, fuel_cluster_id, rhosp, tag, cleanup,
verify, verify_only, skip_ivs_version_check,
certificate_dir, certificate_only, generate_csr,
support, upgrade_dir):
# Deploy setup node
safe_print("Start to prepare setup node\n")
env = Environment(config, mode, fuel_cluster_id, rhosp, tag, cleanup,
skip_ivs_version_check, certificate_dir, upgrade_dir)
Helper.common_setup_node_preparation(env)
controller_nodes = []
# Generate detailed node information
safe_print("Start to setup Big Cloud Fabric\n")
nodes_yaml_config = config['nodes'] if 'nodes' in config else None
node_dic = Helper.load_nodes(nodes_yaml_config, env)
if upgrade_dir:
return upgrade_bcf(node_dic)
if generate_csr:
safe_print("Start to generate csr for virtual switches.\n")
# create ~/csr and ~/key directory
Helper.run_command_on_local("mkdir -p %s" % const.CSR_DIR)
Helper.run_command_on_local("mkdir -p %s" % const.KEY_DIR)
for hostname, node in node_dic.iteritems():
if node.skip:
safe_print("skip node %(fqdn)s due to %(error)s\n" %
{'fqdn': node.fqdn, 'error': node.error})
continue
if node.tag != node.env_tag:
safe_print("skip node %(fqdn)s due to mismatched tag\n" %
{'fqdn': node.fqdn})
continue
if node.deploy_mode == const.T6 and node.role == const.ROLE_COMPUTE:
Helper.generate_csr(node)
safe_print("Finish generating csr for virtual switches.\n")
return
# copy neutron config from neutron server to setup node
for hostname, node in node_dic.iteritems():
if node.role == const.ROLE_NEUTRON_SERVER:
controller_nodes.append(node)
Helper.copy_neutron_config_from_controllers(controller_nodes)
# check if vlan is the tenant network type for fuel environment
if not Helper.check_if_vlan_is_used(controller_nodes):
safe_print("tenant network type is not vlan. Stop deploying.\n")
return
# prepare keystone client from /etc/neutron/api-paste.ini
#Helper.prepare_keystone_client(controller_nodes)
# Generate scripts for each node
for hostname, node in node_dic.iteritems():
if support:
support_node_q.put(node)
if node.skip:
safe_print("skip node %(fqdn)s due to %(error)s\n" %
{'fqdn': node.fqdn, 'error': node.error})
continue
if node.tag != node.env_tag:
safe_print("skip node %(fqdn)s due to mismatched tag\n" %
{'fqdn': node.fqdn})
continue
if node.os == const.CENTOS:
Helper.generate_scripts_for_centos(node)
elif node.os == const.UBUNTU:
Helper.generate_scripts_for_ubuntu(node)
elif node.os == const.REDHAT:
Helper.generate_scripts_for_redhat(node)
if node.role == const.ROLE_NEUTRON_SERVER:
controller_node_q.put(node)
else:
# python doesn't have deep copy for Queue, hence add to all
node_q.put(node)
verify_node_q.put(node)
if node.deploy_mode == const.T6 and node.role == const.ROLE_COMPUTE:
certify_node_q.put(node)
if node.rhosp:
Helper.chmod_node(node)
with open(const.LOG_FILE, "a") as log_file:
version = Helper.run_command_on_local("pip show bosi")
log_file.write(str(version))
for hostname, node in node_dic.iteritems():
log_file.write(str(node))
if support:
safe_print("Start to collect logs.\n")
# copy installer logs to ~/support
Helper.run_command_on_local("mkdir -p %s" % const.SUPPORT_DIR)
Helper.run_command_on_local("cp -r %(src)s %(dst)s" %
{"src": const.LOG_FILE,
"dst": const.SUPPORT_DIR})
Helper.run_command_on_local("cp -r %(setup_node_dir)s/%(generated_script_dir)s %(dst)s" %
{"setup_node_dir": env.setup_node_dir,
"generated_script_dir": const.GENERATED_SCRIPT_DIR,
"dst": const.SUPPORT_DIR})
for i in range(const.MAX_WORKERS):
t = threading.Thread(target=support_node_setup,
args=(support_node_q,))
t.daemon = True
t.start()
support_node_q.join()
# compress ~/support
Helper.run_command_on_local("cd /tmp; tar -czf support.tar.gz support")
safe_print("Finish collecting logs. logs are at /tmp/support.tar.gz.\n")
return
# in case of verify_only or certificate_only, do not deploy
if (not verify_only) and (not certificate_only):
# Use single thread to setup controller nodes
t = threading.Thread(target=worker_setup_node,
args=(controller_node_q,))
t.daemon = True
t.start()
controller_node_q.join()
# Use multiple threads to setup compute nodes
for i in range(const.MAX_WORKERS):
t = threading.Thread(target=worker_setup_node, args=(node_q,))
t.daemon = True
t.start()
node_q.join()
sorted_time_dict = OrderedDict(sorted(time_dict.items(),
key=lambda x: x[1]))
for fqdn, h_time in sorted_time_dict.items():
safe_print("node: %(fqdn)s, time: %(time).2f\n" %
{'fqdn': fqdn, 'time': h_time})
safe_print("Big Cloud Fabric deployment finished! "
"Check %(log)s on each node for details.\n" %
{'log': const.LOG_FILE})
if certificate_dir or certificate_only:
# certify each node
safe_print("Start to certify virtual switches.\n")
for i in range(const.MAX_WORKERS):
t = threading.Thread(target=certify_node_setup,
args=(certify_node_q,))
t.daemon = True
t.start()
certify_node_q.join()
safe_print('Certifying virtual switches done.\n')
if verify or verify_only:
# verify each node and post results
safe_print("Verifying deployment for all compute nodes.\n")
for i in range(const.MAX_WORKERS):
t = threading.Thread(target=verify_node_setup,
args=(verify_node_q,))
t.daemon = True
t.start()
verify_node_q.join()
# print status
# success nodes
safe_print('Deployed successfully to: \n')
for node_element in node_pass:
safe_print(node_element + '\n')
# failed nodes
safe_print('Deployment to following failed: \n')
for node_element in node_fail:
safe_print(str(node_element) + ' : '
+ str(node_fail[node_element]) + '\n')
def main():
# Parse configuration
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config-file", required=True,
help="BCF YAML configuration file")
parser.add_argument("-m", "--deploy-mode", required=False,
                        choices=['pfabric', 'pvfabric'])
parser.add_argument('-f', "--fuel-cluster-id", required=False,
help=("Fuel cluster ID. Fuel settings may override "
"YAML configuration. "
"Please refer to config.yaml"))
parser.add_argument('-t', "--tag", required=False,
help="Deploy to tagged nodes only.")
parser.add_argument('--cleanup', action='store_true', default=False,
help="Clean up existing routers, "
"networks and projects.")
parser.add_argument('--skip-ivs-version-check', action='store_true',
default=False, help="Skip ivs version check.")
parser.add_argument('--verify', action='store_true', default=False,
help="Verify service status for compute nodes "
"after deployment.")
parser.add_argument('--verifyonly', action='store_true', default=False,
help=("Verify service status for compute nodes "
"after deployment. Does not deploy BCF "
"specific changes."))
parser.add_argument('--certificate-dir', required=False,
help=("The directory that has the certificates for "
"virtual switches. This option requires certificates "
"to be ready in the directory. This option will deploy "
"certificate to the corresponding node based on the mac "
"address. Virtual switch will talk TLS afterward."))
parser.add_argument('--certificate-only', action='store_true', default=False,
help=("By turning on this flag, bosi will only deploy certificate "
"to each node. It requires --certificate-dir to be specified."))
parser.add_argument('--generate-csr', action='store_true', default=False,
help=("By turning on this flag, bosi will generate csr on behalf of "
"virtual switches. User needs to certify these csr and use "
"--certificate-dir to specify the certificate directory."))
parser.add_argument('--support', action='store_true', default=False,
help=("Collect openstack logs."))
parser.add_argument('--upgrade-dir', required=False,
help=("The directory that has the packages for upgrade."))
args = parser.parse_args()
if args.certificate_only and (not args.certificate_dir):
safe_print("--certificate-only requires the existence of --certificate-dir.\n")
return
with open(args.config_file, 'r') as config_file:
config = yaml.load(config_file)
# bosi is not used for redhat any more since 3.6
rhosp = False
deploy_bcf(config, args.deploy_mode, args.fuel_cluster_id, rhosp,
args.tag, args.cleanup,
args.verify, args.verifyonly,
args.skip_ivs_version_check,
args.certificate_dir, args.certificate_only,
args.generate_csr, args.support,
args.upgrade_dir)
if __name__ == '__main__':
main()
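# Illustrative invocation matching the argparse flags defined in main();
# the config file name and Fuel cluster ID are hypothetical:
#
#     python bosi.py -c config.yaml -m pfabric -f 42 --verify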
| apache-2.0 | -5,766,354,665,940,184,000 | 39.717439 | 97 | 0.557658 | false |
rstojonic/cheez_cave | cheez_cave/monitor.py | 1 | 4329 | #!/usr/bin/python
# Copyright (c) 2017
# Author: Ray Stojonic
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import ConfigParser
import logging
import logging.config
from apscheduler.schedulers.blocking import BlockingScheduler
import cheez_cave.service.chart_service as chart_service
import cheez_cave.service.display_service as display_service
import cheez_cave.service.humid_service as humid_service
import cheez_cave.service.data_service as data_service
import cheez_cave.service.sensor_service as sensor_service
class Monitor():
def __init__(self):
self.config = ConfigParser.ConfigParser()
config_file = '/home/pi/cheez_cave/cheez_cave.conf'
self.config.read(config_file)
logging.config.fileConfig(self.config.get('AppOptions', 'logging_conf'))
self.logger = logging.getLogger('Monitor')
self.chart = chart_service.ChartService(self.config)
self.display = display_service.DisplayService(self.config)
self.humidifier = humid_service.HumidService(self.config, self.display)
self.dao = data_service.DataService(self.config)
self.sensor = sensor_service.SensorService(self.config)
def persist_reading(self):
''' Get the current sensor reading and persist in database. '''
humidity, temperature = self.read_sensor()
result = self.dao.insert_reading(humidity, temperature)
self.logger.debug('Reading insert attempt: temp : {}, rh : {}, result: {}'
.format(temperature, humidity, result)
)
self.display.update(humidity, temperature)
self.chart.generate_default_chart()
def update_humidifier(self):
''' Get the current humidity and update humidifier control. '''
humidity = self.read_sensor()[0]
self.logger.debug('Updating humidifer, current rh: {}%'.format(humidity))
self.humidifier.update_humidifier(humidity)
def read_sensor(self):
return self.sensor.read_f()
def tick(self):
self.display.update_time()
def main(self):
# Initialize the display with the current sensor reading.
humidity, temperature = self.read_sensor()
self.display.update(humidity, temperature)
# Schedule the jobs.
sched = BlockingScheduler()
# Schedule persist_reading for every 5 minutes.
sched.add_job(self.persist_reading, trigger='cron', minute='*/5')
self.logger.info('Monitor persist_reading job added to schedule')
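        # cron trigger semantics: minute='*/5' fires at :00, :05, :10, ... of
        # every hour, so readings land on round five-minute boundaries.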
# Schedule humidifier for every minute, at 30 seconds.
# Initially had at every minute, 0 seconds, but the extra load
# caused the tick job to miss its scheduled time, resulting in a
# blank display.
sched.add_job(self.update_humidifier, trigger='cron', minute='*/1', second=30)
self.logger.info('Monitor update_humidifier job added to schedule')
# Schedule tick for every second.
sched.add_job(self.tick, trigger='cron', second='*')
self.logger.info('Monitor tick job added to schedule')
try:
self.logger.info('Starting jobs')
sched.start()
finally:
self.display.off()
if __name__ == '__main__':
Monitor().main()
| mit | 8,860,405,932,261,304,000 | 40.228571 | 86 | 0.683992 | false |
capecchi/capecchi.github.io | projects/ParkMaps/helpers.py | 1 | 4081 | from bs4 import BeautifulSoup
import geopy.distance as dist
import glob
import numpy as np
import gpxpy
def consecutive_arrays(arr):
consec_arr = []
consec = []
for i, arr_val in enumerate(arr):
if len(consec) == 0 or arr_val == consec[-1] + 1:
consec.append(arr_val)
else:
consec_arr.append(consec)
consec = [arr_val]
if i == len(arr)-1:
consec_arr.append(consec)
return consec_arr
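# Worked example of the grouping above:
#     consecutive_arrays([1, 2, 3, 7, 8, 10]) -> [[1, 2, 3], [7, 8], [10]]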
def point_to_point_dist(pt1, pt2):
return dist.distance(pt1[::-1][1:], pt2[::-1][1:]).m
def point_to_route_min_dist(pt, route, return_dist=False):
# find index of closest approach
lat_m_per_deg = dist.distance([pt[1], pt[0]], [pt[1] + 1., pt[0]]).m
lon_m_per_deg = dist.distance([pt[1], pt[0]], [pt[1], pt[0] + 1.]).m
# slow
# dis = [dist.distance(pt[::-1][1:], route[i, ::-1][1:]).m for i in np.arange(len(route))]
# faster
dis = list(
np.sqrt(((pt[0] - route[:, 0]) * lon_m_per_deg) ** 2 + ((pt[1] - route[:, 1]) * lat_m_per_deg) ** 2)) # [m]
if return_dist:
return dis
else:
i_close_approach = dis.index(min(dis))
min_dist = min(dis)
return min_dist, i_close_approach
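# Scale note for the planar shortcut above: one degree of latitude is roughly
# 111 km anywhere on Earth, so a 0.001 deg offset is about 111 m; computing
# the per-point degree-to-meter factors with geopy keeps longitude scaling
# correct at any latitude.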
def index_path_dist_to_ends(index, route, return_both=False):
# get the distance along the path between a route index and the route ends
pt = route[index, :]
lat_m_per_deg = dist.distance([pt[1], pt[0]], [pt[1] + 1., pt[0]]).m
lon_m_per_deg = dist.distance([pt[1], pt[0]], [pt[1], pt[0] + 1.]).m
lon = route[:, 0]
lat = route[:, 1]
    dis = np.sqrt(((lon - np.roll(lon, -1)) * lon_m_per_deg) ** 2 + ((lat - np.roll(lat, -1)) * lat_m_per_deg) ** 2)  # [m]
    # np.roll(lon, -1) pairs each point with the next one; the last entry
    # wraps from the final point back to the first, so drop that segment
    # (the original zeroed dis[0], which discarded a real segment instead).
    dis[-1] = 0
if return_both:
return np.sum(dis[:index]), np.sum(dis[index:])
else:
return min([np.sum(dis[:index]), np.sum(dis[index:])])
def chop_off_ends(coords, thresh=75.):
i1, i2 = 0, len(coords[:, 0])-1
try:
while point_to_point_dist(coords[i1, :], coords[0, :]) < thresh:
i1 += 1
while point_to_point_dist(coords[i2, :], coords[-1, :]) < thresh:
i2 -= 1
return coords[i1:i2+1, :] # +1 to include i2 in returned array
except IndexError:
return []
def extract_coords_kml(runfile):
with open(runfile, 'r') as f:
s = BeautifulSoup(f, 'xml')
run_coords = []
for coords in s.find_all('coordinates'):
if len(run_coords) == 0:
run_coords = np.array(process_coordinate_string(coords.string))
else:
run_coords = np.append(run_coords, process_coordinate_string(coords.string))
return run_coords
def extract_coords_gpx(runfile):
with open(runfile, 'r') as f:
gpx = gpxpy.parse(f)
run_coords = []
for track in gpx.tracks:
for segment in track.segments:
for point in segment.points:
run_coords.append([point.longitude, point.latitude, point.elevation])
return np.array(run_coords)
def process_coordinate_string(str):
"""
Take the coordinate string from the KML file, and break it up into [[Lon,Lat,Alt],[Lon,Lat,Alt],...]
"""
long_lat_alt_arr = []
for point in str.split('\n'):
if len(point) > 0:
long_lat_alt_arr.append(
[float(point.split(',')[0]), float(point.split(',')[1]), float(point.split(',')[2])])
return long_lat_alt_arr
def get_dummy_coordinates(top=True):
m_per_deg = dist.distance([0, 0], [0, 1]).m
lon = np.linspace(-2500 / m_per_deg, 2500 / m_per_deg, num=500) # 5k
alt = np.zeros_like(lon)
lat = np.array([np.sqrt(1000 ** 2 - dist.distance([0, 0], [0, lon[i]]).m ** 2) / m_per_deg if dist.distance(
[0, 0], [0, lon[i]]).m <= 1000. else 0. for i in np.arange(len(lon))])
if not top:
lat *= -1.
run_coords = np.zeros((500, 3))
run_coords[:, 0] = lon
run_coords[:, 1] = lat
run_coords[:, 2] = alt
return run_coords
if __name__ == '__main__':
park_dir = 'ElmCreekRuns/'
    for file in glob.glob(park_dir + '*'):
        if file.split('.')[-1] == 'kml':
            rc = extract_coords_kml(file)
        elif file.split('.')[-1] == 'gpx':
            rc = extract_coords_gpx(file)
        # elif file.split('.')[-1] == 'tcx':
        #     rc = extract_coords_tcx(file)
        else:
            continue  # skip extensions without an extractor (e.g. .tcx)
        print('{} : {}'.format(file, len(rc)))
if __name__ == '__main__':
itest = [1, 2, 3, 4, 6, 7, 10, 11, 12, 13, 25, 26, 28]
consec = consecutive_arrays(itest)
    print(consec)  # [[1, 2, 3, 4], [6, 7], [10, 11, 12, 13], [25, 26], [28]]
| mit | 6,959,912,168,072,804,000 | 28.572464 | 120 | 0.608674 | false |
mozman/ezdxf | src/ezdxf/tools/zipmanager.py | 1 | 2785 | # Copyright (c) 2014-2020, Manfred Moitzi
# License: MIT License
from typing import BinaryIO, cast, TextIO, List, Optional
import zipfile
from contextlib import contextmanager
from ezdxf.lldxf.validator import is_dxf_stream, dxf_info
CRLF = b'\r\n'
LF = b'\n'
class ZipReader:
def __init__(self, zip_archive_name: str, errors='surrogateescape'):
if not zipfile.is_zipfile(zip_archive_name):
raise IOError(f"'{zip_archive_name}' is not a zip archive.")
self.zip_archive_name = zip_archive_name
self.zip_archive: Optional[zipfile.ZipFile] = None
self.dxf_file_name: Optional[str] = None
self.dxf_file: Optional[BinaryIO] = None
self.encoding = 'cp1252'
self.errors = errors
self.dxfversion = 'AC1009'
    def open(self, dxf_file_name: Optional[str] = None) -> None:
def open_dxf_file() -> BinaryIO:
# Open always in binary mode:
return cast(BinaryIO, self.zip_archive.open(self.dxf_file_name))
self.zip_archive = zipfile.ZipFile(self.zip_archive_name)
self.dxf_file_name = dxf_file_name if dxf_file_name is not None \
else self.get_first_dxf_file_name()
self.dxf_file = open_dxf_file()
# Reading with standard encoding 'cp1252' - readline() fails if leading
# comments contain none ASCII characters.
if not is_dxf_stream(cast(TextIO, self)):
raise IOError(f"'{self.dxf_file_name}' is not a DXF file.")
self.dxf_file = open_dxf_file() # restart
self.get_dxf_info()
self.dxf_file = open_dxf_file() # restart
def get_first_dxf_file_name(self) -> str:
dxf_file_names = self.get_dxf_file_names()
if len(dxf_file_names) > 0:
return dxf_file_names[0]
else:
raise IOError("No DXF files found.")
def get_dxf_file_names(self) -> List[str]:
return [name for name in self.zip_archive.namelist()
if name.lower().endswith('.dxf')]
def get_dxf_info(self) -> None:
info = dxf_info(cast(TextIO, self))
# Since DXF R2007 (AC1021) file encoding is always 'utf-8'
self.encoding = info.encoding if info.version < 'AC1021' else 'utf-8'
self.dxfversion = info.version
# Required TextIO interface
def readline(self) -> str:
next_line = self.dxf_file.readline().replace(CRLF, LF)
return str(next_line, self.encoding, self.errors)
def close(self) -> None:
self.zip_archive.close()
@contextmanager
def ctxZipReader(zipfilename: str, filename: Optional[str] = None,
errors: str = 'surrogateescape') -> ZipReader:
zip_reader = ZipReader(zipfilename, errors=errors)
zip_reader.open(filename)
yield zip_reader
zip_reader.close()
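# Usage sketch ('drawings.zip' is a hypothetical archive holding DXF files):
#
#     with ctxZipReader('drawings.zip') as zr:
#         print(zr.dxf_file_name, zr.dxfversion, zr.encoding)
#         first_line = zr.readline()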
| mit | -9,120,386,809,456,004,000 | 36.133333 | 79 | 0.628725 | false |
membase/membase-cli | util_cli.py | 1 | 2446 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
import itertools
BIG_VALUE = 2 ** 60
SMALL_VALUE = - (2 ** 60)
def hostport(hoststring, default_port=8091):
""" finds the host and port given a host:port string """
try:
host, port = hoststring.split(':')
port = int(port)
except ValueError:
host = hoststring
port = default_port
return (host, port)
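# Examples, traced from the parsing above:
#     hostport('10.1.2.3:9000') -> ('10.1.2.3', 9000)
#     hostport('10.1.2.3')      -> ('10.1.2.3', 8091)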
def time_label(s):
    # values below -(2**60) -> '-inf'
    # values above 2**60 -> 'inf'
    # 0 -> '0'
    # 4 -> '4 us'
    # 838384 -> '838 ms'
    # 8283852 -> '8 s'
if s > BIG_VALUE:
return 'inf'
elif s < SMALL_VALUE:
return '-inf'
elif s == 0:
return '0'
product = 1
sizes = (('us', 1), ('ms', 1000), ('s', 1000), ('m', 60))
sizeMap = []
for l,sz in sizes:
product = sz * product
sizeMap.insert(0, (l, product))
lbl, factor = itertools.dropwhile(lambda x: x[1] > s, sizeMap).next()
return "%d %s" % (s / factor, lbl)
def size_label(s):
if type(s) in (int, long, float, complex) :
if s == 0:
return "0"
sizes=['', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']
e = math.floor(math.log(abs(s), 1024))
suffix = sizes[int(e)]
return "%d %s" % (s/(1024 ** math.floor(e)), suffix)
else:
return s
def linreg(X, Y):
"""
Summary
Linear regression of y = ax + b
Usage
real, real, real = linreg(list, list)
Returns coefficients to the regression line "y=ax+b" from x[] and y[], and R^2 Value
"""
if len(X) != len(Y): raise ValueError, 'unequal length'
N = len(X)
Sx = Sy = Sxx = Syy = Sxy = 0.0
    for x, y in zip(X, Y):
Sx = Sx + x
Sy = Sy + y
Sxx = Sxx + x*x
Syy = Syy + y*y
Sxy = Sxy + x*y
det = Sxx * N - Sx * Sx
if det == 0:
return 0, 0
else:
a, b = (Sxy * N - Sy * Sx)/det, (Sxx * Sy - Sx * Sxy)/det
return a, b
def two_pass_variance(data):
    # Classic two-pass algorithm: pass one computes the mean, pass two
    # sums squared deviations; float() avoids integer truncation on
    # Python 2 when the samples are ints.
    n = 0
    sum1 = 0
    for x in data:
        n = n + 1
        sum1 = sum1 + x
    if n <= 1:
        return 0
    mean = sum1 / float(n)
    sum2 = 0
    for x in data:
        sum2 = sum2 + (x - mean) * (x - mean)
    return sum2 / (n - 1)
def pretty_float(number, precision=2):
return '%.*f' % (precision, number)
def pretty_print(obj):
import simplejson as json
return json.dumps(obj, indent=4, sort_keys=True)
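if __name__ == '__main__':
    # Small self-test sketch (assumption: run directly; the values in the
    # trailing comments are expected outputs, not part of the module).
    print hostport('127.0.0.1:8091')        # ('127.0.0.1', 8091)
    print size_label(3 * 1024 * 1024)       # 3 MB
    print time_label(8283852)               # 8 s
    a, b = linreg([0, 1, 2, 3], [1.0, 3.0, 5.0, 7.0])
    print pretty_float(a), pretty_float(b)  # 2.00 1.00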
| apache-2.0 | 7,375,997,589,813,253,000 | 23.46 | 88 | 0.498365 | false |
obeattie/sqlalchemy | examples/graphs/directed_graph.py | 1 | 2180 | """a directed graph example."""
from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
from sqlalchemy.orm import mapper, relation, create_session
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
meta = MetaData('sqlite://')
nodes = Table('nodes', meta,
Column("nodeid", Integer, primary_key=True)
)
# here we have lower.nodeid <= higher.nodeid
edges = Table('edges', meta,
Column("lower_id", Integer, ForeignKey('nodes.nodeid'), primary_key=True),
Column("higher_id", Integer, ForeignKey('nodes.nodeid'), primary_key=True)
)
meta.create_all()
class Node(object):
def __init__(self, id):
self.nodeid = id
def add_neighbor(self, othernode):
Edge(self, othernode)
def higher_neighbors(self):
return [x.higher_node for x in self.lower_edges]
def lower_neighbors(self):
return [x.lower_node for x in self.higher_edges]
class Edge(object):
def __init__(self, n1, n2):
if n1.nodeid < n2.nodeid:
self.lower_node = n1
self.higher_node = n2
else:
self.lower_node = n2
self.higher_node = n1
mapper(Node, nodes)
mapper(Edge, edges, properties={
'lower_node':relation(Node,
primaryjoin=edges.c.lower_id==nodes.c.nodeid, backref='lower_edges'),
'higher_node':relation(Node,
primaryjoin=edges.c.higher_id==nodes.c.nodeid, backref='higher_edges')
}
)
session = create_session()
# create a directed graph like this:
#   n1 -> n2 -> n5
#   n2 -> n7
#   n1 -> n3 -> n6
n1 = Node(1)
n2 = Node(2)
n3 = Node(3)
n4 = Node(4)
n5 = Node(5)
n6 = Node(6)
n7 = Node(7)
n2.add_neighbor(n5)
n3.add_neighbor(n6)
n7.add_neighbor(n2)
n1.add_neighbor(n3)
n2.add_neighbor(n1)
[session.add(x) for x in [n1, n2, n3, n4, n5, n6, n7]]
session.flush()
session.expunge_all()
n2 = session.query(Node).get(2)
n3 = session.query(Node).get(3)
assert [x.nodeid for x in n3.higher_neighbors()] == [6]
assert [x.nodeid for x in n3.lower_neighbors()] == [1]
assert [x.nodeid for x in n2.lower_neighbors()] == [1]
assert [x.nodeid for x in n2.higher_neighbors()] == [5,7]
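# A minimal traversal sketch (assumption: executed right after the asserts
# above, reusing the same session): list each node's higher-id neighbors.
for node in session.query(Node).order_by(nodes.c.nodeid):
    print node.nodeid, [x.nodeid for x in node.higher_neighbors()]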
| mit | -4,302,172,050,639,920,600 | 24.348837 | 78 | 0.640367 | false |
jbohren-forks/catkin_tools | catkin_tools/verbs/catkin_list/cli.py | 1 | 3698 | # Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
from catkin_tools.argument_parsing import add_context_args
from catkin_tools.context import Context
from catkin_pkg.packages import find_packages
from catkin_pkg.package import InvalidPackage
from catkin_tools.terminal_color import ColorMapper
color_mapper = ColorMapper()
clr = color_mapper.clr
def prepare_arguments(parser):
add_context_args(parser)
add = parser.add_argument
# What packages to build
add('folders', nargs='*',
help='Folders in which to find packages. (default: workspace source space)')
add('--deps', '--dependencies', default=False, action='store_true',
help="List dependencies of each package.")
add('--depends-on', nargs='*',
help="List all packages that depend on supplied argument package(s).")
add('--quiet', default=False, action='store_true',
help="Don't print out detected package warnings.")
add('--unformatted', '-u', default=None, action='store_true',
help='Print list without punctuation and additional details.')
return parser
def main(opts):
if opts.folders:
folders = opts.folders
else:
# Load the context
ctx = Context.load(opts.workspace, opts.profile, load_env=False)
if not ctx:
print(clr("@{rf}ERROR: Could not determine workspace.@|"), file=sys.stderr)
sys.exit(1)
folders = [ctx.source_space_abs]
list_entry_format = '@{pf}-@| @{cf}%s@|' if not opts.unformatted else '%s'
opts.depends_on = set(opts.depends_on) if opts.depends_on else set()
warnings = []
try:
for folder in folders:
for pkg_pth, pkg in find_packages(folder, warnings=warnings).items():
build_depend_names = [d.name for d in pkg.build_depends]
is_build_dep = opts.depends_on.intersection(
build_depend_names)
run_depend_names = [d.name for d in pkg.run_depends]
is_run_dep = opts.depends_on.intersection(
run_depend_names)
if not opts.depends_on or is_build_dep or is_run_dep:
print(clr(list_entry_format % pkg.name))
if opts.deps:
if build_depend_names:
print(clr(' @{yf}build_depend:@|'))
for dep in build_depend_names:
print(clr(' @{pf}-@| %s' % dep))
if run_depend_names:
print(clr(' @{yf}run_depend:@|'))
for dep in run_depend_names:
print(clr(' @{pf}-@| %s' % dep))
except InvalidPackage as ex:
message = '\n'.join(ex.args)
print(clr("@{rf}Error:@| The directory %s contains an invalid package."
" See below for details:\n\n%s" % (folder, message)))
# Print out warnings
if not opts.quiet:
for warning in warnings:
print(clr("@{yf}Warning:@| %s" % warning), file=sys.stderr)
| apache-2.0 | 8,735,294,792,840,117,000 | 36.353535 | 87 | 0.604922 | false |
davidam/python-examples | basics/json/json2pandas.py | 1 | 1436 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2018 David Arroyo Menéndez
# Author: David Arroyo Menéndez <[email protected]>
# Maintainer: David Arroyo Menéndez <[email protected]>
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with GNU Emacs; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA,
import json
from pprint import pprint
import pandas as pd
from pandas.io.json import json_normalize
data = [
{
'name': {
'first': 'vikash',
'last': 'singh'
},
'age': 27
},
{
'name': {
'first': 'satyam',
'last': 'singh'
},
'age': 14
}
]
df = pd.DataFrame.from_dict(json_normalize(data), orient='columns')
print(df)
print(json_normalize(data))
jsondata = open('perceval.json').read()
json_object = json.loads(jsondata)
print(json_object)
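# A small extension sketch: json_normalize's sep argument controls how
# nested keys are joined into flat column names.
df2 = json_normalize(data, sep='_')
print(df2.columns.tolist())  # e.g. ['name_first', 'name_last', 'age']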
| gpl-3.0 | 6,866,459,863,526,685,000 | 26.037736 | 70 | 0.691556 | false |
mckayward/floyd-cli | floyd/client/dataset.py | 1 | 1802 | import sys
from floyd.manager.auth_config import AuthConfigManager
from floyd.client.base import FloydHttpClient
from floyd.exceptions import (
FloydException, AuthenticationException, NotFoundException
)
from floyd.model.dataset import Dataset
from floyd.log import logger as floyd_logger
from floyd.manager.data_config import DataConfigManager
class DatasetClient(FloydHttpClient):
"""
Client to get datasets from the server
"""
def __init__(self):
self.url = "/datasets"
super(DatasetClient, self).__init__()
def get_datasets(self):
try:
response = self.request("GET", self.url)
datasets_dict = response.json()
return [Dataset.from_dict(dataset) for dataset in datasets_dict.get("datasets", [])]
except FloydException as e:
if isinstance(e, AuthenticationException):
# exit now since there is nothing we can do without login
sys.exit(1)
floyd_logger.info("Error while retrieving datasets: {}".format(e.message))
return []
def get_by_name(self, name, username=None):
username = username or AuthConfigManager.get_access_token().username
try:
response = self.request('GET', '%s/%s/%s' % (self.url, username, name))
return Dataset.from_dict(response.json())
except NotFoundException:
return None
def add_data(self, source):
data_config = DataConfigManager.get_config()
dataset_id = data_config.family_id
if not dataset_id:
sys.exit('Please initialize current directory with \'floyd data init DATASET_NAME\' first.')
        response = self.request(
            'POST', '%s/%s' % (self.url, dataset_id), json={'source': source})
        return response.json()
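# A minimal usage sketch (assumptions: the user has already run 'floyd
# login' so an access token exists; the 'name' attribute is assumed from
# the Dataset model imported above).
if __name__ == '__main__':
    client = DatasetClient()
    for dataset in client.get_datasets():
        print(dataset.name)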
| apache-2.0 | -508,118,536,616,188,600 | 38.173913 | 104 | 0.643729 | false |
rhyolight/nupic.son | tests/app/melange/views/test_connection.py | 1 | 69883 | # Copyright 2014 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for connection related views."""
import mock
import httplib
import unittest
from django import http
from melange.logic import connection as connection_logic
from melange.logic import profile as profile_logic
from melange.models import connection as connection_model
from melange.request import exception
from melange.views import connection as connection_view
from melange.utils import rich_bool
from soc.views.helper import request_data
# TODO(daniel): Summer of Code module cannot be imported here
from soc.modules.gsoc.logic import profile as soc_profile_logic
from tests import org_utils
from tests import profile_utils
from tests import program_utils
from tests.utils import connection_utils
class UrlConnectionIsForCurrentUserAccessCheckerTest(unittest.TestCase):
"""Unit tests for UrlConnectionIsForCurrentUserAccessChecker class."""
def setUp(self):
"""See unittest.TestCase.setUp for specification."""
sponsor = program_utils.seedSponsor()
self.program = program_utils.seedProgram(sponsor_key=sponsor.key())
self.organization = org_utils.seedOrganization(self.program.key())
self.user = profile_utils.seedNDBUser()
profile = profile_utils.seedNDBProfile(
self.program.key(), user=self.user)
connection = connection_utils.seed_new_connection(
profile.key, self.organization.key)
kwargs = {
'sponsor': sponsor.key().name(),
'program': self.program.program_id,
'organization': self.organization.org_id,
'user': self.user.user_id,
'id': str(connection.key.id())
}
self.data = request_data.RequestData(None, None, kwargs)
def testConnectedUserAccessGranted(self):
"""Tests that access is granted for the connected user."""
profile_utils.loginNDB(self.user)
access_checker = (
connection_view.UrlConnectionIsForCurrentUserAccessChecker())
access_checker.checkAccess(self.data, None)
def testAnotherUserAccessDenied(self):
"""Tests that another (not connected) user is denied access."""
# seed another user who is currently logged in
other_user = profile_utils.seedNDBUser()
profile_utils.loginNDB(other_user)
access_checker = (
connection_view.UrlConnectionIsForCurrentUserAccessChecker())
with self.assertRaises(exception.UserError) as context:
access_checker.checkAccess(self.data, None)
self.assertEqual(context.exception.status, httplib.FORBIDDEN)
def testUserWithNoProfileAccessDenied(self):
"""Tests that access for a user with no profile is denied."""
# check for not logged-in user with no profile
profile_utils.logout()
access_checker = (
connection_view.UrlConnectionIsForCurrentUserAccessChecker())
with self.assertRaises(exception.UserError) as context:
access_checker.checkAccess(self.data, None)
self.assertEqual(context.exception.status, httplib.FORBIDDEN)
# check for another user who is currently logged in but
# does not have a profile
other_user = profile_utils.seedNDBUser()
profile_utils.loginNDB(other_user)
access_checker = (
connection_view.UrlConnectionIsForCurrentUserAccessChecker())
with self.assertRaises(exception.UserError) as context:
access_checker.checkAccess(self.data, None)
self.assertEqual(context.exception.status, httplib.FORBIDDEN)
def testOrgAdminAccessDenied(self):
"""Tests that org admin for connected organization is denied access."""
# seed another user who is currently logged in
other_user = profile_utils.seedNDBUser()
profile_utils.loginNDB(other_user)
profile_utils.seedNDBProfile(
self.program.key(), user=other_user, admin_for=[self.organization.key])
access_checker = (
connection_view.UrlConnectionIsForCurrentUserAccessChecker())
with self.assertRaises(exception.UserError) as context:
access_checker.checkAccess(self.data, None)
self.assertEqual(context.exception.status, httplib.FORBIDDEN)
class IsUserOrgAdminForUrlConnectionTest(unittest.TestCase):
"""Unit tests for IsUserOrgAdminForUrlConnection class."""
def setUp(self):
"""See unittest.TestCase.setUp for specification."""
sponsor = program_utils.seedSponsor()
self.program = program_utils.seedProgram(sponsor_key=sponsor.key())
self.organization = org_utils.seedOrganization(self.program.key())
self.user = profile_utils.seedNDBUser()
profile = profile_utils.seedNDBProfile(
self.program.key(), user=self.user)
connection = connection_utils.seed_new_connection(
profile.key, self.organization.key)
kwargs = {
'sponsor': sponsor.key().name(),
'program': self.program.program_id,
'organization': self.organization.org_id,
'user': self.user.user_id,
'id': str(connection.key.id())
}
self.data = request_data.RequestData(None, None, kwargs)
def testOrgAdminAccessGranted(self):
"""Tests that access is granted for org admin for the connected org."""
# seed a user who is currently logged in
other_user = profile_utils.seedNDBUser()
profile_utils.loginNDB(other_user)
profile_utils.seedNDBProfile(
self.program.key(), user=other_user, admin_for=[self.organization.key])
access_checker = connection_view.IsUserOrgAdminForUrlConnection()
access_checker.checkAccess(self.data, None)
def testConnectedUserAccessDenied(self):
"""Tests that access is denied for connected user."""
profile_utils.loginNDB(self.user)
access_checker = connection_view.IsUserOrgAdminForUrlConnection()
with self.assertRaises(exception.UserError) as context:
access_checker.checkAccess(self.data, None)
self.assertEqual(context.exception.status, httplib.FORBIDDEN)
def testOtherOrgAdminAccessDenied(self):
"""Tests that access is denied for org admin for another org."""
# seed another organization
other_org = org_utils.seedOrganization(self.program.key())
# seed a user who is currently logged in
other_user = profile_utils.seedNDBUser()
profile_utils.loginNDB(other_user)
profile_utils.seedNDBProfile(
self.program.key(), user=other_user, admin_for=[other_org.key])
access_checker = connection_view.IsUserOrgAdminForUrlConnection()
with self.assertRaises(exception.UserError) as context:
access_checker.checkAccess(self.data, None)
self.assertEqual(context.exception.status, httplib.FORBIDDEN)
class _MockView(object):
"""Simple request handler to be used as a callback for other handlers."""
def get(self, data, access, mutator):
"""See base.RequestHandler.get for specification."""
pass
class UserActionsFormHandlerTest(unittest.TestCase):
"""Unit tests for UserActionsFormHandler class."""
def setUp(self):
"""See unittest.TestCase.setUp for specification."""
self.sponsor = program_utils.seedSponsor()
self.program = program_utils.seedProgram(sponsor_key=self.sponsor.key())
self.org = org_utils.seedOrganization(self.program.key())
# unused object used as a callback for the handler
self.view = _MockView()
def testUserNoRoleToNoRoleWhileNoRoleOffered(self):
"""Tests NO ROLE if user has no role and no role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# no role is offered to the user; the user does not request any role
connection = connection_utils.seed_new_connection(profile.key, self.org.key)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user still does not request any role
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testUserNoRoleToNoRoleWhileMentorRoleOffered(self):
"""Tests NO ROLE if user has no role and mentor role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# mentor role is offered to the user; the user does not request any role
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.MENTOR_ROLE)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user still does not request any role
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testUserNoRoleToNoRoleWhileOrgAdminRoleOffered(self):
"""Tests NO ROLE if user has no role and org admin role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# org admin role is offered to the user; the user does not request any role
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.ORG_ADMIN_ROLE)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user still does not request any role
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testUserNoRoleToRoleWhileNoRoleOffered(self):
"""Tests ROLE if user has no role and no role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# no role is offered to the user; the user does not request any role
connection = connection_utils.seed_new_connection(profile.key, self.org.key)
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user requests a role now
request.POST = {'role': connection_model.ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertTrue(connection.seen_by_user)
self.assertFalse(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(connection_logic._USER_REQUESTS_ROLE, message.content)
def testUserNoRoleToRoleWhileMentorRoleOffered(self):
"""Tests ROLE if user has no role and mentor role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# mentor role is offered to the user; the user does not request any role
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.MENTOR_ROLE)
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user requests a role now
request.POST = {'role': connection_model.ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertTrue(connection.seen_by_user)
self.assertFalse(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(connection_logic._USER_REQUESTS_ROLE, message.content)
def testUserNoRoleToRoleWhileOrgAdminRoleOffered(self):
"""Tests ROLE if user has no role and org admin role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# org admin role is offered to the user; the user does not request any role
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.ORG_ADMIN_ROLE)
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user requests a role now
request.POST = {'role': connection_model.ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertTrue(connection.seen_by_user)
self.assertFalse(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(connection_logic._USER_REQUESTS_ROLE, message.content)
def testUserRoleToRoleWhileNoRoleOffered(self):
"""Tests ROLE if user has role and no role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# no role is offered to the user; the user requests role
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, user_role=connection_model.ROLE)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user still requests a role
request.POST = {'role': connection_model.ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testUserRoleToRoleWhileMentorRoleOffered(self):
"""Tests ROLE if user has role and mentor role is offered."""
# mentor role is offered to the user; the user requests role
profile = profile_utils.seedNDBProfile(
self.program.key(), mentor_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user still requests a role
request.POST = {'role': connection_model.ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testUserRoleToRoleWhileOrgAdminRoleOffered(self):
"""Tests ROLE if user has role and org admin role is offered."""
# org admin role is offered to the user; the user requests role
profile = profile_utils.seedNDBProfile(
self.program.key(), admin_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user still requests a role
request.POST = {'role': connection_model.ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testUserRoleToNoRoleWhileNoRoleOffered(self):
"""Tests NO ROLE if user has role and no role is offered."""
profile = profile_utils.seedNDBProfile(self.program.key())
# no role is offered to the user; the user requests role
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, user_role=connection_model.ROLE)
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user does not request role anymore
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertTrue(connection.seen_by_user)
self.assertFalse(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(
connection_logic._USER_DOES_NOT_REQUEST_ROLE, message.content)
def testUserRoleToNoRoleWhileMentorRoleOffered(self):
"""Tests NO ROLE if user has role and mentor role is offered."""
# mentor role is offered to the user; the user requests role
profile = profile_utils.seedNDBProfile(
self.program.key(), mentor_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_user = connection.seen_by_user
old_seen_by_org = connection.seen_by_org
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id()),
}
request = http.HttpRequest()
# the user does not request role anymore
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
# assume that mentor is not eligible to quit
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.FALSE):
with self.assertRaises(exception.UserError) as context:
handler.handle(data, None, None)
self.assertEqual(context.exception.status, httplib.BAD_REQUEST)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
# try again but now, the user is eligible to quit
request = http.HttpRequest()
# the user does not request role anymore
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
# assume that mentor is eligible to quit
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.TRUE):
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertTrue(connection.seen_by_user)
self.assertFalse(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(
connection_logic._USER_DOES_NOT_REQUEST_ROLE, message.content)
def testUserRoleToNoRoleWhileOrgAdminRoleOffered(self):
"""Tests NO ROLE if user has role and org admin role is offered."""
# org admin role is offered to the user; the user requests role
profile = profile_utils.seedNDBProfile(
self.program.key(), admin_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_user = connection.seen_by_user
old_seen_by_org = connection.seen_by_org
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': str(connection.key.id())
}
request = http.HttpRequest()
# the user does not request role anymore
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
# assume that mentor is not eligible to quit
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.FALSE):
with self.assertRaises(exception.UserError) as context:
handler.handle(data, None, None)
self.assertEqual(context.exception.status, httplib.BAD_REQUEST)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
# try again but now, the user is eligible to quit
request = http.HttpRequest()
# the user does not request role anymore
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
    handler = connection_view.UserActionsFormHandler(self.view, url='unused')
# assume that mentor is eligible to quit
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.TRUE):
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertTrue(connection.seen_by_user)
self.assertFalse(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(connection_logic._USER_DOES_NOT_REQUEST_ROLE, message.content)
def _generatedMessageContent(org_role, org_admin):
"""Returns part of content of a message that is generated when role offered
by organization changes.
Args:
org_role: new role offered by organization.
org_admin: profile entity of org admin who changed the role
Returns:
a string that is a part of message content that is generated.
"""
return connection_logic._ORG_ROLE_CHANGED % (
connection_model.VERBOSE_ROLE_NAMES[org_role], org_admin.public_name)
class OrgActionsFormHandlerTest(unittest.TestCase):
"""Unit tests for OrgActionsFormHandler class."""
def setUp(self):
"""See unittest.TestCase.setUp for specification."""
self.sponsor = program_utils.seedSponsor()
self.program = program_utils.seedProgram(sponsor_key=self.sponsor.key())
self.org = org_utils.seedOrganization(self.program.key())
# unused object used as a callback for the handler
self.view = _MockView()
def testNoRoleToNoRoleWhileNoRoleRequested(self):
"""Tests NO ROLE if no role offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(profile.key, self.org.key)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# no role is still offered
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testNoRoleToNoRoleWhileRoleRequested(self):
"""Tests NO ROLE if no role offered and user requests role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user requests role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, user_role=connection_model.ROLE)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# no role is still offered
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testNoRoleToMentorRoleWhileNoRoleRequested(self):
"""Tests MENTOR ROLE if no role offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(profile.key, self.org.key)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.MENTOR_ROLE, data.ndb_profile), message.content)
def testNoRoleToMentorRoleWhileRoleRequested(self):
"""Tests MENTOR ROLE if no role offered and user requests role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user requests role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, user_role=connection_model.ROLE)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.MENTOR_ROLE, data.ndb_profile), message.content)
def testNoRoleToOrgAdminRoleWhileNoRoleRequested(self):
"""Tests ORG ADMIN ROLE if no role offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(profile.key, self.org.key)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# org admin role is offered now
request.POST = {'role': connection_model.ORG_ADMIN_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.ORG_ADMIN_ROLE, data.ndb_profile), message.content)
def testNoRoleToOrgAdminRoleWhileRoleRequested(self):
"""Tests ORG ADMIN ROLE if no role offered and user requests role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user requests role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, user_role=connection_model.ROLE)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# org admin role is offered now
request.POST = {'role': connection_model.ORG_ADMIN_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.ORG_ADMIN_ROLE, data.ndb_profile), message.content)
def testMentorRoleToNoRoleWhileNoRoleRequested(self):
"""Tests NO ROLE if mentor role offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.MENTOR_ROLE)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# no role is offered now
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.NO_ROLE, data.ndb_profile), message.content)
def testMentorRoleToNoRoleWhileRoleRequested(self):
"""Tests NO ROLE if mentor role offered and user requests role."""
# user is a mentor for organization
profile = profile_utils.seedNDBProfile(
self.program.key(), mentor_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# no role is offered now
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# assume that mentor cannot be removed
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.FALSE):
with self.assertRaises(exception.UserError) as context:
handler.handle(data, None, None)
self.assertEqual(context.exception.status, httplib.BAD_REQUEST)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
# now the mentor can be removed
request = http.HttpRequest()
# no role is offered now
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
# assume that mentor can be removed
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.TRUE):
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.NO_ROLE, data.ndb_profile), message.content)
def testMentorRoleToMentorRoleWhileNoRoleRequested(self):
"""Tests MENTOR ROLE if mentor role offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.MENTOR_ROLE)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testMentorRoleToMentorRoleWhileRoleRequested(self):
"""Tests MENTOR ROLE if mentor role offered and user requests role."""
# user is a mentor for organization
profile = profile_utils.seedNDBProfile(
self.program.key(), mentor_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testMentorRoleToOrgAdminRoleWhileNoRoleRequested(self):
"""Tests ORG ADMIN if mentor role offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.MENTOR_ROLE)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# org admin role is offered now
request.POST = {'role': connection_model.ORG_ADMIN_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.ORG_ADMIN_ROLE, data.ndb_profile), message.content)
def testMentorRoleToOrgAdminRoleWhileRoleRequested(self):
"""Tests ORG ADMIN if mentor role offered and user requests role."""
# user is a mentor for organization
profile = profile_utils.seedNDBProfile(
self.program.key(), mentor_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# org admin role is offered now
request.POST = {'role': connection_model.ORG_ADMIN_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.ORG_ADMIN_ROLE, data.ndb_profile), message.content)
def testOrgAdminRoleToNoRoleWhileNoRoleRequested(self):
"""Tests NO ROLE if org admin offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.ORG_ADMIN_ROLE)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# no role is offered now
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.NO_ROLE, data.ndb_profile), message.content)
def testOrgAdminRoleToNoRoleWhileRoleRequested(self):
"""Tests NO ROLE if org admin offered and user requests role."""
# user is an org admin for organization
profile = profile_utils.seedNDBProfile(
self.program.key(), admin_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# no role is offered now
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
# assume that org admin cannot be removed
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.FALSE):
with self.assertRaises(exception.UserError) as context:
handler.handle(data, None, None)
self.assertEqual(context.exception.status, httplib.BAD_REQUEST)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
    # now the org admin can be removed
request = http.HttpRequest()
# no role is offered now
request.POST = {'role': connection_model.NO_ROLE}
data = request_data.RequestData(request, None, kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
# assume that org admin can be removed
with mock.patch.object(
soc_profile_logic, 'isNoRoleEligibleForOrg', return_value=rich_bool.TRUE):
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.NO_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.NO_ROLE, data.ndb_profile), message.content)
def testOrgAdminRoleToMentorRoleWhileNoRoleRequested(self):
"""Tests MENTOR ROLE if org admin offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.ORG_ADMIN_ROLE)
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.MENTOR_ROLE, data.ndb_profile), message.content)
def testOrgAdminRoleToMentorRoleWhileRoleRequested(self):
"""Tests MENTOR ROLE if org admin offered and user requests role."""
# user is an org admin for organization
profile = profile_utils.seedNDBProfile(
self.program.key(), admin_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# assume that org admin cannot be removed
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
with mock.patch.object(
profile_logic, 'isMentorRoleEligibleForOrg',
return_value=rich_bool.FALSE):
with self.assertRaises(exception.UserError) as context:
handler.handle(data, None, None)
self.assertEqual(context.exception.status, httplib.BAD_REQUEST)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
# now the org admin can be removed
request = http.HttpRequest()
# mentor role is offered now
request.POST = {'role': connection_model.MENTOR_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
# seed an organization administrator who makes the decision
user = profile_utils.seedNDBUser()
profile_utils.loginNDB(user)
profile_utils.seedNDBProfile(
self.program.key(), user=user, admin_for=[self.org.key])
# assume that org admin can be removed
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
with mock.patch.object(
profile_logic, 'isMentorRoleEligibleForOrg',
return_value=rich_bool.TRUE):
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# connection changed, so seen by properties are changed
self.assertFalse(connection.seen_by_user)
self.assertTrue(connection.seen_by_org)
# check that a connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIn(_generatedMessageContent(
connection_model.MENTOR_ROLE, data.ndb_profile), message.content)
def testOrgAdminRoleToOrgAdminRoleWhileNoRoleRequested(self):
"""Tests ORG ADMIN if org admin offered and user requests no role."""
profile = profile_utils.seedNDBProfile(self.program.key())
# user does not request any role from organization
connection = connection_utils.seed_new_connection(
profile.key, self.org.key, org_role=connection_model.ORG_ADMIN_ROLE)
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# org admin role is offered now
request.POST = {'role': connection_model.ORG_ADMIN_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.NO_ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertNotIn(self.org.key, profile.admin_for)
self.assertNotIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
def testOrgAdminRoleToOrgAdminRoleWhileRoleRequested(self):
"""Tests ORG ADMIN if org admin offered and user requests role."""
    # user is an org admin for organization
profile = profile_utils.seedNDBProfile(
self.program.key(), admin_for=[self.org.key])
connection = connection_model.Connection.query(
connection_model.Connection.organization == self.org.key,
ancestor=profile.key).get()
old_seen_by_org = connection.seen_by_org
old_seen_by_user = connection.seen_by_user
self.kwargs = {
'sponsor': self.sponsor.link_id,
'program': self.program.program_id,
'user': profile.profile_id,
'id': connection.key.id()
}
request = http.HttpRequest()
# org admin role is offered now
request.POST = {'role': connection_model.ORG_ADMIN_ROLE}
data = request_data.RequestData(request, None, self.kwargs)
handler = connection_view.OrgActionsFormHandler(self.view, url='unused')
handler.handle(data, None, None)
# check if all data is updated properly
connection = connection.key.get()
profile = profile.key.get()
self.assertEqual(connection.user_role, connection_model.ROLE)
self.assertEqual(connection.org_role, connection_model.ORG_ADMIN_ROLE)
self.assertIn(self.org.key, profile.admin_for)
self.assertIn(self.org.key, profile.mentor_for)
# nothing has changed, so seen by properties are not changed
self.assertEqual(connection.seen_by_user, old_seen_by_user)
self.assertEqual(connection.seen_by_org, old_seen_by_org)
# check that no connection message is created
query = connection_model.ConnectionMessage.query(ancestor=connection.key)
message = query.get()
self.assertIsNone(message)
| apache-2.0 | -1,343,540,183,369,436,200 | 39.232009 | 83 | 0.711332 | false |
stefanseefeld/numba | numba/tests/test_cffi.py | 1 | 4966 | from __future__ import print_function, division, absolute_import
import array
import numpy as np
import sys
from numba import unittest_support as unittest
from numba import jit, cffi_support, types, errors
from numba.compiler import compile_isolated, Flags
from numba.tests.support import TestCase, tag
import numba.tests.cffi_usecases as mod
enable_pyobj_flags = Flags()
enable_pyobj_flags.set("enable_pyobject")
no_pyobj_flags = Flags()
@unittest.skipUnless(cffi_support.SUPPORTED,
"CFFI not supported -- please install the cffi module")
class TestCFFI(TestCase):
# Need to run the tests serially because of race conditions in
# cffi's OOL mode.
_numba_parallel_test_ = False
def setUp(self):
mod.init()
mod.init_ool()
def test_type_map(self):
signature = cffi_support.map_type(mod.ffi.typeof(mod.cffi_sin))
self.assertEqual(len(signature.args), 1)
self.assertEqual(signature.args[0], types.double)
def _test_function(self, pyfunc, flags=enable_pyobj_flags):
cres = compile_isolated(pyfunc, [types.double], flags=flags)
cfunc = cres.entry_point
for x in [-1.2, -1, 0, 0.1, 3.14]:
self.assertPreciseEqual(pyfunc(x), cfunc(x))
def test_sin_function(self):
self._test_function(mod.use_cffi_sin)
@tag('important')
def test_sin_function_npm(self):
self._test_function(mod.use_cffi_sin, flags=no_pyobj_flags)
def test_sin_function_ool(self, flags=enable_pyobj_flags):
self._test_function(mod.use_cffi_sin_ool)
def test_sin_function_npm_ool(self):
self._test_function(mod.use_cffi_sin_ool, flags=no_pyobj_flags)
def test_two_funcs(self):
# Check that two constant functions don't get mixed up.
self._test_function(mod.use_two_funcs)
def test_two_funcs_ool(self):
self._test_function(mod.use_two_funcs_ool)
def test_function_pointer(self):
pyfunc = mod.use_func_pointer
cfunc = jit(nopython=True)(pyfunc)
for (fa, fb, x) in [
(mod.cffi_sin, mod.cffi_cos, 1.0),
(mod.cffi_sin, mod.cffi_cos, -1.0),
(mod.cffi_cos, mod.cffi_sin, 1.0),
(mod.cffi_cos, mod.cffi_sin, -1.0),
(mod.cffi_sin_ool, mod.cffi_cos_ool, 1.0),
(mod.cffi_sin_ool, mod.cffi_cos_ool, -1.0),
(mod.cffi_cos_ool, mod.cffi_sin_ool, 1.0),
(mod.cffi_cos_ool, mod.cffi_sin_ool, -1.0),
(mod.cffi_sin, mod.cffi_cos_ool, 1.0),
(mod.cffi_sin, mod.cffi_cos_ool, -1.0),
(mod.cffi_cos, mod.cffi_sin_ool, 1.0),
(mod.cffi_cos, mod.cffi_sin_ool, -1.0)]:
expected = pyfunc(fa, fb, x)
got = cfunc(fa, fb, x)
self.assertEqual(got, expected)
# A single specialization was compiled for all calls
self.assertEqual(len(cfunc.overloads), 1, cfunc.overloads)
def test_user_defined_symbols(self):
pyfunc = mod.use_user_defined_symbols
cfunc = jit(nopython=True)(pyfunc)
self.assertEqual(pyfunc(), cfunc())
def check_vector_sin(self, cfunc, x, y):
cfunc(x, y)
np.testing.assert_allclose(y, np.sin(x))
def _test_from_buffer_numpy_array(self, pyfunc, dtype):
x = np.arange(10).astype(dtype)
y = np.zeros_like(x)
cfunc = jit(nopython=True)(pyfunc)
self.check_vector_sin(cfunc, x, y)
@tag('important')
def test_from_buffer_float32(self):
self._test_from_buffer_numpy_array(mod.vector_sin_float32, np.float32)
def test_from_buffer_float64(self):
self._test_from_buffer_numpy_array(mod.vector_sin_float64, np.float64)
def test_from_buffer_struct(self):
n = 10
x = np.arange(n) + np.arange(n * 2, n * 3) * 1j
y = np.zeros(n)
real_cfunc = jit(nopython=True)(mod.vector_extract_real)
real_cfunc(x, y)
np.testing.assert_equal(x.real, y)
imag_cfunc = jit(nopython=True)(mod.vector_extract_imag)
imag_cfunc(x, y)
np.testing.assert_equal(x.imag, y)
@unittest.skipIf(sys.version_info < (3,),
"buffer protocol on array.array needs Python 3+")
def test_from_buffer_pyarray(self):
pyfunc = mod.vector_sin_float32
cfunc = jit(nopython=True)(pyfunc)
x = array.array("f", range(10))
y = array.array("f", [0] * len(x))
self.check_vector_sin(cfunc, x, y)
def test_from_buffer_error(self):
pyfunc = mod.vector_sin_float32
cfunc = jit(nopython=True)(pyfunc)
# Non-contiguous array
x = np.arange(10).astype(np.float32)[::2]
y = np.zeros_like(x)
with self.assertRaises(errors.TypingError) as raises:
cfunc(x, y)
self.assertIn("from_buffer() unsupported on non-contiguous buffers",
str(raises.exception))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | -4,280,643,776,205,915,600 | 33.727273 | 78 | 0.610552 | false |
jackpien/rosbots | ros_ws/src/rosbots_driver/scripts/rosbots_driver/motor_driver.py | 1 | 4631 | #!/usr/bin/env python
#import RPIO as GPIO
import RPi.GPIO as GPIO
from RPIO import PWM
import RPIO
import rospy
from geometry_msgs.msg import Twist
class MotorDriverL9110S:
# Broadcom pin outs
# https://www.element14.com/community/servlet/JiveServlet/previewBody/73950-102-10-339300/pi3_gpio.png
left_ia = 23
left_ib = 24
right_ia = 20
right_ib = 21
encoder_right = 22
encoder_left = 17
pwm_subcycle_time_us = 20000 # 20ms cycle for PWM
pwm_max_width = 20000
pwm_granularity = 10
def __init__(self):
rospy.init_node('motor_driver', anonymous=True)
rospy.Subscriber("twist", Twist, self.twist_callback)
rospy.on_shutdown(self.shutdown_cb)
GPIO.setmode(GPIO.BCM) # Broadcom pin-numbering scheme
GPIO.cleanup()
GPIO.setup(self.left_ib, GPIO.OUT)
GPIO.setup(self.right_ib, GPIO.OUT)
GPIO.setup(self.encoder_right, GPIO.IN) # Right
GPIO.setup(self.encoder_left, GPIO.IN) # Left
self._servo = PWM.Servo(subcycle_time_us=self.pwm_subcycle_time_us)
self._servo.set_servo(self.left_ia, 0)
self._servo.set_servo(self.right_ia, 0)
GPIO.output(self.left_ib, GPIO.LOW)
GPIO.output(self.right_ib, GPIO.LOW)
# Two GPIO interrupt callbacks for encoder
RPIO.setup(self.encoder_right, RPIO.IN)
RPIO.add_interrupt_callback(self.encoder_right,
self.encoder_callback, edge='rising',
debounce_timeout_ms=10,
pull_up_down=RPIO.PUD_DOWN,
threaded_callback=True)
RPIO.setup(self.encoder_left, RPIO.IN)
RPIO.add_interrupt_callback(self.encoder_left,
self.encoder_callback, edge='rising',
debounce_timeout_ms=10,
pull_up_down=RPIO.PUD_DOWN,
threaded_callback=True)
# Starts waiting for interrupts
RPIO.wait_for_interrupts(threaded=True)
def shutdown_cb(self):
rospy.loginfo(rospy.get_caller_id() + ": Shutdown callback")
GPIO.setmode(GPIO.BCM) # Broadcom pin-numbering scheme
self._servo.stop_servo(self.left_ia)
self._servo.stop_servo(self.right_ia)
GPIO.output(self.left_ib, GPIO.LOW)
GPIO.output(self.right_ib, GPIO.LOW)
GPIO.cleanup()
RPIO.cleanup()
def twist_callback(self, data):
rospy.loginfo(rospy.get_caller_id() + \
": Linear.x: %f -- Angular.z: %f", \
data.linear.x, data.angular.z)
x_dir = max(-1, min(1, data.linear.x))
z_ang = max(-1, min(1, data.angular.z))
lw = x_dir
rw = x_dir
if z_ang != 0:
            # Mix steering into the wheel speeds: positive z (counter-clockwise)
            # slows the left wheel and speeds up the right; negative z does the opposite
lw -= z_ang
rw += z_ang
lw = max(-1, min(1, lw))
rw = max(-1, min(1, rw))
rospy.loginfo(rospy.get_caller_id() + ": lw: %f -- rw: %f", lw, rw)
if lw == 0:
self._servo.set_servo(self.left_ia, 0)
GPIO.output(self.left_ib, GPIO.LOW)
else:
if lw > 0:
pw = self.pwm_max_width * lw
GPIO.output(self.left_ib, GPIO.LOW)
else:
pw = self.pwm_max_width - (self.pwm_max_width * (lw*-1))
GPIO.output(self.left_ib, GPIO.HIGH)
pw = int(pw/self.pwm_granularity) * self.pwm_granularity
self._servo.set_servo(self.left_ia, pw)
if rw == 0:
self._servo.set_servo(self.right_ia, 0)
GPIO.output(self.right_ib, GPIO.LOW)
else:
if rw > 0:
pw = self.pwm_max_width * rw
GPIO.output(self.right_ib, GPIO.LOW)
else:
pw = self.pwm_max_width - (self.pwm_max_width * (rw*-1))
GPIO.output(self.right_ib, GPIO.HIGH)
pw = int(pw/self.pwm_granularity) * self.pwm_granularity
self._servo.set_servo(self.right_ia, pw)
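    # Worked example of the mixing and PWM mapping above (illustrative only):
    # with linear.x = 0.5 and angular.z = 0.25, lw = 0.25 and rw = 0.75.
    # Forward speeds scale the pulse width directly: 0.75 * 20000 = 15000us.
    # Reverse speeds drive the IB pin HIGH and invert the pulse width, so
    # lw = -0.25 maps to 20000 - (20000 * 0.25) = 15000us with IB HIGH.
    # The result is then rounded down to pwm_granularity (10us) steps.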
def encoder_callback(self, gpio_id, val):
rospy.loginfo(rospy.get_caller_id() + ": gpio %s: %s", gpio_id, val)
def main():
mdriver = MotorDriverL9110S()
# spin() simply keeps python from exiting until this node is stopped
rospy.spin()
if __name__ == '__main__':
main()
| gpl-3.0 | 444,582,640,532,802,600 | 30.937931 | 106 | 0.525157 | false |
bieschke/nuffle | lib/python/formencode/validators.py | 1 | 59292 | ## FormEncode, a Form processor
## Copyright (C) 2003, Ian Bicking <[email protected]>
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## NOTE: In the context of the Python environment, I interpret "dynamic
## linking" as importing -- thus the LGPL applies to the contents of
## the modules, but make no requirements on code importing these
## modules.
"""
Validator/Converters for use with FormEncode.
"""
import re
DateTime = None
mxlookup = None
httplib = None
urlparse = None
from interfaces import *
from api import *
sha = random = None
import cgi
import fieldstorage
True, False = (1==1), (0==1)
############################################################
## Wrapper Validators
############################################################
datetime_module = None
mxDateTime_module = None
def import_datetime(module_type):
global datetime_module, mxDateTime_module
if module_type is None:
try:
if datetime_module is None:
import datetime as datetime_module
return datetime_module
except ImportError:
if mxDateTime_module is None:
from mx import DateTime as mxDateTime_module
return mxDateTime_module
module_type = module_type.lower()
assert module_type in ('datetime', 'mxdatetime')
if module_type == 'datetime':
if datetime_module is None:
import datetime as datetime_module
return datetime_module
else:
if mxDateTime_module is None:
from mx import DateTime as mxDateTime_module
return mxDateTime_module
def datetime_now(module):
if module.__name__ == 'datetime':
return module.datetime.now()
else:
return module.now()
def datetime_makedate(module, year, month, day):
if module.__name__ == 'datetime':
return module.date(year, month, day)
else:
try:
return module.DateTime(year, month, day)
except module.RangeError, e:
raise ValueError(str(e))
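# Illustrative sketch (not part of the original API, and not used by the
# library): how the three helpers above are meant to be combined. It works
# with either the stdlib datetime module or mx.DateTime, whichever
# import_datetime() selects.
def _example_datetime_helpers():
    dt_mod = import_datetime(None)  # stdlib datetime if available, else mx.DateTime
    now = datetime_now(dt_mod)      # "now" from whichever module was chosen
    # Both backends raise ValueError for impossible dates (e.g. Feb 30):
    return datetime_makedate(dt_mod, now.year, 1, 1)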
class ConfirmType(FancyValidator):
"""
Confirms that the input/output is of the proper type.
Uses the parameters:
subclass:
The class or a tuple of classes; the item must be an instance
of the class or a subclass.
type:
A type or tuple of types (or classes); the item must be of
the exact class or type. Subclasses are not allowed.
Examples::
>>> cint = ConfirmType(subclass=int)
>>> cint.to_python(True)
True
>>> cint.to_python('1')
Traceback (most recent call last):
...
Invalid: '1' is not a subclass of <type 'int'>
>>> cintfloat = ConfirmType(subclass=(float, int))
>>> cintfloat.to_python(1.0), cintfloat.from_python(1.0)
(1.0, 1.0)
>>> cintfloat.to_python(1), cintfloat.from_python(1)
(1, 1)
>>> cintfloat.to_python(None)
Traceback (most recent call last):
...
Invalid: None is not a subclass of one of the types <type 'float'>, <type 'int'>
>>> cint2 = ConfirmType(type=int)
>>> cint2.from_python(True)
Traceback (most recent call last):
...
Invalid: True must be of the type <type 'int'>
"""
subclass = None
type = None
messages = {
'subclass': "%(object)r is not a subclass of %(subclass)s",
'inSubclass': "%(object)r is not a subclass of one of the types %(subclassList)s",
'inType': "%(object)r must be one of the types %(typeList)s",
'type': "%(object)r must be of the type %(type)s",
}
def __init__(self, *args, **kw):
FancyValidator.__init__(self, *args, **kw)
if self.subclass:
if isinstance(self.subclass, list):
self.subclass = tuple(self.subclass)
elif not isinstance(self.subclass, tuple):
self.subclass = (self.subclass,)
self.validate_python = self.confirm_subclass
if self.type:
if isinstance(self.type, list):
self.type = tuple(self.type)
            elif not isinstance(self.type, tuple):
self.type = (self.type,)
self.validate_python = self.confirm_type
def confirm_subclass(self, value, state):
if not isinstance(value, self.subclass):
if len(self.subclass) == 1:
msg = self.message('subclass', state, object=value,
subclass=self.subclass[0])
else:
subclass_list = ', '.join(map(str, self.subclass))
msg = self.message('inSubclass', state, object=value,
subclassList=subclass_list)
raise Invalid(msg, value, state)
def confirm_type(self, value, state):
for t in self.type:
if type(value) is t:
break
else:
if len(self.type) == 1:
msg = self.message('type', state, object=value,
type=self.type[0])
else:
msg = self.message('inType', state, object=value,
typeList=', '.join(map(str, self.type)))
raise Invalid(msg, value, state)
return value
class Wrapper(FancyValidator):
"""
Used to convert functions to validator/converters.
You can give a simple function for `to_python`, `from_python`,
`validate_python` or `validate_other`. If that function raises an
exception, the value is considered invalid. Whatever value the
function returns is considered the converted value.
Unlike validators, the `state` argument is not used. Functions
like `int` can be used here, that take a single argument.
Examples::
>>> def downcase(v):
... return v.lower()
>>> wrap = Wrapper(to_python=downcase)
>>> wrap.to_python('This')
'this'
>>> wrap.from_python('This')
'This'
>>> wrap2 = Wrapper(from_python=downcase)
>>> wrap2.from_python('This')
'this'
>>> wrap2.from_python(1)
Traceback (most recent call last):
...
Invalid: 'int' object has no attribute 'lower'
>>> wrap3 = Wrapper(validate_python=int)
>>> wrap3.to_python('1')
'1'
>>> wrap3.to_python('a')
Traceback (most recent call last):
...
Invalid: invalid literal for int(): a
"""
func_to_python = None
func_from_python = None
func_validate_python = None
func_validate_other = None
def __init__(self, *args, **kw):
for n in ['to_python', 'from_python', 'validate_python',
'validate_other']:
if kw.has_key(n):
kw['func_%s' % n] = kw[n]
del kw[n]
FancyValidator.__init__(self, *args, **kw)
self._to_python = self.wrap(self.func_to_python)
self._from_python = self.wrap(self.func_from_python)
self.validate_python = self.wrap(self.func_validate_python)
self.validate_other = self.wrap(self.func_validate_other)
def wrap(self, func):
if not func:
return None
def result(value, state, func=func):
try:
return func(value)
except Exception, e:
raise Invalid(str(e), {}, value, state)
return result
class Constant(FancyValidator):
"""
This converter converts everything to the same thing.
I.e., you pass in the constant value when initializing, then all
values get converted to that constant value.
This is only really useful for funny situations, like::
fromEmailValidator = ValidateAny(
ValidEmailAddress(),
Constant('unknown@localhost'))
In this case, the if the email is not valid
``'unknown@localhost'`` will be used instead. Of course, you
could use ``if_invalid`` instead.
Examples::
>>> Constant('X').to_python('y')
'X'
"""
__unpackargs__ = ('value',)
def _to_python(self, value, state):
return self.value
_from_python = _to_python
############################################################
## Normal validators
############################################################
class MaxLength(FancyValidator):
"""
Invalid if the value is longer than `maxLength`. Uses len(),
so it can work for strings, lists, or anything with length.
Examples::
>>> max5 = MaxLength(5)
>>> max5.to_python('12345')
'12345'
>>> max5.from_python('12345')
'12345'
>>> max5.to_python('123456')
Traceback (most recent call last):
...
Invalid: Enter a value less than 5 characters long
>>> max5.from_python('123456')
Traceback (most recent call last):
...
Invalid: Enter a value less than 5 characters long
>>> max5.to_python([1, 2, 3])
[1, 2, 3]
>>> max5.to_python([1, 2, 3, 4, 5, 6])
Traceback (most recent call last):
...
Invalid: Enter a value less than 5 characters long
>>> max5.to_python(5)
Traceback (most recent call last):
...
Invalid: Invalid value (value with length expected)
"""
__unpackargs__ = ('maxLength',)
messages = {
'tooLong': "Enter a value less than %(maxLength)i characters long",
'invalid': "Invalid value (value with length expected)",
}
def validate_python(self, value, state):
try:
if value and \
len(value) > self.maxLength:
raise Invalid(self.message('tooLong', state,
maxLength=self.maxLength),
value, state)
else:
return None
except TypeError:
raise Invalid(self.message('invalid', state),
value, state)
class MinLength(FancyValidator):
"""
    Invalid if the value is shorter than `minLength`. Uses len(),
so it can work for strings, lists, or anything with length.
Examples::
>>> min5 = MinLength(5)
>>> min5.to_python('12345')
'12345'
>>> min5.from_python('12345')
'12345'
>>> min5.to_python('1234')
Traceback (most recent call last):
...
Invalid: Enter a value more than 5 characters long
>>> min5.from_python('1234')
Traceback (most recent call last):
...
Invalid: Enter a value more than 5 characters long
>>> min5.to_python([1, 2, 3, 4, 5])
[1, 2, 3, 4, 5]
>>> min5.to_python([1, 2, 3])
Traceback (most recent call last):
...
Invalid: Enter a value more than 5 characters long
>>> min5.to_python(5)
Traceback (most recent call last):
...
Invalid: Invalid value (value with length expected)
"""
__unpackargs__ = ('minLength',)
messages = {
'tooShort': "Enter a value more than %(minLength)i characters long",
'invalid': "Invalid value (value with length expected)",
}
def validate_python(self, value, state):
try:
if len(value) < self.minLength:
raise Invalid(self.message('tooShort', state,
minLength=self.minLength),
value, state)
except TypeError:
raise Invalid(self.message('invalid', state),
value, state)
class NotEmpty(FancyValidator):
"""
Invalid if value is empty (empty string, empty list, etc).
Generally for objects that Python considers false, except zero
which is not considered invalid.
Examples::
>>> ne = NotEmpty(messages={'empty': 'enter something'})
>>> ne.to_python('')
Traceback (most recent call last):
...
Invalid: enter something
>>> ne.to_python(0)
0
"""
messages = {
'empty': "Please enter a value",
}
def validate_python(self, value, state):
if value == 0:
# This isn't "empty" for this definition.
return value
if not value:
raise Invalid(self.message('empty', state),
value, state)
class Empty(FancyValidator):
"""
Invalid unless the value is empty. Use cleverly, if at all.
Examples::
>>> Empty.to_python(0)
Traceback (most recent call last):
...
Invalid: You cannot enter a value here
"""
messages = {
'notEmpty': "You cannot enter a value here",
}
def validate_python(self, value, state):
if value or value == 0:
raise Invalid(self.message('notEmpty', state),
value, state)
class Regex(FancyValidator):
"""
Invalid if the value doesn't match the regular expression `regex`.
The regular expression can be a compiled re object, or a string
which will be compiled for you.
Use strip=True if you want to strip the value before validation,
and as a form of conversion (often useful).
Examples::
>>> cap = Regex(r'^[A-Z]+$')
>>> cap.to_python('ABC')
'ABC'
>>> cap.from_python('abc')
Traceback (most recent call last):
...
Invalid: The input is not valid
>>> cap.to_python(1)
Traceback (most recent call last):
...
Invalid: The input must be a string (not a <type 'int'>: 1)
>>> Regex(r'^[A-Z]+$', strip=True).to_python(' ABC ')
'ABC'
>>> Regex(r'this', regexOps=('I',)).to_python('THIS')
'THIS'
"""
regexOps = ()
strip = False
regex = None
__unpackargs__ = ('regex',)
messages = {
'invalid': "The input is not valid",
}
def __init__(self, *args, **kw):
FancyValidator.__init__(self, *args, **kw)
if isinstance(self.regex, str):
ops = 0
assert not isinstance(self.regexOps, str), (
"regexOps should be a list of options from the re module "
"(names, or actual values)")
for op in self.regexOps:
if isinstance(op, str):
ops |= getattr(re, op)
else:
ops |= op
self.regex = re.compile(self.regex, ops)
def validate_python(self, value, state):
self.assert_string(value, state)
if self.strip and (isinstance(value, str) or isinstance(value, unicode)):
value = value.strip()
if not self.regex.search(value):
raise Invalid(self.message('invalid', state),
value, state)
def _to_python(self, value, state):
if self.strip and \
(isinstance(value, str) or isinstance(value, unicode)):
return value.strip()
return value
class PlainText(Regex):
"""
Test that the field contains only letters, numbers, underscore,
and the hyphen. Subclasses Regex.
Examples::
>>> PlainText.to_python('_this9_')
'_this9_'
>>> PlainText.from_python(' this ')
Traceback (most recent call last):
...
Invalid: Enter only letters, numbers, or _ (underscore)
>>> PlainText(strip=True).to_python(' this ')
'this'
>>> PlainText(strip=True).from_python(' this ')
' this '
"""
regex = r"^[a-zA-Z_\-0-9]*$"
messages = {
'invalid': 'Enter only letters, numbers, or _ (underscore)',
}
class OneOf(FancyValidator):
"""
Tests that the value is one of the members of a given list.
    If ``testValueList=True``, then if the input value is a list or
tuple, all the members of the sequence will be checked (i.e., the
input must be a subset of the allowed values).
Use ``hideList=True`` to keep the list of valid values out of the
error message in exceptions.
Examples::
>>> oneof = OneOf([1, 2, 3])
>>> oneof.to_python(1)
1
>>> oneof.to_python(4)
Traceback (most recent call last):
...
Invalid: Value must be one of: 1; 2; 3 (not 4)
>>> oneof(testValueList=True).to_python([2, 3, [1, 2, 3]])
[2, 3, [1, 2, 3]]
>>> oneof.to_python([2, 3, [1, 2, 3]])
Traceback (most recent call last):
...
Invalid: Value must be one of: 1; 2; 3 (not [2, 3, [1, 2, 3]])
"""
list = None
testValueList = False
hideList = False
__unpackargs__ = ('list',)
messages = {
'invalid': "Invalid value",
'notIn': "Value must be one of: %(items)s (not %(value)r)",
}
def validate_python(self, value, state):
if self.testValueList and isinstance(value, (list, tuple)):
for v in value:
self.validate_python(v, state)
else:
if not value in self.list:
if self.hideList:
raise Invalid(self.message('invalid', state),
value, state)
else:
items = '; '.join(map(str, self.list))
raise Invalid(self.message('notIn', state,
items=items,
value=value),
value, state)
class DictConverter(FancyValidator):
"""
Converts values based on a dictionary which has values as keys for
the resultant values.
If ``allowNull`` is passed, it will not balk if a false value
(e.g., '' or None) is given (it will return None in these cases).
to_python takes keys and gives values, from_python takes values and
gives keys.
If you give hideDict=True, then the contents of the dictionary
will not show up in error messages.
Examples::
>>> dc = DictConverter({1: 'one', 2: 'two'})
>>> dc.to_python(1)
'one'
>>> dc.from_python('one')
1
>>> dc.to_python(3)
Traceback (most recent call last):
Invalid: Enter a value from: 1; 2
>>> dc2 = dc(hideDict=True)
>>> dc2.hideDict
True
>>> dc2.dict
{1: 'one', 2: 'two'}
>>> dc2.to_python(3)
Traceback (most recent call last):
Invalid: Choose something
>>> dc.from_python('three')
Traceback (most recent call last):
Invalid: Nothing in my dictionary goes by the value 'three'. Choose one of: 'one'; 'two'
"""
dict = None
hideDict = False
__unpackargs__ = ('dict',)
messages = {
'keyNotFound': "Choose something",
'chooseKey': "Enter a value from: %(items)s",
'valueNotFound': "That value is not known",
'chooseValue': "Nothing in my dictionary goes by the value %(value)s. Choose one of: %(items)s",
}
def _to_python(self, value, state):
try:
return self.dict[value]
except KeyError:
if self.hideDict:
raise Invalid(self.message('keyNotFound', state),
value, state)
else:
items = '; '.join(map(repr, self.dict.keys()))
raise Invalid(self.message('chooseKey', state,
items=items),
value, state)
def _from_python(self, value, state):
for k, v in self.dict.items():
if value == v:
return k
if self.hideDict:
raise Invalid(self.message('valueNotFound', state),
value, state)
else:
items = '; '.join(map(repr, self.dict.values()))
raise Invalid(self.message('chooseValue', state,
value=repr(value),
items=items),
value, state)
class IndexListConverter(FancyValidator):
"""
Converts a index (which may be a string like '2') to the value in
the given list.
Examples::
>>> index = IndexListConverter(['zero', 'one', 'two'])
>>> index.to_python(0)
'zero'
>>> index.from_python('zero')
0
>>> index.to_python('1')
'one'
>>> index.to_python(5)
Traceback (most recent call last):
Invalid: Index out of range
>>> index.to_python(None)
Traceback (most recent call last):
Invalid: Must be an integer index
>>> index.from_python('five')
Traceback (most recent call last):
Invalid: Item 'five' was not found in the list
"""
list = None
__unpackargs__ = ('list',)
messages = {
'integer': "Must be an integer index",
'outOfRange': "Index out of range",
'notFound': "Item %(value)s was not found in the list",
}
def _to_python(self, value, state):
try:
value = int(value)
except (ValueError, TypeError):
raise Invalid(self.message('integer', state),
value, state)
try:
return self.list[value]
except IndexError:
raise Invalid(self.message('outOfRange', state),
value, state)
def _from_python(self, value, state):
for i in range(len(self.list)):
if self.list[i] == value:
return i
raise Invalid(self.message('notFound', state,
value=repr(value)),
value, state)
class DateValidator(FancyValidator):
"""
Validates that a date is within the given range. Be sure to call
DateConverter first if you aren't expecting mxDateTime input.
earliest_date and latest_date may be functions; if so, they will
be called each time before validating.
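    Examples (a sketch using the standard ``datetime`` module; the
    formatted date in the error message is locale-dependent)::
        >>> import datetime
        >>> v = DateValidator(earliest_date=datetime.datetime(2000, 1, 1))
        >>> v.validate_python(datetime.datetime(2001, 6, 1), None)
        >>> v.validate_python(datetime.datetime(1999, 6, 1), None)
        Traceback (most recent call last):
            ...
        Invalid: Date must be after Saturday, 01 January 2000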
"""
earliest_date = None
latest_date = None
after_now = False
# Use 'datetime' to force the Python 2.3+ datetime module, or
# 'mxDateTime' to force the mxDateTime module (None means use
# datetime, or if not present mxDateTime)
datetime_module = None
messages = {
'after': "Date must be after %(date)s",
'before': "Date must be before %(date)s",
# Double %'s, because this will be substituted twice:
'date_format': "%%A, %%d %%B %%Y",
'future': "The date must be sometime in the future",
}
def validate_python(self, value, state):
if self.earliest_date:
if callable(self.earliest_date):
earliest_date = self.earliest_date()
else:
earliest_date = self.earliest_date
if value < earliest_date:
date_formatted = earliest_date.strftime(
self.message('date_format', state))
raise Invalid(
self.message('after', state,
date=date_formatted),
value, state)
if self.latest_date:
if callable(self.latest_date):
latest_date = self.latest_date()
else:
latest_date = self.latest_date
if value > latest_date:
date_formatted = latest_date.strftime(
self.message('date_format', state))
raise Invalid(
self.message('before', state,
date=date_formatted),
value, state)
if self.after_now:
dt_mod = import_datetime(self.datetime_module)
now = datetime_now(dt_mod)
if value < now:
date_formatted = now.strftime(
self.message('date_format', state))
raise Invalid(
self.message('future', state,
date=date_formatted),
value, state)
class Bool(FancyValidator):
"""
    Always valid; returns True or False based on the value and the
    existence of the value.
Examples::
>>> Bool.to_python(0)
False
>>> Bool.to_python(1)
True
>>> Bool.to_python('')
False
>>> Bool.to_python(None)
False
"""
if_missing = False
def _to_python(self, value, state):
return bool(value)
_from_python = _to_python
class Int(FancyValidator):
"""
Convert a value to an integer.
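    Examples::
        >>> Int.to_python('10')
        10
        >>> Int.to_python('ten')
        Traceback (most recent call last):
            ...
        Invalid: Please enter an integer value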
"""
messages = {
'integer': "Please enter an integer value",
}
def _to_python(self, value, state):
try:
return int(value)
except (ValueError, TypeError):
raise Invalid(self.message('integer', state),
value, state)
_from_python = _to_python
class Number(FancyValidator):
"""
Convert a value to a float or integer. Tries to convert it to
an integer if no information is lost.
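    Examples::
        >>> Number.to_python('4.5')
        4.5
        >>> Number.to_python('4.0')
        4
        >>> Number.to_python('four')
        Traceback (most recent call last):
            ...
        Invalid: Please enter a number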
"""
messages = {
'number': "Please enter a number",
}
def _to_python(self, value, state):
try:
value = float(value)
if value == int(value):
return int(value)
return value
except ValueError:
raise Invalid(self.message('number', state),
value, state)
class String(FancyValidator):
"""
Converts things to string, but treats empty things as the empty
string.
Also takes a `max` and `min` argument, and the string length must
fall in that range.
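    Examples::
        >>> String(max=5).to_python('12345')
        '12345'
        >>> String(max=5).to_python('123456')
        Traceback (most recent call last):
            ...
        Invalid: Enter a value less than 5 characters long
        >>> String.from_python(2)
        '2'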
"""
min = None
max = None
messages = {
'tooLong': "Enter a value less than %(max)i characters long",
'tooShort': "Enter a value %(min)i characters long or more",
}
def validate_python(self, value, state):
if (self.max is not None and value is not None
and len(value) > self.max):
raise Invalid(self.message('tooLong', state,
max=self.max),
value, state)
if (self.min is not None
and (not value or len(value) < self.min)):
raise Invalid(self.message('tooShort', state,
min=self.min),
value, state)
def _from_python(self, value, state):
if value:
return str(value)
if value == 0:
return str(value)
return ""
class Set(FancyValidator):
"""
This is for when you think you may return multiple values for a
certain field.
This way the result will always be a list, even if there's only
one result. It's equivalent to ForEach(convertToList=True).
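    Examples::
        >>> Set.to_python('this')
        ['this']
        >>> Set.to_python(['this', 'that'])
        ['this', 'that']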
"""
if_empty = ()
def _to_python(self, value, state):
if isinstance(value, (list, tuple)):
return value
elif value is None:
return []
else:
return [value]
class Email(FancyValidator):
"""
Validate an email address.
If you pass ``resolve_domain=True``, then it will try to resolve
the domain name to make sure it's valid. This takes longer, of
course. You must have the `pyDNS <http://pydns.sf.net>`_ modules
installed to look up MX records.
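    Examples (with the default ``resolve_domain=False``)::
        >>> Email.to_python(' test@foo.com ')
        'test@foo.com'
        >>> Email.to_python('test')
        Traceback (most recent call last):
            ...
        Invalid: An email address must contain a single @
        >>> Email.to_python('test@foobar')
        Traceback (most recent call last):
            ...
        Invalid: The domain portion of the email address is invalid (the portion after the @: foobar)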
"""
resolve_domain = False
usernameRE = re.compile(r"^[a-z0-9\_\-']+", re.I)
domainRE = re.compile(r"^[a-z0-9\.\-]+\.[a-z]+$", re.I)
messages = {
'empty': 'Please enter an email address',
'noAt': 'An email address must contain a single @',
'badUsername': 'The username portion of the email address is invalid (the portion before the @: %(username)s)',
'badDomain': 'The domain portion of the email address is invalid (the portion after the @: %(domain)s)',
'domainDoesNotExist': 'The domain of the email address does not exist (the portion after the @: %(domain)s)',
}
def __init__(self, *args, **kw):
global mxlookup
FancyValidator.__init__(self, *args, **kw)
if self.resolve_domain:
if mxlookup is None:
try:
from DNS.lazy import mxlookup
except ImportError:
import warnings
warnings.warn(
"pyDNS <http://pydns.sf.net> is not installed on "
"your system (or the DNS package cannot be found). "
"I cannot resolve domain names in addresses")
raise
def validate_python(self, value, state):
if not value:
raise Invalid(
self.message('empty', state),
value, state)
value = value.strip()
splitted = value.split('@', 1)
if not len(splitted) == 2:
raise Invalid(
self.message('noAt', state),
value, state)
if not self.usernameRE.search(splitted[0]):
raise Invalid(
self.message('badUsername', state,
username=splitted[0]),
value, state)
if not self.domainRE.search(splitted[1]):
raise Invalid(
self.message('badDomain', state,
domain=splitted[1]),
value, state)
if self.resolve_domain:
domains = mxlookup(splitted[1])
if not domains:
raise Invalid(
self.message('domainDoesNotExist', state,
domain=splitted[1]),
value, state)
def _to_python(self, value, state):
return value.strip()
class URL(FancyValidator):
"""
Validate a URL, either http://... or https://. If check_exists
is true, then we'll actually make a request for the page.
If add_http is true, then if no scheme is present we'll add
http://
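    Examples (with the default ``check_exists=False``)::
        >>> URL.to_python('formencode.org')
        'http://formencode.org'
        >>> URL.to_python('https://foo')
        Traceback (most recent call last):
            ...
        Invalid: That is not a valid URL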
"""
check_exists = False
add_http = True
    url_re = re.compile(r'^(http|https)://[a-z\-\.]+\.[a-z]+(?::[0-9]+)?(?:/.*)?$', re.I)
scheme_re = re.compile(r'^[a-zA-Z]+:')
messages = {
'noScheme': 'You must start your URL with http://, https://, etc',
'badURL': 'That is not a valid URL',
'httpError': 'An error occurred when trying to access the URL: %(error)s',
'notFound': 'The server responded that the page could not be found',
'status': 'The server responded with a bad status code (%(status)s)',
}
def _to_python(self, value, state):
value = value.strip()
if self.add_http:
if not self.scheme_re.search(value):
value = 'http://' + value
match = self.scheme_re.search(value)
if not match:
raise Invalid(
self.message('noScheme', state),
value, state)
value = match.group(0).lower() + value[len(match.group(0)):]
if not self.url_re.search(value):
raise Invalid(
self.message('badURL', state),
value, state)
if self.check_exists and (value.startswith('http://')
or value.startswith('https://')):
self._check_url_exists(value, state)
return value
def _check_url_exists(self, url, state):
global httplib, urlparse
if httplib is None:
import httplib
if urlparse is None:
import urlparse
scheme, netloc, path, params, query, fragment = urlparse.urlparse(
url, 'http')
if scheme == 'http':
ConnClass = httplib.HTTPConnection
else:
ConnClass = httplib.HTTPSConnection
try:
conn = ConnClass(netloc)
if params:
path += ';' + params
if query:
path += '?' + query
conn.request('HEAD', path)
res = conn.getresponse()
except httplib.HTTPException, e:
raise Invalid(
self.message('httpError', state, error=e),
                url, state)
else:
if res.status == 404:
raise Invalid(
self.message('notFound', state),
                    url, state)
if res.status != 200:
raise Invalid(
self.message('status', state, status=res.status),
                    url, state)
class StateProvince(FancyValidator):
"""
Valid state or province code (two-letter).
Well, for now I don't know the province codes, but it does state
codes. Give your own `states` list to validate other state-like
codes; give `extraStates` to add values without losing the current
state values.
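    Examples::
        >>> StateProvince.to_python('ny')
        'NY'
        >>> StateProvince.to_python('XX')
        Traceback (most recent call last):
            ...
        Invalid: That is not a valid state code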
"""
states = ['AK', 'AL', 'AR', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE',
'FL', 'GA', 'HI', 'IA', 'ID', 'IN', 'IL', 'KS', 'KY',
'LA', 'MA', 'MD', 'ME', 'MI', 'MN', 'MO', 'MS', 'MT',
'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH',
'OK', 'OR', 'PA', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT',
'VA', 'VT', 'WA', 'WI', 'WV', 'WY']
extraStates = []
__unpackargs__ = ('extraStates',)
messages = {
'empty': 'Please enter a state code',
'wrongLength': 'Please enter a state code with TWO letters',
'invalid': 'That is not a valid state code',
}
def validate_python(self, value, state):
value = str(value).strip().upper()
if not value:
raise Invalid(
self.message('empty', state),
value, state)
if not value or len(value) != 2:
raise Invalid(
self.message('wrongLength', state),
value, state)
if value not in self.states \
and not (self.extraStates and value in self.extraStates):
raise Invalid(
self.message('invalid', state),
value, state)
def _to_python(self, value, state):
return str(value).strip().upper()
class PhoneNumber(FancyValidator):
"""
Validates, and converts to ###-###-####, optionally with
extension (as ext.##...)
@@: should add international phone number support
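    Examples::
        >>> p = PhoneNumber()
        >>> p.from_python('1-510-555-4123')
        '510-555-4123'
        >>> p.from_python('510.555.4123 ext. 45')
        '510-555-4123 ext.45'
        >>> p.to_python('callme')
        Traceback (most recent call last):
            ...
        Invalid: Please enter a number, with area code, in the form ###-###-####, optionally with "ext.####"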
"""
_phoneRE = re.compile(r'^\s*(?:1-)?(\d\d\d)[\- \.]?(\d\d\d)[\- \.]?(\d\d\d\d)(?:\s*ext\.?\s*(\d+))?\s*$', re.I)
messages = {
'phoneFormat': 'Please enter a number, with area code, in the form ###-###-####, optionally with "ext.####"',
}
def _to_python(self, value, state):
self.assert_string(value, state)
match = self._phoneRE.search(value)
if not match:
raise Invalid(
self.message('phoneFormat', state),
value, state)
return value
def _from_python(self, value, state):
self.assert_string(value, state)
match = self._phoneRE.search(value)
if not match:
raise Invalid(self.message('phoneFormat', state),
value, state)
result = '%s-%s-%s' % (match.group(1), match.group(2), match.group(3))
if match.group(4):
result = result + " ext.%s" % match.group(4)
return result
class FieldStorageUploadConverter(FancyValidator):
"""
Converts a cgi.FieldStorage instance to
a value that FormEncode can use for file
uploads.
"""
def _to_python(self, value, state):
if isinstance(value, cgi.FieldStorage):
return fieldstorage.convert_fieldstorage(value)
else:
return value
class DateConverter(FancyValidator):
"""
    Validates and converts a textual date, like mm/yy, dd/mm/yy,
    dd-mm-yy, etc. The month is assumed to come first (mm/dd order)
    unless ``month_style`` is set to 'dd/mm/yyyy'.
    Accepts English month names, also abbreviated. Returns the value
    as a datetime.date object (or an mx.DateTime object if the
    datetime module is unavailable). Two-digit years are assumed to
    be within 1950-2020, with years from 21-49 being ambiguous and
    signaling an error.
Use accept_day=False if you just want a month/year (like for a
credit card expiration date).
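    Examples (with the standard ``datetime`` module)::
        >>> date = DateConverter()
        >>> date.to_python('6/15/2004')
        datetime.date(2004, 6, 15)
        >>> date.to_python('25/12/2004')
        Traceback (most recent call last):
            ...
        Invalid: Please enter a month from 1 to 12
        >>> DateConverter(month_style='dd/mm/yyyy').to_python('25/12/2004')
        datetime.date(2004, 12, 25)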
"""
## @@: accepts only US-style dates
accept_day = True
# also allowed: 'dd/mm/yyyy'
month_style = 'mm/dd/yyyy'
# Use 'datetime' to force the Python 2.3+ datetime module, or
# 'mxDateTime' to force the mxDateTime module (None means use
# datetime, or if not present mxDateTime)
datetime_module = None
    _day_date_re = re.compile(r'^\s*(\d\d?)[\-\./\\](\d\d?|jan|january|feb|february|febuary|mar|march|apr|april|may|jun|june|jul|july|aug|august|sep|sept|september|oct|october|nov|november|dec|december)[\-\./\\](\d\d\d?\d?)\s*$', re.I)
    _month_date_re = re.compile(r'^\s*(\d\d?|jan|january|feb|february|febuary|mar|march|apr|april|may|jun|june|jul|july|aug|august|sep|sept|september|oct|october|nov|november|dec|december)[\-\./\\](\d\d\d?\d?)\s*$', re.I)
_month_names = {
'jan': 1, 'january': 1,
        'feb': 2, 'february': 2, 'febuary': 2,  # 'febuary' kept as a common misspelling
'mar': 3, 'march': 3,
'apr': 4, 'april': 4,
'may': 5,
'jun': 6, 'june': 6,
'jul': 7, 'july': 7,
'aug': 8, 'august': 8,
'sep': 9, 'sept': 9, 'september': 9,
'oct': 10, 'october': 10,
'nov': 11, 'november': 11,
'dec': 12, 'december': 12,
}
## @@: Feb. should be leap-year aware (but mxDateTime does catch that)
_monthDays = {
1: 31, 2: 29, 3: 31, 4: 30, 5: 31, 6: 30, 7: 31, 8: 31,
9: 30, 10: 31, 11: 30, 12: 31}
messages = {
'badFormat': 'Please enter the date in the form %(format)s',
'monthRange': 'Please enter a month from 1 to 12',
'invalidDay': 'Please enter a valid day',
'dayRange': 'That month only has %(days)i days',
'invalidDate': 'That is not a valid day (%(exception)s)',
'unknownMonthName': "Unknown month name: %(month)s",
'invalidYear': 'Please enter a number for the year',
'fourDigitYear': 'Please enter a four-digit year',
'wrongFormat': 'Please enter the date in the form %(format)s',
}
def _to_python(self, value, state):
if self.accept_day:
return self.convert_day(value, state)
else:
return self.convert_month(value, state)
def convert_day(self, value, state):
self.assert_string(value, state)
match = self._day_date_re.search(value)
if not match:
raise Invalid(self.message('badFormat', state,
format=self.month_style),
value, state)
day = int(match.group(1))
try:
month = int(match.group(2))
        except ValueError:  # int() fails on month names like 'jan'
month = self.make_month(match.group(2), state)
else:
if self.month_style == 'mm/dd/yyyy':
month, day = day, month
year = self.make_year(match.group(3), state)
if month > 12 or month < 1:
raise Invalid(self.message('monthRange', state),
value, state)
if day < 1:
raise Invalid(self.message('invalidDay', state),
value, state)
if self._monthDays[month] < day:
raise Invalid(self.message('dayRange', state,
days=self._monthDays[month]),
value, state)
dt_mod = import_datetime(self.datetime_module)
try:
return datetime_makedate(dt_mod, year, month, day)
except ValueError, v:
raise Invalid(self.message('invalidDate', state,
exception=str(v)),
value, state)
def make_month(self, value, state):
try:
return int(value)
except ValueError:
value = value.lower().strip()
if self._month_names.has_key(value):
return self._month_names[value]
else:
raise Invalid(self.message('unknownMonthName', state,
month=value),
value, state)
def make_year(self, year, state):
try:
year = int(year)
except ValueError:
raise Invalid(self.message('invalidYear', state),
year, state)
if year <= 20:
year = year + 2000
if year >= 50 and year < 100:
year = year + 1900
if year > 20 and year < 50:
raise Invalid(self.message('fourDigitYear', state),
year, state)
return year
def convert_month(self, value, state):
match = self._month_date_re.search(value)
if not match:
raise Invalid(self.message('wrongFormat', state,
format='mm/yyyy'),
value, state)
month = self.make_month(match.group(1), state)
year = self.make_year(match.group(2), state)
if month > 12 or month < 1:
raise Invalid(self.message('monthRange', state),
value, state)
dt_mod = import_datetime(self.datetime_module)
return datetime_makedate(dt_mod, year, month, 1)
def _from_python(self, value, state):
if self.if_empty is not NoDefault and not value:
return ''
if self.accept_day:
return self.unconvert_day(value, state)
else:
return self.unconvert_month(value, state)
def unconvert_day(self, value, state):
# @@ ib: double-check, improve
return value.strftime("%m/%d/%Y")
def unconvert_month(self, value, state):
# @@ ib: double-check, improve
return value.strftime("%m/%Y")
class TimeConverter(FancyValidator):
"""
Converts times in the format HH:MM:SSampm to (h, m, s).
Seconds are optional.
For ampm, set use_ampm = True. For seconds, use_seconds = True.
Use 'optional' for either of these to make them optional.
Examples::
>>> tim = TimeConverter()
>>> tim.to_python('8:30')
(8, 30)
>>> tim.to_python('20:30')
(20, 30)
>>> tim.to_python('30:00')
Traceback (most recent call last):
...
Invalid: You must enter an hour in the range 0-23
>>> tim.to_python('13:00pm')
Traceback (most recent call last):
...
Invalid: You must enter an hour in the range 1-12
>>> tim.to_python('12:-1')
Traceback (most recent call last):
...
Invalid: You must enter a minute in the range 0-59
>>> tim.to_python('12:02pm')
(12, 2)
>>> tim.to_python('12:02am')
(0, 2)
>>> tim.to_python('1:00PM')
(13, 0)
>>> tim.from_python((13, 0))
'13:00:00'
>>> tim2 = tim(use_ampm=True, use_seconds=False)
>>> tim2.from_python((13, 0))
'1:00pm'
>>> tim2.from_python((0, 0))
'12:00am'
>>> tim2.from_python((12, 0))
'12:00pm'
"""
use_ampm = 'optional'
prefer_ampm = False
use_seconds = 'optional'
messages = {
'noAMPM': 'You must indicate AM or PM',
        'tooManyColon': 'There are too many :\'s',
'noSeconds': 'You may not enter seconds',
'secondsRequired': 'You must enter seconds',
'minutesRequired': 'You must enter minutes (after a :)',
'badNumber': 'The %(part)s value you gave is not a number: %(number)r',
'badHour': 'You must enter an hour in the range %(range)s',
'badMinute': 'You must enter a minute in the range 0-59',
'badSecond': 'You must enter a second in the range 0-59',
}
def _to_python(self, value, state):
time = value.strip()
explicit_ampm = False
if self.use_ampm:
last_two = time[-2:].lower()
if last_two not in ('am', 'pm'):
if self.use_ampm != 'optional':
raise Invalid(
self.message('noAMPM', state),
value, state)
else:
offset = 0
else:
explicit_ampm = True
if last_two == 'pm':
offset = 12
else:
offset = 0
time = time[:-2]
else:
offset = 0
parts = time.split(':')
if len(parts) > 3:
raise Invalid(
self.message('tooManyColon', state),
value, state)
if len(parts) == 3 and not self.use_seconds:
raise Invalid(
self.message('noSeconds', state),
value, state)
if (len(parts) == 2
and self.use_seconds
and self.use_seconds != 'optional'):
raise Invalid(
self.message('secondsRequired', state),
value, state)
if len(parts) == 1:
raise Invalid(
self.message('minutesRequired', state),
value, state)
try:
hour = int(parts[0])
except ValueError:
raise Invalid(
self.message('badNumber', state, number=parts[0], part='hour'),
value, state)
if explicit_ampm:
if hour > 12 or hour < 1:
raise Invalid(
self.message('badHour', state, number=hour, range='1-12'),
value, state)
if hour == 12 and offset == 12:
# 12pm == 12
pass
elif hour == 12 and offset == 0:
# 12am == 0
hour = 0
else:
hour += offset
else:
if hour > 23 or hour < 0:
raise Invalid(
self.message('badHour', state,
number=hour, range='0-23'),
value, state)
try:
minute = int(parts[1])
except ValueError:
raise Invalid(
self.message('badNumber', state,
number=parts[1], part='minute'),
value, state)
if minute > 59 or minute < 0:
raise Invalid(
self.message('badMinute', state, number=minute),
value, state)
if len(parts) == 3:
try:
second = int(parts[2])
except ValueError:
                raise Invalid(
                    self.message('badNumber', state,
                                 number=parts[2], part='second'),
                    value, state)
if second > 59 or second < 0:
raise Invalid(
self.message('badSecond', state, number=second),
value, state)
else:
second = None
if second is None:
return (hour, minute)
else:
return (hour, minute, second)
def _from_python(self, value, state):
if isinstance(value, (str, unicode)):
return value
        if hasattr(value, 'hour'):
            hour, minute = value.hour, value.minute
            second = getattr(value, 'second', 0)
elif len(value) == 3:
hour, minute, second = value
elif len(value) == 2:
hour, minute = value
second = 0
ampm = ''
if ((self.use_ampm == 'optional' and self.prefer_ampm)
or (self.use_ampm and self.use_ampm != 'optional')):
ampm = 'am'
if hour > 12:
hour -= 12
ampm = 'pm'
elif hour == 12:
ampm = 'pm'
elif hour == 0:
hour = 12
if self.use_seconds:
return '%i:%02i:%02i%s' % (hour, minute, second, ampm)
else:
return '%i:%02i%s' % (hour, minute, ampm)
class PostalCode(Regex):
"""
US Postal codes (aka Zip Codes).
"""
regex = r'^\d\d\d\d\d(?:-\d\d\d\d)?$'
strip = True
messages = {
'invalid': 'Please enter a zip code (5 digits)',
}
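# Usage sketch (hypothetical, not in the original source): Regex-based
# validators such as PostalCode strip the input and match it against the
# class-level pattern.
#
#   >>> PostalCode().to_python(' 55555-1234 ')
#   '55555-1234'
#   >>> PostalCode().to_python('555')          # fails the regex
#   Traceback (most recent call last):
#       ...
#   Invalid: Please enter a zip code (5 digits)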
class StripField(FancyValidator):
"""
Take a field from a dictionary, removing the key from the
dictionary.
``name`` is the key. The field value and a new copy of the
dictionary with that field removed are returned.
"""
__unpackargs__ = ('name',)
messages = {
'missing': 'The name %(name)s is missing',
}
def _to_python(self, valueDict, state):
v = valueDict.copy()
try:
field = v[self.name]
del v[self.name]
except KeyError:
raise Invalid(self.message('missing', state,
name=repr(self.name)),
valueDict, state)
return field, v
class StringBoolean(FancyValidator):
# Originally from TurboGears
"""
Converts a string to a boolean.
Values like 'true' and 'false' are considered True and False,
respectively; anything in ``true_values`` is true, anything in
``false_values`` is false, case-insensitive). The first item of
those lists is considered the preferred form.
"""
true_values = ['true', 't', 'yes', 'y', 'on', '1']
false_values = ['false', 'f', 'no', 'n', 'off', '0']
messages = { "string" : "Value should be %(true)r or %(false)r" }
def _to_python(self, value, state):
if isinstance(value, (str, unicode)):
value = value.strip().lower()
if value in self.true_values:
return True
if not value or value in self.false_values:
return False
raise Invalid(self.message("string", state,
true=self.true_values[0],
false=self.false_values[0]),
value, state)
return bool(value)
def _from_python(self, value, state):
if value:
return self.true_values[0]
else:
return self.false_values[0]
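# Round-trip sketch (illustrative only): the first item of each list is the
# preferred serialized form.
#
#   >>> sb = StringBoolean()
#   >>> sb.to_python('Yes'), sb.to_python('off'), sb.to_python('')
#   (True, False, False)
#   >>> sb.from_python(True)
#   'true'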
class SignedString(FancyValidator):
"""
Encodes a string into a signed string, and base64 encodes both the
signature string and a random nonce.
It is up to you to provide a secret, and to keep the secret handy
and consistent.
"""
messages = {
'malformed': 'Value does not contain a signature',
'badsig': 'Signature is not correct',
}
secret = None
nonce_length = 4
def _to_python(self, value, state):
global sha
if not sha:
import sha
        assert self.secret, (
            "You must give a secret")
parts = value.split(None, 1)
if not parts or len(parts) == 1:
raise Invalid(self.message('malformed', state),
value, state)
sig, rest = parts
sig = sig.decode('base64')
        rest = rest.decode('base64')
nonce = rest[:self.nonce_length]
rest = rest[self.nonce_length:]
digest = sha.new(self.secret+nonce+rest).digest()
if digest != sig:
raise Invalid(self.message('badsig', state),
value, state)
return rest
def _from_python(self, value, state):
global sha
if not sha:
import sha
nonce = self.make_nonce()
value = str(value)
        digest = sha.new(self.secret+nonce+value).digest()
return self.encode(digest)+' '+self.encode(nonce+value)
def encode(self, value):
return value.encode('base64').strip().replace('\n', '')
def make_nonce(self):
global random
if not random:
import random
return ''.join([
chr(random.randrange(256))
for i in range(self.nonce_length)])
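# Round-trip sketch (illustrative only; values are hypothetical): from_python
# produces "<b64 signature> <b64 nonce+value>", and to_python verifies the
# signature and strips the nonce.
#
#   >>> ss = SignedString(secret='s3cr3t')
#   >>> token = ss.from_python('hello', None)
#   >>> ss.to_python(token, None)
#   'hello'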
class FormValidator(FancyValidator):
"""
A FormValidator is something that can be chained with a
Schema. Unlike normal chaining the FormValidator can
validate forms that aren't entirely valid.
The important method is .validate(), of course. It gets passed a
dictionary of the (processed) values from the form. If you have
.validate_partial_form set to True, then it will get the incomplete
values as well -- use .has_key() to test if the field was able to
process any particular field.
Anyway, .validate() should return a string or a dictionary. If a
string, it's an error message that applies to the whole form. If
not, then it should be a dictionary of fieldName: errorMessage.
The special key "form" is the error message for the form as a whole
(i.e., a string is equivalent to {"form": string}).
Return None on no errors.
"""
validate_partial_form = False
validate_partial_python = None
validate_partial_other = None
class FieldsMatch(FormValidator):
"""
Tests that the given fields match, i.e., are identical. Useful
for password+confirmation fields. Pass the list of field names in
as `field_names`.
"""
show_match = False
field_names = None
validate_partial_form = True
__unpackargs__ = ('*', 'field_names')
messages = {
'invalid': "Fields do not match (should be %(match)s)",
'invalidNoMatch': "Fields do not match",
}
def validate_partial(self, field_dict, state):
for name in self.field_names:
if not field_dict.has_key(name):
return
self.validate_python(field_dict, state)
def validate_python(self, field_dict, state):
ref = field_dict[self.field_names[0]]
errors = {}
for name in self.field_names[1:]:
if field_dict.get(name, '') != ref:
if self.show_match:
errors[name] = self.message('invalid', state,
match=ref)
else:
errors[name] = self.message('invalidNoMatch', state)
if errors:
error_list = errors.items()
error_list.sort()
error_message = '<br>\n'.join(
['%s: %s' % (name, value) for name, value in error_list])
raise Invalid(error_message,
field_dict, state,
error_dict=errors)
class CreditCardValidator(FormValidator):
"""
Checks that credit card numbers are valid (if not real).
You pass in the name of the field that has the credit card
type and the field with the credit card number. The credit
card type should be one of "visa", "mastercard", "amex",
"dinersclub", "discover", "jcb".
You must check the expiration date yourself (there is no
relation between CC number/types and expiration dates).
"""
validate_partial_form = True
cc_type_field = 'ccType'
cc_number_field = 'ccNumber'
__unpackargs__ = ('cc_type_field', 'cc_number_field')
messages = {
'notANumber': "Please enter only the number, no other characters",
'badLength': "You did not enter a valid number of digits",
'invalidNumber': "That number is not valid",
}
def validate_partial(self, field_dict, state):
if not field_dict.get(self.cc_type_field, None) \
or not field_dict.get(self.cc_number_field, None):
return None
self.validate_python(field_dict, state)
def validate_python(self, field_dict, state):
errors = self._validateReturn(field_dict, state)
if errors:
error_list = errors.items()
error_list.sort()
raise Invalid(
'<br>\n'.join(["%s: %s" % (name, value)
for name, value in error_list]),
field_dict, state, error_dict=errors)
def _validateReturn(self, field_dict, state):
ccType = field_dict[self.cc_type_field].lower().strip()
number = field_dict[self.cc_number_field].strip()
number = number.replace(' ', '')
number = number.replace('-', '')
try:
long(number)
except ValueError:
return {self.cc_number_field: self.message('notANumber', state)}
assert self._cardInfo.has_key(ccType), (
"I can't validate that type of credit card")
foundValid = False
validLength = False
for prefix, length in self._cardInfo[ccType]:
if len(number) == length:
validLength = True
if (len(number) == length
and number.startswith(prefix)):
foundValid = True
break
if not validLength:
return {self.cc_number_field: self.message('badLength', state)}
if not foundValid:
return {self.cc_number_field: self.message('invalidNumber', state)}
if not self._validateMod10(number):
return {self.cc_number_field: self.message('invalidNumber', state)}
return None
def _validateMod10(self, s):
"""
This code by Sean Reifschneider, of tummy.com
"""
double = 0
sum = 0
for i in range(len(s) - 1, -1, -1):
for c in str((double + 1) * int(s[i])):
sum = sum + int(c)
double = (double + 1) % 2
return((sum % 10) == 0)
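    # Worked example of the mod-10 (Luhn) walk above (illustrative only):
    # every second digit from the right is doubled before its digits are
    # summed. For "4111111111111111" the total is 30, and 30 % 10 == 0, so
    #
    #   >>> CreditCardValidator()._validateMod10("4111111111111111")
    #   True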
_cardInfo = {
"visa": [('4', 16),
('4', 13)],
"mastercard": [('51', 16),
('52', 16),
('53', 16),
('54', 16),
('55', 16)],
"discover": [('6011', 16)],
"amex": [('34', 15),
('37', 15)],
"dinersclub": [('300', 14),
('301', 14),
('302', 14),
('303', 14),
('304', 14),
('305', 14),
('36', 14),
('38', 14)],
"jcb": [('3', 16),
('2131', 15),
('1800', 15)],
}
| gpl-2.0 | 8,930,365,845,739,722,000 | 32.114525 | 226 | 0.525029 | false |
tkolhar/robottelo | tests/foreman/cli/test_installer.py | 1 | 3597 | # -*- encoding: utf-8 -*-
"""Tests for Installer"""
from robottelo.decorators import run_only_on, stubbed
from robottelo.test import CLITestCase
class InstallerTestCase(CLITestCase):
"""Test class for installer"""
# Notes for installer testing:
# Perhaps there is a convenient log analyzer library out there
# that can parse logs? It would be better (and possibly less
# error-prone) than simply grepping for ERROR/FATAL
@stubbed()
@run_only_on('sat')
def test_positive_installer_check_services(self):
# devnote:
# maybe `hammer ping` command might be useful here to check
# the health status
"""@test: Services services start correctly
@feature: Installer
@assert: All services {katello-jobs, tomcat6, foreman, pulp,
passenger-analytics, httpd, foreman_proxy, elasticsearch, postgresql,
mongod} are started
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_installer_logfile_check(self):
"""@test: Look for ERROR or FATAL references in logfiles
@feature: Installer
@steps:
1. search all relevant logfiles for ERROR/FATAL
        @assert: No ERROR/FATAL notifications occur in {katello-jobs, tomcat6,
foreman, pulp, passenger-analytics,httpd, foreman_proxy, elasticsearch,
postgresql, mongod} logfiles.
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_installer_check_progress_meter(self):
"""@test: Assure progress indicator/meter "works"
@feature: Installer
@assert: Progress indicator increases appropriately as install
commences, through to completion
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_server_installer_from_iso(self):
"""@test: Can install product from ISO
@feature: Installer
        @assert: Install from ISO is successful.
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_server_installer_from_repository(self):
"""@test: Can install main satellite instance successfully via RPM
@feature: Installer
@assert: Install of main instance successful.
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_capsule_installer_from_repository(self):
"""@test: Can install capsule successfully via RPM
@feature: Installer
@assert: Install of capsule successful.
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_disconnected_util_installer(self):
"""@test: Can install satellite disconnected utility successfully
via RPM
@feature: Installer
@assert: Install of disconnected utility successful.
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_capsule_installer_and_register(self):
"""@test: Upon installation, capsule instance self-registers
itself to parent instance
@feature: Installer
@assert: capsule is communicating properly with parent,
following install.
@status: Manual
"""
@stubbed()
@run_only_on('sat')
def test_positive_installer_clear_data(self):
"""@test: User can run installer to clear existing data
@feature: Installer
@assert: All data is cleared from satellite instance
@bz: 1072780
@status: Manual
"""
| gpl-3.0 | -3,570,105,165,779,702,000 | 25.448529 | 79 | 0.626911 | false |
TeamODrKnow/doctor-know | main.py | 1 | 15028 | __author__ = '[email protected] (J. Matthew Landis)'
import os
import logging
import pickle
import webapp2
import time
import httplib2
import json
import tweepy
import haigha
from collections import Counter
from haigha.connections.rabbit_connection import RabbitConnection
from apiclient import discovery
from oauth2client import appengine
from oauth2client import client
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
#######################################################################
PROJECTID = '934763316754'
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# Helpful message to display in the browser if the CLIENT_SECRETS file
# is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """""
<h1>Warning: Please configure OAuth 2.0</h1>
<p>
To make this sample run you will need to populate the client_secrets.json file
found at:
</p>
<p>
<code>%s</code>.
</p>
<p>with information found on the <a
href="https://code.google.com/apis/console">APIs Console</a>.
</p>
""" % CLIENT_SECRETS
http = httplib2.Http(memcache)
service = discovery.build("plus", "v1", http=http)
bigquery_service = discovery.build("bigquery","v2", http=http)
consumer_key = "9xNrmD6hE0xnRSYdZt5t0XT0B"
consumer_secret = "kperqjklvPhBCVvHI96aZIfJu5w1DHI2BZoNMdBEvBPfmuZIYG"
access_token = "46501499-cijYvv9ixtQKHLSiLt9QaRtcmWeEKvvGZK5s6ukw7"
access_token_secret = "D127XCAN02BPb0ZtcreCG6dpBJyiiLCeD6ckS2MgdHqwG"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/plus.me',
message=MISSING_CLIENT_SECRETS_MESSAGE)
bq_decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/bigquery',
message=MISSING_CLIENT_SECRETS_MESSAGE)
## Function to retrieve and render a template
def render_template(handler, templatename, templatevalues):
path = os.path.join(os.path.dirname(__file__), 'templates/' + templatename)
html = template.render(path, templatevalues)
handler.response.out.write(html)
#######################################################################
## Handles and loads index page
class MainPage(webapp2.RequestHandler):
def get(self):
nickname = "null"
email = "null"
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
nickname = ui.fname+ " " +ui.lname
email = user.email()
login = users.create_login_url('/')
else:
nickname = user.nickname()
email = user.email()
login = '/createProfile'
else:
ui = None
login = users.create_login_url('/')
logout = users.create_logout_url('/')
os.system("python stream.py")
template_values = {
'login': login,
'logout': logout,
'user': user,
'nickname': nickname,
'email': email
}
render_template(self, 'index.html', template_values)
#######################################################################
## Handle user info and profile
class CreateProfile(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
self.redirect('/profile')
else:
                template_data = {'logout': users.create_logout_url('/'), 'nickname': user.nickname()}
template_path = 'templates/createProfile.html'
self.response.out.write(template.render(template_path,template_data))
else:
            self.redirect(users.create_login_url('/'))
#######################################################################
## process user profile
## check for user signed in, if so, save the entered information, otherwise, redirect them to the login page
class ProcessUser(webapp2.RequestHandler) :
def post(self) :
user = users.get_current_user()
if user:
fname = self.request.get('fname')
lname = self.request.get('lname')
fname.replace(" ", "")
lname.replace(" ", "")
words = self.request.get_all('word')
if (not(not fname)) & (not(not lname)):
NewUser = UserModel()
NewUser.uid = user.user_id()
NewUser.fname = fname
NewUser.lname = lname
NewUser.words = []
for word in words:
word.replace(" ", "")
if word:
NewUser.words+=[word]
NewUser.put()
self.redirect('/profile')
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Model Data
class DataHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT SUM(word_count) as WCount,corpus_date,group_concat(corpus) as Work FROM '
'[publicdata:samples.shakespeare] WHERE word="'+inputData+'" and corpus_date>0 GROUP BY corpus_date ORDER BY WCount'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
resp = []
if 'rows' in dataList:
#parse dataList
for row in dataList['rows']:
for key,dict_list in row.iteritems():
count = dict_list[0]
year = dict_list[1]
corpus = dict_list[2]
resp.append({'count': count['v'],'year':year['v'],'corpus':corpus['v']})
else:
resp.append({'count':'0','year':'0','corpus':'0'})
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
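# Response shape sketch for /getData (illustrative values only):
#
#   [{"count": "42", "year": "1597", "corpus": "kinghenryiv"}, ...]
#
# BigQuery returns every cell wrapped as {"v": <string>}, which is why the
# handler above reads dict_list[i]['v'].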
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT text FROM '
            '[doctor-know:rtda.tweets] WHERE Words CONTAINS "'+inputData+'" GROUP BY text ORDER BY text LIMIT 150'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
            resp = []
            if 'rows' in dataList:
                #parse dataList
                for row in dataList['rows']:
                    for key,dict_list in row.iteritems():
                        text = dict_list[0]
                        resp.append({'text': text['v']})
            self.response.headers['Content-Type'] = 'application/json'
            self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler) :
inputData = "yes"
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE text CONTAINS "'+inputData+'" GROUP BY text ORDER BY text LIMIT 300'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
tweets = []
if 'rows' in dataList:
#parse dataList
count = 0
for row in dataList['rows']:
for key,dict_list in row.iteritems():
tweet = dict_list[0]
count += 1
tweets.append({'text': tweet})
if count == 300:
break
ignore_words = [ "fuck", "shit", "cock", "penis", "porn"]
words = []
for tweet in tweets:
tt = tweet.get('text', "")
for word in tt.split():
if "http" in word:
continue
if word not in ignore_words:
words.append(word)
            resp = Counter(words)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.headers['Access-Control-Allow-Origin'] = '*'
            self.response.out.write(json.dumps(resp))
        else:
            self.response.write(json.dumps({'error':'No credentials'}))
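# Response shape sketch for /getWords (illustrative values only):
# collections.Counter serializes like a plain dict, e.g.
#
#   {"the": 87, "and": 54, "doctor": 12}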
#######################################################################
## Profile Page
class ProfilePage(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'firstname': ui.fname, 'lastname': ui.lname, 'words': ui.words, 'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/profile.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Artificial Creativity Engine
class DisplayEngine(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Data Analysis
class DisplayData(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Establish/Update User Profile
class UserModel(ndb.Model) :
uid = ndb.StringProperty(indexed=True)
fname = ndb.StringProperty(indexed = False)
lname = ndb.StringProperty(indexed = False)
words = ndb.StringProperty(indexed=False,repeated=True)
#######################################################################
## Twitter stream listener (disabled)
# class CustomStreamListener(tweepy.StreamListener):
# def __init__(self, api):
# self.api = api
# super(tweepy.StreamListener, self).__init__()
# #setup rabbitMQ Connection
# self.connection = RabbitConnection(host='130.211.189.207', heartbeat=None, debug=True)
# self.channel = self.connection.channel()
# #set max queue size
# args = {"x-max-length": 2000}
# self.channel.queue.declare(queue='twitter_topic_feed', arguments=args)
# def on_status(self, status):
# print status.text, "\n"
# data = {}
# data['text'] = status.text
# data['created_at'] = time.mktime(status.created_at.timetuple())
# data['geo'] = status.geo
# data['source'] = status.source
# #queue the tweet
# self.channel.basic.publish(exchange='',
# routing_key='twitter_topic_feed',
# body=json.dumps(data))
# def on_error(self, status_code):
# print >> sys.stderr, 'Encountered error with status code:', status_code
# return True # Don't kill the stream
# def on_timeout(self):
# print >> sys.stderr, 'Timeout...'
# return True # Don't kill the stream
# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api))
# # my keyword today is chelsea as the team just had a big win
# sapi.filter(track=[self.request.get("inputData")])
app = webapp2.WSGIApplication( [
('/', MainPage),
('/profile', ProfilePage),
('/createProfile', CreateProfile),
('/userRegister', ProcessUser),
('/getData', DataHandler),
('/getWords', WordsHandler),
('/data', DisplayData),
('/engine', DisplayEngine),
(decorator.callback_path, decorator.callback_handler()),
(bq_decorator.callback_path, bq_decorator.callback_handler())
], debug=True)
| mit | -4,567,955,119,498,473,000 | 34.443396 | 161 | 0.5895 | false |
mbakke/ganeti | test/ganeti.jqueue_unittest.py | 1 | 85469 | #!/usr/bin/python
#
# Copyright (C) 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for testing ganeti.jqueue"""
import os
import sys
import unittest
import tempfile
import shutil
import errno
import itertools
import random
from ganeti import constants
from ganeti import utils
from ganeti import errors
from ganeti import jqueue
from ganeti import opcodes
from ganeti import compat
from ganeti import mcpu
from ganeti import query
from ganeti import workerpool
import testutils
class _FakeJob:
def __init__(self, job_id, status):
self.id = job_id
self.writable = False
self._status = status
self._log = []
def SetStatus(self, status):
self._status = status
def AddLogEntry(self, msg):
self._log.append((len(self._log), msg))
def CalcStatus(self):
return self._status
def GetInfo(self, fields):
result = []
for name in fields:
if name == "status":
result.append(self._status)
else:
raise Exception("Unknown field")
return result
def GetLogEntries(self, newer_than):
assert newer_than is None or newer_than >= 0
if newer_than is None:
return self._log
return self._log[newer_than:]
class TestJobChangesChecker(unittest.TestCase):
def testStatus(self):
job = _FakeJob(9094, constants.JOB_STATUS_QUEUED)
checker = jqueue._JobChangesChecker(["status"], None, None)
self.assertEqual(checker(job), ([constants.JOB_STATUS_QUEUED], []))
job.SetStatus(constants.JOB_STATUS_RUNNING)
self.assertEqual(checker(job), ([constants.JOB_STATUS_RUNNING], []))
job.SetStatus(constants.JOB_STATUS_SUCCESS)
self.assertEqual(checker(job), ([constants.JOB_STATUS_SUCCESS], []))
# job.id is used by checker
self.assertEqual(job.id, 9094)
def testStatusWithPrev(self):
job = _FakeJob(12807, constants.JOB_STATUS_QUEUED)
checker = jqueue._JobChangesChecker(["status"],
[constants.JOB_STATUS_QUEUED], None)
self.assert_(checker(job) is None)
job.SetStatus(constants.JOB_STATUS_RUNNING)
self.assertEqual(checker(job), ([constants.JOB_STATUS_RUNNING], []))
def testFinalStatus(self):
for status in constants.JOBS_FINALIZED:
job = _FakeJob(2178711, status)
checker = jqueue._JobChangesChecker(["status"], [status], None)
# There won't be any changes in this status, hence it should signal
# a change immediately
self.assertEqual(checker(job), ([status], []))
def testLog(self):
job = _FakeJob(9094, constants.JOB_STATUS_RUNNING)
checker = jqueue._JobChangesChecker(["status"], None, None)
self.assertEqual(checker(job), ([constants.JOB_STATUS_RUNNING], []))
job.AddLogEntry("Hello World")
(job_info, log_entries) = checker(job)
self.assertEqual(job_info, [constants.JOB_STATUS_RUNNING])
self.assertEqual(log_entries, [[0, "Hello World"]])
checker2 = jqueue._JobChangesChecker(["status"], job_info, len(log_entries))
self.assert_(checker2(job) is None)
job.AddLogEntry("Foo Bar")
job.SetStatus(constants.JOB_STATUS_ERROR)
(job_info, log_entries) = checker2(job)
self.assertEqual(job_info, [constants.JOB_STATUS_ERROR])
self.assertEqual(log_entries, [[1, "Foo Bar"]])
checker3 = jqueue._JobChangesChecker(["status"], None, None)
(job_info, log_entries) = checker3(job)
self.assertEqual(job_info, [constants.JOB_STATUS_ERROR])
self.assertEqual(log_entries, [[0, "Hello World"], [1, "Foo Bar"]])
class TestJobChangesWaiter(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.filename = utils.PathJoin(self.tmpdir, "job-1")
utils.WriteFile(self.filename, data="")
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _EnsureNotifierClosed(self, notifier):
try:
os.fstat(notifier._fd)
except EnvironmentError, err:
self.assertEqual(err.errno, errno.EBADF)
else:
self.fail("File descriptor wasn't closed")
def testClose(self):
for wait in [False, True]:
waiter = jqueue._JobFileChangesWaiter(self.filename)
try:
if wait:
waiter.Wait(0.001)
finally:
waiter.Close()
# Ensure file descriptor was closed
self._EnsureNotifierClosed(waiter._notifier)
def testChangingFile(self):
waiter = jqueue._JobFileChangesWaiter(self.filename)
try:
self.assertFalse(waiter.Wait(0.1))
utils.WriteFile(self.filename, data="changed")
self.assert_(waiter.Wait(60))
finally:
waiter.Close()
self._EnsureNotifierClosed(waiter._notifier)
def testChangingFile2(self):
waiter = jqueue._JobChangesWaiter(self.filename)
try:
self.assertFalse(waiter._filewaiter)
self.assert_(waiter.Wait(0.1))
self.assert_(waiter._filewaiter)
# File waiter is now used, but there have been no changes
self.assertFalse(waiter.Wait(0.1))
utils.WriteFile(self.filename, data="changed")
self.assert_(waiter.Wait(60))
finally:
waiter.Close()
self._EnsureNotifierClosed(waiter._filewaiter._notifier)
class TestWaitForJobChangesHelper(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.filename = utils.PathJoin(self.tmpdir, "job-2614226563")
utils.WriteFile(self.filename, data="")
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _LoadWaitingJob(self):
return _FakeJob(2614226563, constants.JOB_STATUS_WAITING)
def _LoadLostJob(self):
return None
def testNoChanges(self):
wfjc = jqueue._WaitForJobChangesHelper()
# No change
self.assertEqual(wfjc(self.filename, self._LoadWaitingJob, ["status"],
[constants.JOB_STATUS_WAITING], None, 0.1),
constants.JOB_NOTCHANGED)
# No previous information
self.assertEqual(wfjc(self.filename, self._LoadWaitingJob,
["status"], None, None, 1.0),
([constants.JOB_STATUS_WAITING], []))
def testLostJob(self):
wfjc = jqueue._WaitForJobChangesHelper()
self.assert_(wfjc(self.filename, self._LoadLostJob,
["status"], None, None, 1.0) is None)
class TestEncodeOpError(unittest.TestCase):
def test(self):
encerr = jqueue._EncodeOpError(errors.LockError("Test 1"))
self.assert_(isinstance(encerr, tuple))
self.assertRaises(errors.LockError, errors.MaybeRaise, encerr)
encerr = jqueue._EncodeOpError(errors.GenericError("Test 2"))
self.assert_(isinstance(encerr, tuple))
self.assertRaises(errors.GenericError, errors.MaybeRaise, encerr)
encerr = jqueue._EncodeOpError(NotImplementedError("Foo"))
self.assert_(isinstance(encerr, tuple))
self.assertRaises(errors.OpExecError, errors.MaybeRaise, encerr)
encerr = jqueue._EncodeOpError("Hello World")
self.assert_(isinstance(encerr, tuple))
self.assertRaises(errors.OpExecError, errors.MaybeRaise, encerr)
class TestQueuedOpCode(unittest.TestCase):
def testDefaults(self):
def _Check(op):
self.assertFalse(hasattr(op.input, "dry_run"))
self.assertEqual(op.priority, constants.OP_PRIO_DEFAULT)
self.assertFalse(op.log)
self.assert_(op.start_timestamp is None)
self.assert_(op.exec_timestamp is None)
self.assert_(op.end_timestamp is None)
self.assert_(op.result is None)
self.assertEqual(op.status, constants.OP_STATUS_QUEUED)
op1 = jqueue._QueuedOpCode(opcodes.OpTestDelay())
_Check(op1)
op2 = jqueue._QueuedOpCode.Restore(op1.Serialize())
_Check(op2)
self.assertEqual(op1.Serialize(), op2.Serialize())
def testPriority(self):
def _Check(op):
assert constants.OP_PRIO_DEFAULT != constants.OP_PRIO_HIGH, \
"Default priority equals high priority; test can't work"
self.assertEqual(op.priority, constants.OP_PRIO_HIGH)
self.assertEqual(op.status, constants.OP_STATUS_QUEUED)
inpop = opcodes.OpTagsGet(priority=constants.OP_PRIO_HIGH)
op1 = jqueue._QueuedOpCode(inpop)
_Check(op1)
op2 = jqueue._QueuedOpCode.Restore(op1.Serialize())
_Check(op2)
self.assertEqual(op1.Serialize(), op2.Serialize())
class TestQueuedJob(unittest.TestCase):
def test(self):
self.assertRaises(errors.GenericError, jqueue._QueuedJob,
None, 1, [], False)
def testDefaults(self):
job_id = 4260
ops = [
opcodes.OpTagsGet(),
opcodes.OpTestDelay(),
]
def _Check(job):
self.assertTrue(job.writable)
self.assertEqual(job.id, job_id)
self.assertEqual(job.log_serial, 0)
self.assert_(job.received_timestamp)
self.assert_(job.start_timestamp is None)
self.assert_(job.end_timestamp is None)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.CalcPriority(), constants.OP_PRIO_DEFAULT)
self.assert_(repr(job).startswith("<"))
self.assertEqual(len(job.ops), len(ops))
self.assert_(compat.all(inp.__getstate__() == op.input.__getstate__()
for (inp, op) in zip(ops, job.ops)))
self.assertRaises(errors.OpPrereqError, job.GetInfo,
["unknown-field"])
self.assertEqual(job.GetInfo(["summary"]),
[[op.input.Summary() for op in job.ops]])
job1 = jqueue._QueuedJob(None, job_id, ops, True)
_Check(job1)
job2 = jqueue._QueuedJob.Restore(None, job1.Serialize(), True)
_Check(job2)
self.assertEqual(job1.Serialize(), job2.Serialize())
def testWritable(self):
job = jqueue._QueuedJob(None, 1, [opcodes.OpTestDelay()], False)
self.assertFalse(job.writable)
job = jqueue._QueuedJob(None, 1, [opcodes.OpTestDelay()], True)
self.assertTrue(job.writable)
def testPriority(self):
job_id = 4283
ops = [
opcodes.OpTagsGet(priority=constants.OP_PRIO_DEFAULT),
opcodes.OpTestDelay(),
]
def _Check(job):
self.assertEqual(job.id, job_id)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(repr(job).startswith("<"))
job = jqueue._QueuedJob(None, job_id, ops, True)
_Check(job)
self.assert_(compat.all(op.priority == constants.OP_PRIO_DEFAULT
for op in job.ops))
self.assertEqual(job.CalcPriority(), constants.OP_PRIO_DEFAULT)
# Increase first
job.ops[0].priority -= 1
_Check(job)
self.assertEqual(job.CalcPriority(), constants.OP_PRIO_DEFAULT - 1)
# Mark opcode as finished
job.ops[0].status = constants.OP_STATUS_SUCCESS
_Check(job)
self.assertEqual(job.CalcPriority(), constants.OP_PRIO_DEFAULT)
# Increase second
job.ops[1].priority -= 10
self.assertEqual(job.CalcPriority(), constants.OP_PRIO_DEFAULT - 10)
# Test increasing first
job.ops[0].status = constants.OP_STATUS_RUNNING
job.ops[0].priority -= 19
self.assertEqual(job.CalcPriority(), constants.OP_PRIO_DEFAULT - 20)
def testCalcStatus(self):
def _Queued(ops):
# The default status is "queued"
self.assert_(compat.all(op.status == constants.OP_STATUS_QUEUED
for op in ops))
def _Waitlock1(ops):
ops[0].status = constants.OP_STATUS_WAITING
def _Waitlock2(ops):
ops[0].status = constants.OP_STATUS_SUCCESS
ops[1].status = constants.OP_STATUS_SUCCESS
ops[2].status = constants.OP_STATUS_WAITING
def _Running(ops):
ops[0].status = constants.OP_STATUS_SUCCESS
ops[1].status = constants.OP_STATUS_RUNNING
for op in ops[2:]:
op.status = constants.OP_STATUS_QUEUED
def _Canceling1(ops):
ops[0].status = constants.OP_STATUS_SUCCESS
ops[1].status = constants.OP_STATUS_SUCCESS
for op in ops[2:]:
op.status = constants.OP_STATUS_CANCELING
def _Canceling2(ops):
for op in ops:
op.status = constants.OP_STATUS_CANCELING
def _Canceled(ops):
for op in ops:
op.status = constants.OP_STATUS_CANCELED
def _Error1(ops):
for idx, op in enumerate(ops):
if idx > 3:
op.status = constants.OP_STATUS_ERROR
else:
op.status = constants.OP_STATUS_SUCCESS
def _Error2(ops):
for op in ops:
op.status = constants.OP_STATUS_ERROR
def _Success(ops):
for op in ops:
op.status = constants.OP_STATUS_SUCCESS
tests = {
constants.JOB_STATUS_QUEUED: [_Queued],
constants.JOB_STATUS_WAITING: [_Waitlock1, _Waitlock2],
constants.JOB_STATUS_RUNNING: [_Running],
constants.JOB_STATUS_CANCELING: [_Canceling1, _Canceling2],
constants.JOB_STATUS_CANCELED: [_Canceled],
constants.JOB_STATUS_ERROR: [_Error1, _Error2],
constants.JOB_STATUS_SUCCESS: [_Success],
}
def _NewJob():
job = jqueue._QueuedJob(None, 1,
[opcodes.OpTestDelay() for _ in range(10)],
True)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(compat.all(op.status == constants.OP_STATUS_QUEUED
for op in job.ops))
return job
for status in constants.JOB_STATUS_ALL:
sttests = tests[status]
assert sttests
for fn in sttests:
job = _NewJob()
fn(job.ops)
self.assertEqual(job.CalcStatus(), status)
class _FakeDependencyManager:
def __init__(self):
self._checks = []
self._notifications = []
self._waiting = set()
def AddCheckResult(self, job, dep_job_id, dep_status, result):
self._checks.append((job, dep_job_id, dep_status, result))
def CountPendingResults(self):
return len(self._checks)
def CountWaitingJobs(self):
return len(self._waiting)
def GetNextNotification(self):
return self._notifications.pop(0)
def JobWaiting(self, job):
return job in self._waiting
def CheckAndRegister(self, job, dep_job_id, dep_status):
(exp_job, exp_dep_job_id, exp_dep_status, result) = self._checks.pop(0)
assert exp_job == job
assert exp_dep_job_id == dep_job_id
assert exp_dep_status == dep_status
(result_status, _) = result
if result_status == jqueue._JobDependencyManager.WAIT:
self._waiting.add(job)
elif result_status == jqueue._JobDependencyManager.CONTINUE:
self._waiting.remove(job)
return result
def NotifyWaiters(self, job_id):
self._notifications.append(job_id)
class _DisabledFakeDependencyManager:
def JobWaiting(self, _):
return False
def CheckAndRegister(self, *args):
assert False, "Should not be called"
def NotifyWaiters(self, _):
pass
class _FakeQueueForProc:
def __init__(self, depmgr=None):
self._acquired = False
self._updates = []
self._submitted = []
self._accepting_jobs = True
self._submit_count = itertools.count(1000)
if depmgr:
self.depmgr = depmgr
else:
self.depmgr = _DisabledFakeDependencyManager()
def IsAcquired(self):
return self._acquired
def GetNextUpdate(self):
return self._updates.pop(0)
def GetNextSubmittedJob(self):
return self._submitted.pop(0)
def acquire(self, shared=0):
assert shared == 1
self._acquired = True
def release(self):
assert self._acquired
self._acquired = False
def UpdateJobUnlocked(self, job, replicate=True):
assert self._acquired, "Lock not acquired while updating job"
self._updates.append((job, bool(replicate)))
def SubmitManyJobs(self, jobs):
assert not self._acquired, "Lock acquired while submitting jobs"
job_ids = [self._submit_count.next() for _ in jobs]
self._submitted.extend(zip(job_ids, jobs))
return job_ids
def StopAcceptingJobs(self):
self._accepting_jobs = False
def AcceptingJobsUnlocked(self):
return self._accepting_jobs
class _FakeExecOpCodeForProc:
def __init__(self, queue, before_start, after_start):
self._queue = queue
self._before_start = before_start
self._after_start = after_start
def __call__(self, op, cbs, timeout=None, priority=None):
assert isinstance(op, opcodes.OpTestDummy)
assert not self._queue.IsAcquired(), \
"Queue lock not released when executing opcode"
if self._before_start:
self._before_start(timeout, priority)
cbs.NotifyStart()
if self._after_start:
self._after_start(op, cbs)
# Check again after the callbacks
assert not self._queue.IsAcquired()
if op.fail:
raise errors.OpExecError("Error requested (%s)" % op.result)
if hasattr(op, "submit_jobs") and op.submit_jobs is not None:
return cbs.SubmitManyJobs(op.submit_jobs)
return op.result
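# Minimal wiring sketch (mirrors the tests below; all names are from this
# file and from ganeti.jqueue):
#
#   queue = _FakeQueueForProc()
#   opexec = _FakeExecOpCodeForProc(queue, None, None)
#   job = jqueue._QueuedJob(queue, 1,
#                           [opcodes.OpTestDummy(result="x", fail=False)], True)
#   assert jqueue._JobProcessor(queue, opexec, job)() == \
#       jqueue._JobProcessor.FINISHED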
class _JobProcessorTestUtils:
def _CreateJob(self, queue, job_id, ops):
job = jqueue._QueuedJob(queue, job_id, ops, True)
self.assertFalse(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertEqual(len(ops), len(job.ops))
self.assert_(compat.all(op.input == inp
for (op, inp) in zip(job.ops, ops)))
self.assertEqual(job.GetInfo(["ops"]), [[op.__getstate__() for op in ops]])
return job
class TestJobProcessor(unittest.TestCase, _JobProcessorTestUtils):
def _GenericCheckJob(self, job):
assert compat.all(isinstance(op.input, opcodes.OpTestDummy)
for op in job.ops)
self.assertEqual(job.GetInfo(["opstart", "opexec", "opend"]),
[[op.start_timestamp for op in job.ops],
[op.exec_timestamp for op in job.ops],
[op.end_timestamp for op in job.ops]])
self.assertEqual(job.GetInfo(["received_ts", "start_ts", "end_ts"]),
[job.received_timestamp,
job.start_timestamp,
job.end_timestamp])
self.assert_(job.start_timestamp)
self.assert_(job.end_timestamp)
self.assertEqual(job.start_timestamp, job.ops[0].start_timestamp)
def testSuccess(self):
queue = _FakeQueueForProc()
for (job_id, opcount) in [(25351, 1), (6637, 3),
(24644, 10), (32207, 100)]:
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(opcount)]
# Create job
job = self._CreateJob(queue, job_id, ops)
def _BeforeStart(timeout, priority):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertFalse(job.cur_opctx)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
self.assertFalse(job.cur_opctx)
# Job is running, cancelling shouldn't be possible
(success, _) = job.Cancel()
self.assertFalse(success)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
for idx in range(len(ops)):
self.assertRaises(IndexError, queue.GetNextUpdate)
result = jqueue._JobProcessor(queue, opexec, job)()
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
if idx == len(ops) - 1:
# Last opcode
self.assertEqual(result, jqueue._JobProcessor.FINISHED)
else:
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_SUCCESS)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_SUCCESS])
self.assertEqual(job.GetInfo(["opresult"]),
[[op.input.result for op in job.ops]])
self.assertEqual(job.GetInfo(["opstatus"]),
[len(job.ops) * [constants.OP_STATUS_SUCCESS]])
self.assert_(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops))
self._GenericCheckJob(job)
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
def testOpcodeError(self):
queue = _FakeQueueForProc()
testdata = [
(17077, 1, 0, 0),
(1782, 5, 2, 2),
(18179, 10, 9, 9),
(4744, 10, 3, 8),
(23816, 100, 39, 45),
]
for (job_id, opcount, failfrom, failto) in testdata:
# Prepare opcodes
ops = [opcodes.OpTestDummy(result="Res%s" % i,
fail=(failfrom <= i and
i <= failto))
for i in range(opcount)]
# Create job
job = self._CreateJob(queue, str(job_id), ops)
opexec = _FakeExecOpCodeForProc(queue, None, None)
for idx in range(len(ops)):
self.assertRaises(IndexError, queue.GetNextUpdate)
result = jqueue._JobProcessor(queue, opexec, job)()
# queued to waitlock
self.assertEqual(queue.GetNextUpdate(), (job, True))
# waitlock to running
self.assertEqual(queue.GetNextUpdate(), (job, True))
# Opcode result
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
if idx in (failfrom, len(ops) - 1):
# Last opcode
self.assertEqual(result, jqueue._JobProcessor.FINISHED)
break
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertRaises(IndexError, queue.GetNextUpdate)
# Check job status
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_ERROR)
self.assertEqual(job.GetInfo(["id"]), [str(job_id)])
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_ERROR])
# Check opcode status
data = zip(job.ops,
job.GetInfo(["opstatus"])[0],
job.GetInfo(["opresult"])[0])
for idx, (op, opstatus, opresult) in enumerate(data):
if idx < failfrom:
assert not op.input.fail
self.assertEqual(opstatus, constants.OP_STATUS_SUCCESS)
self.assertEqual(opresult, op.input.result)
elif idx <= failto:
assert op.input.fail
self.assertEqual(opstatus, constants.OP_STATUS_ERROR)
self.assertRaises(errors.OpExecError, errors.MaybeRaise, opresult)
else:
assert not op.input.fail
self.assertEqual(opstatus, constants.OP_STATUS_ERROR)
self.assertRaises(errors.OpExecError, errors.MaybeRaise, opresult)
self.assert_(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops[:failfrom]))
self._GenericCheckJob(job)
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
def testCancelWhileInQueue(self):
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(5)]
# Create job
job_id = 17045
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
# Mark as cancelled
(success, _) = job.Cancel()
self.assert_(success)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(job.start_timestamp)
self.assertTrue(job.end_timestamp)
self.assert_(compat.all(op.status == constants.OP_STATUS_CANCELED
for op in job.ops))
# Serialize to check for differences
before_proc = job.Serialize()
# Simulate processor called in workerpool
opexec = _FakeExecOpCodeForProc(queue, None, None)
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
# Check result
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_CANCELED)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_CANCELED])
self.assertFalse(job.start_timestamp)
self.assertTrue(job.end_timestamp)
self.assertFalse(compat.any(op.start_timestamp or op.end_timestamp
for op in job.ops))
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_CANCELED for _ in job.ops],
["Job canceled by request" for _ in job.ops]])
# Must not have changed or written
self.assertEqual(before_proc, job.Serialize())
self.assertRaises(IndexError, queue.GetNextUpdate)
def testCancelWhileWaitlockInQueue(self):
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(5)]
# Create job
job_id = 8645
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
job.ops[0].status = constants.OP_STATUS_WAITING
assert len(job.ops) == 5
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
# Mark as cancelling
(success, _) = job.Cancel()
self.assert_(success)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assert_(compat.all(op.status == constants.OP_STATUS_CANCELING
for op in job.ops))
opexec = _FakeExecOpCodeForProc(queue, None, None)
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
# Check result
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_CANCELED)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_CANCELED])
self.assertFalse(job.start_timestamp)
self.assert_(job.end_timestamp)
self.assertFalse(compat.any(op.start_timestamp or op.end_timestamp
for op in job.ops))
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_CANCELED for _ in job.ops],
["Job canceled by request" for _ in job.ops]])
def testCancelWhileWaitlock(self):
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(5)]
# Create job
job_id = 11009
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
def _BeforeStart(timeout, priority):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
# Mark as cancelled
(success, _) = job.Cancel()
self.assert_(success)
self.assert_(compat.all(op.status == constants.OP_STATUS_CANCELING
for op in job.ops))
self.assertRaises(IndexError, queue.GetNextUpdate)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
# Check result
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_CANCELED)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_CANCELED])
self.assert_(job.start_timestamp)
self.assert_(job.end_timestamp)
self.assertFalse(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops))
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_CANCELED for _ in job.ops],
["Job canceled by request" for _ in job.ops]])
def _TestCancelWhileSomething(self, cb):
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(5)]
# Create job
job_id = 24314
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
def _BeforeStart(timeout, priority):
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
# Mark as cancelled
(success, _) = job.Cancel()
self.assert_(success)
self.assert_(compat.all(op.status == constants.OP_STATUS_CANCELING
for op in job.ops))
cb(queue)
def _AfterStart(op, cbs):
self.fail("Should not reach this")
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
# Check result
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_CANCELED)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_CANCELED])
self.assert_(job.start_timestamp)
self.assert_(job.end_timestamp)
self.assertFalse(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops))
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_CANCELED for _ in job.ops],
["Job canceled by request" for _ in job.ops]])
return queue
def testCancelWhileWaitlockWithTimeout(self):
def fn(_):
# Fake an acquire attempt timing out
raise mcpu.LockAcquireTimeout()
self._TestCancelWhileSomething(fn)
def testCancelDuringQueueShutdown(self):
queue = self._TestCancelWhileSomething(lambda q: q.StopAcceptingJobs())
self.assertFalse(queue.AcceptingJobsUnlocked())
def testCancelWhileRunning(self):
# Tests canceling a job with finished opcodes and more, unprocessed ones
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(3)]
# Create job
job_id = str(28492)
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
opexec = _FakeExecOpCodeForProc(queue, None, None)
# Run one opcode
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.DEFER)
# Job goes back to queued
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_QUEUED,
constants.OP_STATUS_QUEUED],
["Res0", None, None]])
# Mark as cancelled
(success, _) = job.Cancel()
self.assert_(success)
# Try processing another opcode (this will actually cancel the job)
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
# Check result
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_CANCELED)
self.assertEqual(job.GetInfo(["id"]), [job_id])
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_CANCELED])
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_CANCELED,
constants.OP_STATUS_CANCELED],
["Res0", "Job canceled by request",
"Job canceled by request"]])
def _TestQueueShutdown(self, queue, opexec, job, runcount):
self.assertTrue(queue.AcceptingJobsUnlocked())
# Simulate shutdown
queue.StopAcceptingJobs()
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.DEFER)
# Check result
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_QUEUED])
self.assertFalse(job.cur_opctx)
self.assertTrue(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertEqual(job.start_timestamp, job.ops[0].start_timestamp)
self.assertTrue(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops[:runcount]))
self.assertFalse(job.ops[runcount].end_timestamp)
self.assertFalse(compat.any(op.start_timestamp or op.end_timestamp
for op in job.ops[(runcount + 1):]))
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[(([constants.OP_STATUS_SUCCESS] * runcount) +
([constants.OP_STATUS_QUEUED] *
(len(job.ops) - runcount))),
(["Res%s" % i for i in range(runcount)] +
([None] * (len(job.ops) - runcount)))])
# Must have been written and replicated
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
def testQueueShutdownWhileRunning(self):
# Tests shutting down the queue while a job is running
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(3)]
# Create job
job_id = 2718211587
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
opexec = _FakeExecOpCodeForProc(queue, None, None)
self.assertRaises(IndexError, queue.GetNextUpdate)
# Run one opcode
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.DEFER)
# Job goes back to queued
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_QUEUED,
constants.OP_STATUS_QUEUED],
["Res0", None, None]])
self.assertFalse(job.cur_opctx)
# Writes for waiting, running and result
for _ in range(3):
self.assertEqual(queue.GetNextUpdate(), (job, True))
# Run second opcode
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.DEFER)
# Job goes back to queued
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_QUEUED],
["Res0", "Res1", None]])
self.assertFalse(job.cur_opctx)
# Writes for waiting, running and result
for _ in range(3):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self._TestQueueShutdown(queue, opexec, job, 2)
def testQueueShutdownWithLockTimeout(self):
# Tests shutting down while a lock acquire times out
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(3)]
# Create job
job_id = 1304231178
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
acquire_timeout = False
def _BeforeStart(timeout, priority):
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
if acquire_timeout:
raise mcpu.LockAcquireTimeout()
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, None)
self.assertRaises(IndexError, queue.GetNextUpdate)
# Run one opcode
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.DEFER)
# Job goes back to queued
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_QUEUED,
constants.OP_STATUS_QUEUED],
["Res0", None, None]])
self.assertFalse(job.cur_opctx)
# Writes for waiting, running and result
for _ in range(3):
self.assertEqual(queue.GetNextUpdate(), (job, True))
# The next opcode should have expiring lock acquires
acquire_timeout = True
self._TestQueueShutdown(queue, opexec, job, 1)
def testQueueShutdownWhileInQueue(self):
# This should never happen in reality (no new jobs are started by the
# workerpool once a shutdown has been initiated), but it's better to test
# the job processor for this scenario
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(5)]
# Create job
job_id = 2031
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertTrue(compat.all(op.status == constants.OP_STATUS_QUEUED
for op in job.ops))
opexec = _FakeExecOpCodeForProc(queue, None, None)
self._TestQueueShutdown(queue, opexec, job, 0)
def testQueueShutdownWhileWaitlockInQueue(self):
queue = _FakeQueueForProc()
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(5)]
# Create job
job_id = 53125685
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
job.ops[0].status = constants.OP_STATUS_WAITING
assert len(job.ops) == 5
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertRaises(IndexError, queue.GetNextUpdate)
opexec = _FakeExecOpCodeForProc(queue, None, None)
self._TestQueueShutdown(queue, opexec, job, 0)
def testPartiallyRun(self):
# Tests calling the processor on a job that's been partially run before the
# program was restarted
queue = _FakeQueueForProc()
opexec = _FakeExecOpCodeForProc(queue, None, None)
for job_id, successcount in [(30697, 1), (2552, 4), (12489, 9)]:
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(10)]
# Create job
job = self._CreateJob(queue, job_id, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
for _ in range(successcount):
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.GetInfo(["opstatus"]),
[[constants.OP_STATUS_SUCCESS
for _ in range(successcount)] +
[constants.OP_STATUS_QUEUED
for _ in range(len(ops) - successcount)]])
self.assert_(job.ops_iter)
# Serialize and restore (simulates program restart)
newjob = jqueue._QueuedJob.Restore(queue, job.Serialize(), True)
self.assertFalse(newjob.ops_iter)
self._TestPartial(newjob, successcount)
def _TestPartial(self, job, successcount):
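    """Finishes the remaining opcodes of a partially processed job.

    C{successcount} is the number of opcodes that had already succeeded
    before the simulated restart.

    """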
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertEqual(job.start_timestamp, job.ops[0].start_timestamp)
queue = _FakeQueueForProc()
opexec = _FakeExecOpCodeForProc(queue, None, None)
for remaining in reversed(range(len(job.ops) - successcount)):
result = jqueue._JobProcessor(queue, opexec, job)()
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
if remaining == 0:
# Last opcode
self.assertEqual(result, jqueue._JobProcessor.FINISHED)
break
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_SUCCESS)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_SUCCESS])
self.assertEqual(job.GetInfo(["opresult"]),
[[op.input.result for op in job.ops]])
self.assertEqual(job.GetInfo(["opstatus"]),
[[constants.OP_STATUS_SUCCESS for _ in job.ops]])
self.assert_(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops))
self._GenericCheckJob(job)
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
# ... also after being restored
job2 = jqueue._QueuedJob.Restore(queue, job.Serialize(), True)
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job2)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
def testProcessorOnRunningJob(self):
ops = [opcodes.OpTestDummy(result="result", fail=False)]
queue = _FakeQueueForProc()
opexec = _FakeExecOpCodeForProc(queue, None, None)
# Create job
job = self._CreateJob(queue, 9571, ops)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
job.ops[0].status = constants.OP_STATUS_RUNNING
assert len(job.ops) == 1
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
# Calling on running job must fail
self.assertRaises(errors.ProgrammerError,
jqueue._JobProcessor(queue, opexec, job))
def testLogMessages(self):
# Tests the "Feedback" callback function
queue = _FakeQueueForProc()
messages = {
1: [
(None, "Hello"),
(None, "World"),
(constants.ELOG_MESSAGE, "there"),
],
4: [
(constants.ELOG_JQUEUE_TEST, (1, 2, 3)),
(constants.ELOG_JQUEUE_TEST, ("other", "type")),
],
}
ops = [opcodes.OpTestDummy(result="Logtest%s" % i, fail=False,
messages=messages.get(i, []))
for i in range(5)]
# Create job
job = self._CreateJob(queue, 29386, ops)
def _BeforeStart(timeout, priority):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
self.assertRaises(AssertionError, cbs.Feedback,
"too", "many", "arguments")
for (log_type, msg) in op.messages:
self.assertRaises(IndexError, queue.GetNextUpdate)
if log_type:
cbs.Feedback(log_type, msg)
else:
cbs.Feedback(msg)
# Check for job update without replication
self.assertEqual(queue.GetNextUpdate(), (job, False))
self.assertRaises(IndexError, queue.GetNextUpdate)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
for remaining in reversed(range(len(job.ops))):
self.assertRaises(IndexError, queue.GetNextUpdate)
result = jqueue._JobProcessor(queue, opexec, job)()
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
if remaining == 0:
# Last opcode
self.assertEqual(result, jqueue._JobProcessor.FINISHED)
break
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_SUCCESS)
self.assertEqual(job.GetInfo(["opresult"]),
[[op.input.result for op in job.ops]])
logmsgcount = sum(len(m) for m in messages.values())
self._CheckLogMessages(job, logmsgcount)
# Serialize and restore (simulates program restart)
newjob = jqueue._QueuedJob.Restore(queue, job.Serialize(), True)
self._CheckLogMessages(newjob, logmsgcount)
# Check each message
prevserial = -1
for idx, oplog in enumerate(job.GetInfo(["oplog"])[0]):
for (serial, timestamp, log_type, msg) in oplog:
(exptype, expmsg) = messages.get(idx).pop(0)
if exptype:
self.assertEqual(log_type, exptype)
else:
self.assertEqual(log_type, constants.ELOG_MESSAGE)
self.assertEqual(expmsg, msg)
self.assert_(serial > prevserial)
prevserial = serial
def _CheckLogMessages(self, job, count):
# Check serial
self.assertEqual(job.log_serial, count)
# No filter
self.assertEqual(job.GetLogEntries(None),
[entry for entries in job.GetInfo(["oplog"])[0] if entries
for entry in entries])
# Filter with serial
assert count > 3
self.assert_(job.GetLogEntries(3))
self.assertEqual(job.GetLogEntries(3),
[entry for entries in job.GetInfo(["oplog"])[0] if entries
for entry in entries][3:])
# No log message after highest serial
self.assertFalse(job.GetLogEntries(count))
self.assertFalse(job.GetLogEntries(count + 3))
def testSubmitManyJobs(self):
queue = _FakeQueueForProc()
job_id = 15656
ops = [
opcodes.OpTestDummy(result="Res0", fail=False,
submit_jobs=[]),
opcodes.OpTestDummy(result="Res1", fail=False,
submit_jobs=[
[opcodes.OpTestDummy(result="r1j0", fail=False)],
]),
opcodes.OpTestDummy(result="Res2", fail=False,
submit_jobs=[
[opcodes.OpTestDummy(result="r2j0o0", fail=False),
opcodes.OpTestDummy(result="r2j0o1", fail=False),
opcodes.OpTestDummy(result="r2j0o2", fail=False),
opcodes.OpTestDummy(result="r2j0o3", fail=False)],
[opcodes.OpTestDummy(result="r2j1", fail=False)],
[opcodes.OpTestDummy(result="r2j3o0", fail=False),
opcodes.OpTestDummy(result="r2j3o1", fail=False)],
]),
]
# Create job
job = self._CreateJob(queue, job_id, ops)
def _BeforeStart(timeout, priority):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertFalse(job.cur_opctx)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
self.assertFalse(job.cur_opctx)
# Job is running, cancelling shouldn't be possible
(success, _) = job.Cancel()
self.assertFalse(success)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
for idx in range(len(ops)):
self.assertRaises(IndexError, queue.GetNextUpdate)
result = jqueue._JobProcessor(queue, opexec, job)()
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
if idx == len(ops) - 1:
# Last opcode
self.assertEqual(result, jqueue._JobProcessor.FINISHED)
else:
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertRaises(IndexError, queue.GetNextUpdate)
for idx, submitted_ops in enumerate(job_ops
for op in ops
for job_ops in op.submit_jobs):
self.assertEqual(queue.GetNextSubmittedJob(),
(1000 + idx, submitted_ops))
self.assertRaises(IndexError, queue.GetNextSubmittedJob)
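    # The fake queue assigns IDs sequentially from 1000 to submitted jobs,
    # which is what the "opresult" assertion below relies on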
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_SUCCESS)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_SUCCESS])
self.assertEqual(job.GetInfo(["opresult"]),
[[[], [1000], [1001, 1002, 1003]]])
self.assertEqual(job.GetInfo(["opstatus"]),
[len(job.ops) * [constants.OP_STATUS_SUCCESS]])
self._GenericCheckJob(job)
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
def testJobDependency(self):
depmgr = _FakeDependencyManager()
queue = _FakeQueueForProc(depmgr=depmgr)
self.assertEqual(queue.depmgr, depmgr)
prev_job_id = 22113
prev_job_id2 = 28102
job_id = 29929
ops = [
opcodes.OpTestDummy(result="Res0", fail=False,
depends=[
[prev_job_id2, None],
[prev_job_id, None],
]),
opcodes.OpTestDummy(result="Res1", fail=False),
]
# Create job
job = self._CreateJob(queue, job_id, ops)
def _BeforeStart(timeout, priority):
if attempt == 0 or attempt > 5:
# Job should only be updated when it wasn't waiting for another job
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertFalse(job.cur_opctx)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
self.assertFalse(job.cur_opctx)
# Job is running, cancelling shouldn't be possible
(success, _) = job.Cancel()
self.assertFalse(success)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
counter = itertools.count()
while True:
attempt = counter.next()
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertRaises(IndexError, depmgr.GetNextNotification)
if attempt < 2:
depmgr.AddCheckResult(job, prev_job_id2, None,
(jqueue._JobDependencyManager.WAIT, "wait2"))
elif attempt == 2:
depmgr.AddCheckResult(job, prev_job_id2, None,
(jqueue._JobDependencyManager.CONTINUE, "cont"))
# The processor will ask for the next dependency immediately
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.WAIT, "wait"))
elif attempt < 5:
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.WAIT, "wait"))
elif attempt == 5:
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.CONTINUE, "cont"))
if attempt == 2:
self.assertEqual(depmgr.CountPendingResults(), 2)
elif attempt > 5:
self.assertEqual(depmgr.CountPendingResults(), 0)
else:
self.assertEqual(depmgr.CountPendingResults(), 1)
result = jqueue._JobProcessor(queue, opexec, job)()
if attempt == 0 or attempt >= 5:
# Job should only be updated if there was an actual change
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(depmgr.CountPendingResults())
if attempt < 5:
# Simulate waiting for other job
self.assertEqual(result, jqueue._JobProcessor.WAITDEP)
self.assertTrue(job.cur_opctx)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
continue
if result == jqueue._JobProcessor.FINISHED:
# Last opcode
self.assertFalse(job.cur_opctx)
break
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_SUCCESS)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_SUCCESS])
self.assertEqual(job.GetInfo(["opresult"]),
[[op.input.result for op in job.ops]])
self.assertEqual(job.GetInfo(["opstatus"]),
[len(job.ops) * [constants.OP_STATUS_SUCCESS]])
self.assertTrue(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops))
self._GenericCheckJob(job)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assertFalse(depmgr.CountPendingResults())
self.assertFalse(depmgr.CountWaitingJobs())
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
def testJobDependencyCancel(self):
depmgr = _FakeDependencyManager()
queue = _FakeQueueForProc(depmgr=depmgr)
self.assertEqual(queue.depmgr, depmgr)
prev_job_id = 13623
job_id = 30876
ops = [
opcodes.OpTestDummy(result="Res0", fail=False),
opcodes.OpTestDummy(result="Res1", fail=False,
depends=[
[prev_job_id, None],
]),
opcodes.OpTestDummy(result="Res2", fail=False),
]
# Create job
job = self._CreateJob(queue, job_id, ops)
def _BeforeStart(timeout, priority):
if attempt == 0 or attempt > 5:
# Job should only be updated when it wasn't waiting for another job
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertFalse(job.cur_opctx)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
self.assertFalse(job.cur_opctx)
# Job is running, cancelling shouldn't be possible
(success, _) = job.Cancel()
self.assertFalse(success)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
counter = itertools.count()
while True:
attempt = counter.next()
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertRaises(IndexError, depmgr.GetNextNotification)
if attempt == 0:
# This will handle the first opcode
pass
elif attempt < 4:
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.WAIT, "wait"))
elif attempt == 4:
# Other job was cancelled
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.CANCEL, "cancel"))
if attempt == 0:
self.assertEqual(depmgr.CountPendingResults(), 0)
else:
self.assertEqual(depmgr.CountPendingResults(), 1)
result = jqueue._JobProcessor(queue, opexec, job)()
if attempt <= 1 or attempt >= 4:
# Job should only be updated if there was an actual change
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(depmgr.CountPendingResults())
if attempt > 0 and attempt < 4:
# Simulate waiting for other job
self.assertEqual(result, jqueue._JobProcessor.WAITDEP)
self.assertTrue(job.cur_opctx)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
continue
if result == jqueue._JobProcessor.FINISHED:
# Last opcode
self.assertFalse(job.cur_opctx)
break
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_CANCELED)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_CANCELED])
self.assertEqual(job.GetInfo(["opstatus", "opresult"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_CANCELED,
constants.OP_STATUS_CANCELED],
["Res0", "Job canceled by request",
"Job canceled by request"]])
self._GenericCheckJob(job)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assertFalse(depmgr.CountPendingResults())
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
def testJobDependencyWrongstatus(self):
depmgr = _FakeDependencyManager()
queue = _FakeQueueForProc(depmgr=depmgr)
self.assertEqual(queue.depmgr, depmgr)
prev_job_id = 9741
job_id = 11763
ops = [
opcodes.OpTestDummy(result="Res0", fail=False),
opcodes.OpTestDummy(result="Res1", fail=False,
depends=[
[prev_job_id, None],
]),
opcodes.OpTestDummy(result="Res2", fail=False),
]
# Create job
job = self._CreateJob(queue, job_id, ops)
def _BeforeStart(timeout, priority):
if attempt == 0 or attempt > 5:
# Job should only be updated when it wasn't waiting for another job
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertFalse(job.cur_opctx)
def _AfterStart(op, cbs):
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
self.assertFalse(job.cur_opctx)
# Job is running, cancelling shouldn't be possible
(success, _) = job.Cancel()
self.assertFalse(success)
opexec = _FakeExecOpCodeForProc(queue, _BeforeStart, _AfterStart)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
counter = itertools.count()
while True:
attempt = counter.next()
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertRaises(IndexError, depmgr.GetNextNotification)
if attempt == 0:
# This will handle the first opcode
pass
elif attempt < 4:
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.WAIT, "wait"))
elif attempt == 4:
# Other job failed
depmgr.AddCheckResult(job, prev_job_id, None,
(jqueue._JobDependencyManager.WRONGSTATUS, "w"))
if attempt == 0:
self.assertEqual(depmgr.CountPendingResults(), 0)
else:
self.assertEqual(depmgr.CountPendingResults(), 1)
result = jqueue._JobProcessor(queue, opexec, job)()
if attempt <= 1 or attempt >= 4:
# Job should only be updated if there was an actual change
self.assertEqual(queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertFalse(depmgr.CountPendingResults())
if attempt > 0 and attempt < 4:
# Simulate waiting for other job
self.assertEqual(result, jqueue._JobProcessor.WAITDEP)
self.assertTrue(job.cur_opctx)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
continue
if result == jqueue._JobProcessor.FINISHED:
# Last opcode
self.assertFalse(job.cur_opctx)
break
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assertEqual(result, jqueue._JobProcessor.DEFER)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_ERROR)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_ERROR])
self.assertEqual(job.GetInfo(["opstatus"]),
[[constants.OP_STATUS_SUCCESS,
constants.OP_STATUS_ERROR,
                       constants.OP_STATUS_ERROR]])
(opresult, ) = job.GetInfo(["opresult"])
self.assertEqual(len(opresult), len(ops))
self.assertEqual(opresult[0], "Res0")
self.assertTrue(errors.GetEncodedError(opresult[1]))
self.assertTrue(errors.GetEncodedError(opresult[2]))
self._GenericCheckJob(job)
self.assertRaises(IndexError, queue.GetNextUpdate)
self.assertRaises(IndexError, depmgr.GetNextNotification)
self.assertFalse(depmgr.CountPendingResults())
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, queue.GetNextUpdate)
class TestEvaluateJobProcessorResult(unittest.TestCase):
def testFinished(self):
depmgr = _FakeDependencyManager()
job = _IdOnlyFakeJob(30953)
jqueue._EvaluateJobProcessorResult(depmgr, job,
jqueue._JobProcessor.FINISHED)
self.assertEqual(depmgr.GetNextNotification(), job.id)
self.assertRaises(IndexError, depmgr.GetNextNotification)
def testDefer(self):
depmgr = _FakeDependencyManager()
job = _IdOnlyFakeJob(11326, priority=5463)
try:
jqueue._EvaluateJobProcessorResult(depmgr, job,
jqueue._JobProcessor.DEFER)
except workerpool.DeferTask, err:
self.assertEqual(err.priority, 5463)
else:
self.fail("Didn't raise exception")
self.assertRaises(IndexError, depmgr.GetNextNotification)
def testWaitdep(self):
depmgr = _FakeDependencyManager()
job = _IdOnlyFakeJob(21317)
jqueue._EvaluateJobProcessorResult(depmgr, job,
jqueue._JobProcessor.WAITDEP)
self.assertRaises(IndexError, depmgr.GetNextNotification)
def testOther(self):
depmgr = _FakeDependencyManager()
job = _IdOnlyFakeJob(5813)
self.assertRaises(errors.ProgrammerError,
jqueue._EvaluateJobProcessorResult,
depmgr, job, "Other result")
self.assertRaises(IndexError, depmgr.GetNextNotification)
class _FakeTimeoutStrategy:
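  """Fake timeout strategy returning a scripted sequence of timeouts.

  Each call to L{NextAttempt} pops the next scripted timeout; once the
  list is exhausted, C{None} is returned to signal a blocking acquire.

  """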
def __init__(self, timeouts):
self.timeouts = timeouts
self.attempts = 0
self.last_timeout = None
def NextAttempt(self):
self.attempts += 1
if self.timeouts:
timeout = self.timeouts.pop(0)
else:
timeout = None
self.last_timeout = timeout
return timeout
class TestJobProcessorTimeouts(unittest.TestCase, _JobProcessorTestUtils):
def setUp(self):
self.queue = _FakeQueueForProc()
self.job = None
self.curop = None
self.opcounter = None
self.timeout_strategy = None
self.retries = 0
self.prev_tsop = None
self.prev_prio = None
self.prev_status = None
self.lock_acq_prio = None
self.gave_lock = None
self.done_lock_before_blocking = False
def _BeforeStart(self, timeout, priority):
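    """Simulates lock acquisition with scripted timeouts.

    Verifies the timeout and priority passed in by the job processor and
    raises L{mcpu.LockAcquireTimeout} while retries remain.

    """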
job = self.job
# If status has changed, job must've been written
if self.prev_status != self.job.ops[self.curop].status:
self.assertEqual(self.queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, self.queue.GetNextUpdate)
self.assertFalse(self.queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
ts = self.timeout_strategy
self.assert_(timeout is None or isinstance(timeout, (int, float)))
self.assertEqual(timeout, ts.last_timeout)
self.assertEqual(priority, job.ops[self.curop].priority)
self.gave_lock = True
self.lock_acq_prio = priority
if (self.curop == 3 and
job.ops[self.curop].priority == constants.OP_PRIO_HIGHEST + 3):
# Give locks before running into blocking acquire
assert self.retries == 7
self.retries = 0
self.done_lock_before_blocking = True
return
if self.retries > 0:
self.assert_(timeout is not None)
self.retries -= 1
self.gave_lock = False
raise mcpu.LockAcquireTimeout()
if job.ops[self.curop].priority == constants.OP_PRIO_HIGHEST:
assert self.retries == 0, "Didn't exhaust all retries at highest priority"
assert not ts.timeouts
self.assert_(timeout is None)
def _AfterStart(self, op, cbs):
job = self.job
# Setting to "running" requires an update
self.assertEqual(self.queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, self.queue.GetNextUpdate)
self.assertFalse(self.queue.IsAcquired())
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_RUNNING)
# Job is running, cancelling shouldn't be possible
(success, _) = job.Cancel()
self.assertFalse(success)
def _NextOpcode(self):
self.curop = self.opcounter.next()
self.prev_prio = self.job.ops[self.curop].priority
self.prev_status = self.job.ops[self.curop].status
def _NewTimeoutStrategy(self):
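    """Builds the scripted timeout strategy for the current opcode."""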
job = self.job
self.assertEqual(self.retries, 0)
if self.prev_tsop == self.curop:
# Still on the same opcode, priority must've been increased
self.assertEqual(self.prev_prio, job.ops[self.curop].priority + 1)
if self.curop == 1:
# Normal retry
timeouts = range(10, 31, 10)
self.retries = len(timeouts) - 1
elif self.curop == 2:
# Let this run into a blocking acquire
timeouts = range(11, 61, 12)
self.retries = len(timeouts)
elif self.curop == 3:
# Wait for priority to increase, but give lock before blocking acquire
timeouts = range(12, 100, 14)
self.retries = len(timeouts)
self.assertFalse(self.done_lock_before_blocking)
elif self.curop == 4:
self.assert_(self.done_lock_before_blocking)
# Timeouts, but no need to retry
timeouts = range(10, 31, 10)
self.retries = 0
elif self.curop == 5:
# Normal retry
timeouts = range(19, 100, 11)
self.retries = len(timeouts)
else:
timeouts = []
self.retries = 0
assert len(job.ops) == 10
assert self.retries <= len(timeouts)
ts = _FakeTimeoutStrategy(timeouts)
self.timeout_strategy = ts
self.prev_tsop = self.curop
self.prev_prio = job.ops[self.curop].priority
return ts
def testTimeout(self):
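    """Runs a ten-opcode job through the scripted lock-timeout scenarios."""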
ops = [opcodes.OpTestDummy(result="Res%s" % i, fail=False)
for i in range(10)]
# Create job
job_id = 15801
job = self._CreateJob(self.queue, job_id, ops)
self.job = job
self.opcounter = itertools.count(0)
opexec = _FakeExecOpCodeForProc(self.queue, self._BeforeStart,
self._AfterStart)
tsf = self._NewTimeoutStrategy
self.assertFalse(self.done_lock_before_blocking)
while True:
proc = jqueue._JobProcessor(self.queue, opexec, job,
_timeout_strategy_factory=tsf)
self.assertRaises(IndexError, self.queue.GetNextUpdate)
if self.curop is not None:
self.prev_status = self.job.ops[self.curop].status
self.lock_acq_prio = None
result = proc(_nextop_fn=self._NextOpcode)
assert self.curop is not None
if result == jqueue._JobProcessor.FINISHED or self.gave_lock:
# Got lock and/or job is done, result must've been written
self.assertFalse(job.cur_opctx)
self.assertEqual(self.queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, self.queue.GetNextUpdate)
self.assertEqual(self.lock_acq_prio, job.ops[self.curop].priority)
self.assert_(job.ops[self.curop].exec_timestamp)
if result == jqueue._JobProcessor.FINISHED:
self.assertFalse(job.cur_opctx)
break
self.assertEqual(result, jqueue._JobProcessor.DEFER)
if self.curop == 0:
self.assertEqual(job.ops[self.curop].start_timestamp,
job.start_timestamp)
if self.gave_lock:
# Opcode finished, but job not yet done
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_QUEUED)
else:
# Did not get locks
self.assert_(job.cur_opctx)
self.assertEqual(job.cur_opctx._timeout_strategy._fn,
self.timeout_strategy.NextAttempt)
self.assertFalse(job.ops[self.curop].exec_timestamp)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_WAITING)
# If priority has changed since acquiring locks, the job must've been
# updated
if self.lock_acq_prio != job.ops[self.curop].priority:
self.assertEqual(self.queue.GetNextUpdate(), (job, True))
self.assertRaises(IndexError, self.queue.GetNextUpdate)
self.assert_(job.start_timestamp)
self.assertFalse(job.end_timestamp)
self.assertEqual(self.curop, len(job.ops) - 1)
self.assertEqual(self.job, job)
self.assertEqual(self.opcounter.next(), len(job.ops))
self.assert_(self.done_lock_before_blocking)
self.assertRaises(IndexError, self.queue.GetNextUpdate)
self.assertEqual(job.CalcStatus(), constants.JOB_STATUS_SUCCESS)
self.assertEqual(job.GetInfo(["status"]), [constants.JOB_STATUS_SUCCESS])
self.assertEqual(job.GetInfo(["opresult"]),
[[op.input.result for op in job.ops]])
self.assertEqual(job.GetInfo(["opstatus"]),
[len(job.ops) * [constants.OP_STATUS_SUCCESS]])
self.assert_(compat.all(op.start_timestamp and op.end_timestamp
for op in job.ops))
# Calling the processor on a finished job should be a no-op
self.assertEqual(jqueue._JobProcessor(self.queue, opexec, job)(),
jqueue._JobProcessor.FINISHED)
self.assertRaises(IndexError, self.queue.GetNextUpdate)
class _IdOnlyFakeJob:
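  """Minimal job stub exposing only its ID and a fixed priority."""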
def __init__(self, job_id, priority=NotImplemented):
self.id = str(job_id)
self._priority = priority
def CalcPriority(self):
return self._priority
class TestJobDependencyManager(unittest.TestCase):
def setUp(self):
self._status = []
self._queue = []
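    # Wire the manager to test doubles: _GetStatus pops scripted
    # (job_id, status) pairs and _Enqueue records jobs being re-added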
self.jdm = jqueue._JobDependencyManager(self._GetStatus, self._Enqueue)
def _GetStatus(self, job_id):
(exp_job_id, result) = self._status.pop(0)
self.assertEqual(exp_job_id, job_id)
return result
def _Enqueue(self, jobs):
self.assertFalse(self.jdm._lock.is_owned(),
msg=("Must not own manager lock while re-adding jobs"
" (potential deadlock)"))
self._queue.append(jobs)
def testNotFinalizedThenCancel(self):
job = _IdOnlyFakeJob(17697)
job_id = str(28625)
self._status.append((job_id, constants.JOB_STATUS_RUNNING))
(result, _) = self.jdm.CheckAndRegister(job, job_id, [])
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self.assertEqual(self.jdm.GetLockInfo([query.LQ_PENDING]), [
("job/28625", None, None, [("job", [job.id])])
])
self._status.append((job_id, constants.JOB_STATUS_CANCELED))
(result, _) = self.jdm.CheckAndRegister(job, job_id, [])
self.assertEqual(result, self.jdm.CANCEL)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
self.assertFalse(self.jdm.GetLockInfo([query.LQ_PENDING]))
def testNotFinalizedThenQueued(self):
# This can happen on a queue shutdown
job = _IdOnlyFakeJob(1320)
job_id = str(22971)
for i in range(5):
if i > 2:
self._status.append((job_id, constants.JOB_STATUS_QUEUED))
else:
self._status.append((job_id, constants.JOB_STATUS_RUNNING))
(result, _) = self.jdm.CheckAndRegister(job, job_id, [])
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self.assertEqual(self.jdm.GetLockInfo([query.LQ_PENDING]), [
("job/22971", None, None, [("job", [job.id])])
])
def testRequireCancel(self):
job = _IdOnlyFakeJob(5278)
job_id = str(9610)
dep_status = [constants.JOB_STATUS_CANCELED]
self._status.append((job_id, constants.JOB_STATUS_WAITING))
(result, _) = self.jdm.CheckAndRegister(job, job_id, dep_status)
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self.assertEqual(self.jdm.GetLockInfo([query.LQ_PENDING]), [
("job/9610", None, None, [("job", [job.id])])
])
self._status.append((job_id, constants.JOB_STATUS_CANCELED))
(result, _) = self.jdm.CheckAndRegister(job, job_id, dep_status)
self.assertEqual(result, self.jdm.CONTINUE)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
self.assertFalse(self.jdm.GetLockInfo([query.LQ_PENDING]))
def testRequireError(self):
job = _IdOnlyFakeJob(21459)
job_id = str(25519)
dep_status = [constants.JOB_STATUS_ERROR]
self._status.append((job_id, constants.JOB_STATUS_WAITING))
(result, _) = self.jdm.CheckAndRegister(job, job_id, dep_status)
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self._status.append((job_id, constants.JOB_STATUS_ERROR))
(result, _) = self.jdm.CheckAndRegister(job, job_id, dep_status)
self.assertEqual(result, self.jdm.CONTINUE)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
self.assertFalse(self.jdm.GetLockInfo([query.LQ_PENDING]))
def testRequireMultiple(self):
dep_status = list(constants.JOBS_FINALIZED)
for end_status in dep_status:
job = _IdOnlyFakeJob(21343)
job_id = str(14609)
self._status.append((job_id, constants.JOB_STATUS_WAITING))
(result, _) = self.jdm.CheckAndRegister(job, job_id, dep_status)
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self.assertEqual(self.jdm.GetLockInfo([query.LQ_PENDING]), [
("job/14609", None, None, [("job", [job.id])])
])
self._status.append((job_id, end_status))
(result, _) = self.jdm.CheckAndRegister(job, job_id, dep_status)
self.assertEqual(result, self.jdm.CONTINUE)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
self.assertFalse(self.jdm.GetLockInfo([query.LQ_PENDING]))
def testNotify(self):
job = _IdOnlyFakeJob(8227)
job_id = str(4113)
self._status.append((job_id, constants.JOB_STATUS_RUNNING))
(result, _) = self.jdm.CheckAndRegister(job, job_id, [])
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self.jdm.NotifyWaiters(job_id)
self.assertFalse(self._status)
self.assertFalse(self.jdm._waiters)
self.assertFalse(self.jdm.JobWaiting(job))
self.assertEqual(self._queue, [set([job])])
def testWrongStatus(self):
job = _IdOnlyFakeJob(10102)
job_id = str(1271)
self._status.append((job_id, constants.JOB_STATUS_QUEUED))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self._status.append((job_id, constants.JOB_STATUS_ERROR))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.WRONGSTATUS)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
def testCorrectStatus(self):
job = _IdOnlyFakeJob(24273)
job_id = str(23885)
self._status.append((job_id, constants.JOB_STATUS_QUEUED))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set([job]),
})
self._status.append((job_id, constants.JOB_STATUS_SUCCESS))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.CONTINUE)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
def testFinalizedRightAway(self):
job = _IdOnlyFakeJob(224)
job_id = str(3081)
self._status.append((job_id, constants.JOB_STATUS_SUCCESS))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.CONTINUE)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, {
job_id: set(),
})
# Force cleanup
self.jdm.NotifyWaiters("0")
self.assertFalse(self.jdm._waiters)
self.assertFalse(self._status)
self.assertFalse(self._queue)
def testMultipleWaiting(self):
# Use a deterministic random generator
rnd = random.Random(21402)
job_ids = map(str, rnd.sample(range(1, 10000), 150))
waiters = dict((job_ids.pop(),
set(map(_IdOnlyFakeJob,
[job_ids.pop()
for _ in range(rnd.randint(1, 20))])))
for _ in range(10))
# Ensure there are no duplicate job IDs
assert not utils.FindDuplicates(waiters.keys() +
[job.id
for jobs in waiters.values()
for job in jobs])
# Register all jobs as waiters
for job_id, job in [(job_id, job)
for (job_id, jobs) in waiters.items()
for job in jobs]:
self._status.append((job_id, constants.JOB_STATUS_QUEUED))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.WAIT)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertTrue(self.jdm.JobWaiting(job))
self.assertEqual(self.jdm._waiters, waiters)
def _MakeSet((name, mode, owner_names, pending)):
return (name, mode, owner_names,
[(pendmode, set(pend)) for (pendmode, pend) in pending])
def _CheckLockInfo():
info = self.jdm.GetLockInfo([query.LQ_PENDING])
self.assertEqual(sorted(map(_MakeSet, info)), sorted([
("job/%s" % job_id, None, None,
[("job", set([job.id for job in jobs]))])
for job_id, jobs in waiters.items()
if jobs
]))
_CheckLockInfo()
# Notify in random order
for job_id in rnd.sample(waiters, len(waiters)):
# Remove from pending waiter list
jobs = waiters.pop(job_id)
for job in jobs:
self._status.append((job_id, constants.JOB_STATUS_SUCCESS))
(result, _) = self.jdm.CheckAndRegister(job, job_id,
[constants.JOB_STATUS_SUCCESS])
self.assertEqual(result, self.jdm.CONTINUE)
self.assertFalse(self._status)
self.assertFalse(self._queue)
self.assertFalse(self.jdm.JobWaiting(job))
_CheckLockInfo()
self.assertFalse(self.jdm.GetLockInfo([query.LQ_PENDING]))
assert not waiters
def testSelfDependency(self):
job = _IdOnlyFakeJob(18937)
self._status.append((job.id, constants.JOB_STATUS_SUCCESS))
(result, _) = self.jdm.CheckAndRegister(job, job.id, [])
self.assertEqual(result, self.jdm.ERROR)
def testJobDisappears(self):
job = _IdOnlyFakeJob(30540)
job_id = str(23769)
def _FakeStatus(_):
raise errors.JobLost("#msg#")
jdm = jqueue._JobDependencyManager(_FakeStatus, None)
(result, _) = jdm.CheckAndRegister(job, job_id, [])
self.assertEqual(result, self.jdm.ERROR)
self.assertFalse(jdm.JobWaiting(job))
self.assertFalse(jdm.GetLockInfo([query.LQ_PENDING]))
if __name__ == "__main__":
testutils.GanetiTestProgram()
| gpl-2.0 | -6,711,925,957,121,488,000 | 34.288604 | 80 | 0.642853 | false |
Nuevosmedios/ADL_LRS | adl_lrs/settings.py | 1 | 7317 | # Django settings for adl_lrs project.
from unipath import Path
# Root of LRS
SETTINGS_PATH = Path(__file__)
PROJECT_ROOT = SETTINGS_PATH.ancestor(3)
# If you want to debug
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lrs',
'USER': 'root',
'PASSWORD': 'password',
'HOST': 'localhost',
'PORT': '',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-US'
# The ID, as an integer, of the current site in the django_site database table.
# This is used so that application data can hook into specific sites and a single database can manage
# content for multiple sites.
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = PROJECT_ROOT.child('media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# Where to be redirected after logging in
LOGIN_REDIRECT_URL = '/XAPI/me'
# The "me" view has a tab listing the user's statements
STMTS_PER_PAGE = 10
# Whether HTTP auth or OAuth is enabled
HTTP_AUTH_ENABLED = True
OAUTH_ENABLED = False
# OAuth callback views
OAUTH_AUTHORIZE_VIEW = 'oauth_provider.views.authorize_client'
OAUTH_CALLBACK_VIEW = 'oauth_provider.views.callback_view'
OAUTH_SIGNATURE_METHODS = ['plaintext', 'hmac-sha1', 'rsa-sha1']
OAUTH_REALM_KEY_NAME = 'http://localhost:8000/XAPI'
# Limit on number of statements the server will return
SERVER_STMT_LIMIT = 100
# ActivityID resolve timeout (seconds)
ACTIVITY_ID_RESOLVE_TIMEOUT = .2
# Caches for /more endpoint and attachments
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'cache_statement_list',
'TIMEOUT': 86400,
},
    'attachment_cache': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'attachment_cache',
    },
}
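# A minimal sketch (an illustrative assumption, not part of this deployment)
# of pointing the statement-list cache at a local memcached instance instead
# of the database-backed cache above:
#
# CACHES['default'] = {
#     'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#     'LOCATION': '127.0.0.1:11211',
#     'TIMEOUT': 86400,
# }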
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'v+m%^r0x)$_x8i3trn*duc6vd-yju0kx2b#9lk0sn2k^7cgyp5'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages"
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'lrs.util.AllowOriginMiddleware.AllowOriginMiddleware',
# Uncomment the next line for simple clickjacking protection:
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# Main url router
ROOT_URLCONF = 'adl_lrs.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'adl_lrs.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'lrs',
'gunicorn',
'oauth_provider',
'django.contrib.admin',
'django_extensions'
)
REQUEST_HANDLER_LOG_DIR = SETTINGS_PATH.ancestor(3) + '/logs/django_request.log'
DEFAULT_LOG_DIR = SETTINGS_PATH.ancestor(3) + '/logs/lrs.log'
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': u'%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'standard': {
'format': u'%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
'simple': {
'format': u'%(levelname)s %(message)s'
},
},
'handlers': {
        'default': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': DEFAULT_LOG_DIR,
            'maxBytes': 1024 * 1024 * 5,  # 5 MB
            'backupCount': 5,
            'formatter': 'standard',
        },
        'request_handler': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': REQUEST_HANDLER_LOG_DIR,
            'maxBytes': 1024 * 1024 * 5,  # 5 MB
            'backupCount': 5,
            'formatter': 'standard',
        },
},
'loggers': {
'lrs': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
}
}
| apache-2.0 | -8,551,532,077,530,078,000 | 30.813043 | 101 | 0.66981 | false |
tomasdubec/openstack-cinder | cinder/tests/scheduler/test_host_filters.py | 1 | 6424 | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Host Filters.
"""
import httplib
import stubout
from cinder import context
from cinder import db
from cinder import exception
from cinder.openstack.common import jsonutils
from cinder.openstack.common.scheduler import filters
from cinder import test
from cinder.tests.scheduler import fakes
from cinder.tests import utils as test_utils
from cinder import utils
DATA = ''
def stub_out_https_backend(stubs):
"""
    Stubs out httplib.HTTPConnection.getresponse to return
    faked-out data instead of grabbing actual contents of a resource.
    The stubbed response's read() returns the module-level DATA string
    instead of real resource contents.
:param stubs: Set of stubout stubs
"""
class FakeHTTPResponse(object):
def read(self):
return DATA
def fake_do_request(self, *args, **kwargs):
return httplib.OK, FakeHTTPResponse()
class HostFiltersTestCase(test.TestCase):
"""Test case for host filters."""
def setUp(self):
super(HostFiltersTestCase, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_https_backend(self.stubs)
self.context = context.RequestContext('fake', 'fake')
self.json_query = jsonutils.dumps(
['and', ['>=', '$free_capacity_gb', 1024],
['>=', '$total_capacity_gb', 10 * 1024]])
# This has a side effect of testing 'get_filter_classes'
# when specifying a method (in this case, our standard filters)
filter_handler = filters.HostFilterHandler('cinder.scheduler.filters')
classes = filter_handler.get_all_classes()
self.class_map = {}
for cls in classes:
self.class_map[cls.__name__] = cls
def _stub_service_is_up(self, ret_value):
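        # Force utils.service_is_up() to report the given availability for
        # any service passed in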
def fake_service_is_up(service):
return ret_value
self.stubs.Set(utils, 'service_is_up', fake_service_is_up)
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_capacity_filter_passes(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['CapacityFilter']()
filter_properties = {'size': 100}
service = {'disabled': False}
host = fakes.FakeHostState('host1',
{'free_capacity_gb': 200,
'updated_at': None,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_capacity_filter_fails(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['CapacityFilter']()
filter_properties = {'size': 100}
service = {'disabled': False}
host = fakes.FakeHostState('host1',
{'free_capacity_gb': 120,
'reserved_percentage': 20,
'updated_at': None,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_capacity_filter_passes_infinite(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['CapacityFilter']()
filter_properties = {'size': 100}
service = {'disabled': False}
host = fakes.FakeHostState('host1',
{'free_capacity_gb': 'infinite',
'updated_at': None,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_capacity_filter_passes_unknown(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['CapacityFilter']()
filter_properties = {'size': 100}
service = {'disabled': False}
host = fakes.FakeHostState('host1',
{'free_capacity_gb': 'unknown',
'updated_at': None,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_retry_filter_disabled(self):
# Test case where retry/re-scheduling is disabled.
filt_cls = self.class_map['RetryFilter']()
host = fakes.FakeHostState('host1', {})
filter_properties = {}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_retry_filter_pass(self):
# Node not previously tried.
filt_cls = self.class_map['RetryFilter']()
host = fakes.FakeHostState('host1', {})
retry = dict(num_attempts=2, hosts=['host2'])
filter_properties = dict(retry=retry)
self.assertTrue(filt_cls.host_passes(host, filter_properties))
@test.skip_if(not test_utils.is_cinder_installed(),
'Test requires Cinder installed')
def test_retry_filter_fail(self):
# Node was already tried.
filt_cls = self.class_map['RetryFilter']()
host = fakes.FakeHostState('host1', {})
retry = dict(num_attempts=1, hosts=['host1'])
filter_properties = dict(retry=retry)
self.assertFalse(filt_cls.host_passes(host, filter_properties))
| apache-2.0 | -3,891,531,009,832,143,000 | 39.402516 | 78 | 0.603674 | false |
Diacamma2/asso | diacamma/asso/views.py | 1 | 1391 | # -*- coding: utf-8 -*-
'''
Asso module to declare a new Diacamma application
@author: Laurent GAY
@organization: sd-libre.fr
@contact: [email protected]
@copyright: 2015 sd-libre.fr
@license: This file is part of Lucterios.
Lucterios is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lucterios is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lucterios. If not, see <http://www.gnu.org/licenses/>.
'''
from __future__ import unicode_literals
from os.path import join, dirname, isfile
from lucterios.framework import signal_and_lock
from lucterios.CORE.parameters import Params
@signal_and_lock.Signal.decorate('initial_account')
def initial_account_asso(account_list):
from diacamma.accounting.system import accounting_system_ident
filename = join(dirname(__file__), 'init_%s.csv' % accounting_system_ident(Params.getvalue("accounting-system")))
if isinstance(account_list, list):
account_list.append(filename)
return isfile(filename)
| gpl-3.0 | -8,452,789,103,961,414,000 | 34.666667 | 117 | 0.765636 | false |
mikalstill/ostrich | ostrich/stages/stage_40_enable_proxies.py | 1 | 3000 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ostrich import steps
def get_steps(r):
"""Configure proxies."""
nextsteps = []
if r.complete['osa-branch'] == 'stable/mitaka':
if r.complete['http-proxy'] and r.complete['http-proxy'] != 'none':
local_servers = 'localhost,127.0.0.1'
if r.complete['local-cache'] != 'none':
local_servers += ',%s' % r.complete['local-cache']
r.kwargs['env'].update({'http_proxy': r.complete['http-proxy'],
'https_proxy': r.complete['http-proxy'],
'no_proxy': local_servers})
# This entry will only last until it is clobbered by ansible
nextsteps.append(
steps.FileAppendStep(
'proxy-environment',
'/etc/environment',
(('\n\nexport http_proxy="%(proxy)s"\n'
'export HTTP_PROXY="%(proxy)s"\n'
'export https_proxy="%(proxy)s"\n'
'export HTTPS_PROXY="%(proxy)s"\n'
'export ftp_proxy="%(proxy)s"\n'
'export FTP_PROXY="%(proxy)s"\n'
'export no_proxy=%(local)s\n'
                      'export NO_PROXY=%(local)s\n')
% {'proxy': r.complete['http-proxy'],
'local': local_servers}),
**r.kwargs)
)
replacements = [
('(http|https|git)://github.com',
r.complete['git-mirror-github']),
('(http|https|git)://git.openstack.org',
r.complete['git-mirror-openstack']),
]
if r.complete['local-cache'] != 'none':
replacements.append(
('https://rpc-repo.rackspace.com',
'http://%s/rpc-repo.rackspace.com' % r.complete['local-cache'])
)
nextsteps.append(
steps.BulkRegexpEditorStep(
'bulk-edit-osa',
'/opt/openstack-ansible',
'.*\.(ini|yml|sh)$',
replacements,
**r.kwargs)
)
nextsteps.append(
steps.BulkRegexpEditorStep(
'unapply-git-mirrors-for-cgit',
'/opt/openstack-ansible',
'.*\.(ini|yml|sh)$',
[
('%s/cgit' % r.complete['git-mirror-openstack'],
'https://git.openstack.org/cgit')
],
**r.kwargs)
)
return nextsteps
| apache-2.0 | -6,931,821,039,428,020,000 | 35.144578 | 76 | 0.515333 | false |
pozetroninc/micropython | tests/float/float1.py | 1 | 2194 | # test basic float capabilities
# literals
print(.12)
print(1.)
print(1.2)
print(0e0)
print(0e+0)
print(0e-0)
# float construction
print(float(1.2))
print(float("1.2"))
print(float("+1"))
print(float("1e1"))
print(float("1e+1"))
print(float("1e-1"))
print(float("inf"))
print(float("-inf"))
print(float("INF"))
print(float("infinity"))
print(float("INFINITY"))
print(float("nan"))
print(float("-nan"))
print(float("NaN"))
try:
float("")
except ValueError:
print("ValueError")
try:
float("1e+")
except ValueError:
print("ValueError")
try:
float("1z")
except ValueError:
print("ValueError")
# construct from something with the buffer protocol
print(float(b"1.2"))
print(float(bytearray(b"3.4")))
# unary operators
print(bool(0.0))
print(bool(1.2))
print(+(1.2))
print(-(1.2))
# division of integers
x = 1 / 2
print(x)
# /= operator
a = 1
a /= 2
print(a)
# floor division
print(1.0 // 2)
print(2.0 // 2)
# comparison
print(1.2 <= 3.4)
print(1.2 <= -3.4)
print(1.2 >= 3.4)
print(1.2 >= -3.4)
print(0.0 == False, 1.0 == True)
print(False == 0.0, True == 1.0)
# comparison of nan is special
nan = float('nan')
print(nan == 1.2)
print(nan == nan)
try:
1.0 / 0
except ZeroDivisionError:
print("ZeroDivisionError")
try:
1.0 // 0
except ZeroDivisionError:
print("ZeroDivisionError")
try:
1.2 % 0
except ZeroDivisionError:
print("ZeroDivisionError")
try:
0.0 ** -1
except ZeroDivisionError:
print("ZeroDivisionError")
# unsupported unary ops
try:
~1.2
except TypeError:
print("TypeError")
try:
1.2 in 3.4
except TypeError:
print("TypeError")
# small int on LHS, float on RHS, unsupported op
try:
print(1 | 1.0)
except TypeError:
print('TypeError')
# can't convert list to float
try:
float([])
except TypeError:
print("TypeError")
# test constant float with more than 255 chars
x = 1.84728699436059052516398251149631771898472869943605905251639825114963177189847286994360590525163982511496317718984728699436059052516398251149631771898472869943605905251639825114963177189847286994360590525163982511496317718984728699436059052516398251149631771898472869943605905251639825114963177189
print("%.5f" % x)
| mit | 6,278,333,151,221,868,000 | 17.132231 | 302 | 0.691431 | false |
ChaoticEvil/django_base | src/apps/pages/admin.py | 1 | 4774 | from django.contrib import admin
from .models import Page, Menu
from django.shortcuts import render, HttpResponse
from mptt.exceptions import InvalidMove
from apps.feincms.admin import tree_editor
from django_base.settings import SITE_NAME
from django.utils.translation import ugettext_lazy as _
from .forms import MenuParentForm
class PageAdmin(admin.ModelAdmin):
fieldsets = [
        (_('Meta information'), {
'classes': ('collapse',),
'fields': [
'mdesc', 'mkeys', 'mtitle'
],
}),
        (_('Page content'), {
'fields': [
'title', 'body'
],
}),
        (_('Template and status'), {
'fields': [
('template', 'is_public', 'is_index')
]
}),
        (_('Gallery'), {
'fields': [
'gallery'
]
}),
]
raw_id_fields = ('gallery',)
autocomplete_lookup_fields = {'fk': ['gallery']}
list_display = ('id', 'title', 'is_public', 'last_update')
list_display_links = ('id', 'title')
list_editable = ('is_public',)
list_per_page = 100
sortable_field_name = 'title'
search_fields = ['title', 'body']
list_filter = ['last_update', 'is_public', 'is_index']
class MenuAdmin(tree_editor.TreeEditor):
fieldsets = [
        (_('Menu item'), {
'fields': [
('title', 'slug'), 'parent', 'external_link', 'page',
('is_visible', 'is_separate')
]
}),
]
prepopulated_fields = {'slug': ('title',)}
raw_id_fields = ('parent', 'page')
autocomplete_lookup_fields = {'fk': ['page']}
list_display = ('title', 'get_link', 'position', 'is_visible', 'is_separate')
list_display_links = ('title',)
list_editable = ('position', 'is_visible', 'is_separate')
list_per_page = 100
sortable_field_name = 'position'
search_fields = ['title', 'slug']
list_filter = ['is_visible']
actions = ['inc_position_action', 'dec_position_action', 'set_parent_action']
def get_title(self, obj):
return '%s (%s)' % (obj.title, obj.slug)
    get_title.short_description = _('Title (link)')
def get_link(self, obj):
return obj.external_link if obj.external_link else '%s/pages/%s/' % (SITE_NAME, obj.slug)
    get_link.short_description = _('Link')
def inc_position_action(self, request, queryset):
for q in queryset:
q.inc_position()
        self.message_user(request, _('The position of the selected pages was increased by 1'))
    inc_position_action.short_description = _('Increase the ordering of the selected items')
def dec_position_action(self, request, queryset):
for q in queryset:
q.dec_position()
        self.message_user(request, _('The position of the selected pages was decreased by 1'))
    dec_position_action.short_description = _('Decrease the ordering of the selected items')
def set_parent_action(self, request, queryset):
if 'do_action' in request.POST:
form = MenuParentForm(request.POST)
if form.is_valid():
for q in queryset:
try:
q.move_to(form.cleaned_data['page'])
except InvalidMove as e:
return HttpResponse(
_(
                                '''Error!<br>
                                %s<br><br>
                                <a href='/admin/'>Back to the admin</a>'''
) % e,
content_type='text/html'
)
Menu.objects.rebuild()
                return  # Return nothing; this takes us back to the item list
else:
form = MenuParentForm()
return render(
request,
'admin/set_parent.html',
{
                'title': _('Choose the parent menu item under which to move the selected pages'),
'objects': queryset, 'form': form
}
)
    set_parent_action.short_description = _('Move the selected pages into a parent category')
admin.site.register(Page, PageAdmin)
admin.site.register(Menu, MenuAdmin)
| gpl-3.0 | -8,807,315,346,340,670,000 | 33.935484 | 112 | 0.537627 | false |
skylina/test | public/BasePage.py | 1 | 6311 | # -*- coding: utf-8 -*-
from selenium.webdriver.support.wait import WebDriverWait
from selenium import webdriver
__author__ = 'lina'
import time
import sys
import xlrd.sheet
import time, os
class Action:
"""
BasePage封装所有页面都公用的方法,例如driver, url
"""
driver = None
# 初始化driver、url、等
def __init__(self, base_url=None, pagetitle=None):
self.base_url = base_url
self.pagetitle = pagetitle
# self.driver = webdriver.Firefox()
# self.driver.implicitly_wait(30)
# self.driver = driver
"""
通过传参选择启动浏览器
# self.browser = "Firefox" #传入浏览器对象
# if Action.driver == None:
# if self.browser.upper() == 'IE': Action.driver = webdriver.Ie()
# elif self.browser.upper() == 'CHROME': Action.driver = webdriver.Chrome()
# elif self.browser.upper() == 'FIREFOX': Action.driver = webdriver.Firefox()
# elif self.browser.upper() == 'SAFARI': Action.driver = webdriver.Safari()
# else: Action.driver = webdriver.Ie()
# Action.driver.maximize_window()
# #pass
# #print u"加载浏览器驱动失败!"
# self.driver = Action.driver
self.verificationErrors = []
"""
    # Open the page and verify that it loaded correctly
    def _open(self, url, pagetitle):
        # Open the target address with get()
        self.driver.get(url)
        self.driver.maximize_window()
        # Assert that the opened page matches the configured title, via on_page()
        assert self.on_page(pagetitle), u"Failed to open page %s" % url
    # Wrapped element-locating method
def find_element(self, *loc):
# return self.driver.find_element(*loc)
try:
WebDriverWait(self.driver, 10).until(lambda driver: driver.find_element(*loc).is_displayed())
return self.driver.find_element(*loc)
except:
print (u"%s 页面中未能找到 %s 元素" % (self, loc))
    # Wrapped locator for a group of elements
def find_elements(self, *loc):
# return self.driver.find_element(*loc)
try:
if len(self.driver.find_elements(*loc)):
return self.driver.find_elements(*loc)
except:
print (u"%s 页面中未能找到 %s 元素" % (self, loc))
    # Locate the i-th element (0-based) within a group of elements
def find_elements_i(self, loc, index=None):
# return self.driver.find_element(*loc)
try:
if len(self.driver.find_elements(*loc)):
return self.driver.find_elements(*loc)[index]
except:
print (u"%s 页面中未能找到%s的第 %s 个元素 " % (self, loc, index))
    # Wrapped switch_frame method
def switch_frame(self, loc):
return self.driver.switch_to_frame(loc)
    # open(): call _open() to load the page
def open(self):
self._open(self.base_url, self.pagetitle)
    # Check whether the configured title appears in the current page title and return the result (True/False)
def on_page(self, pagetitle):
return pagetitle in self.driver.title
    # script(): execute a js snippet
def script(self, src):
self.driver.execute_script(src)
    # Wrapped send_keys method
def send_keys(self, loc, vaule, clear_first=True, click_first=True):
try:
if click_first:
self.find_element(*loc).click()
if clear_first:
self.find_element(*loc).clear()
self.find_element(*loc).send_keys(vaule)
except AttributeError:
print (u"%s 页面中未能找到 %s 元素" % (self, loc))
def checkTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
self.saveScreenshot(self.driver, "Error")
            raise AssertionError(msg)
else:
return False
    # Read a worksheet from an Excel file
    def setTable(self, filepath, sheetname):
        """
        filepath: path to the workbook
        sheetname: sheet name
        """
        data = xlrd.open_workbook(filepath)
        # Fetch the worksheet by name
        table = data.sheet_by_name(sheetname)
return table
    # Read the xls sheet, yielding rows one at a time via a generator
    def getTabledata(self, filepath, sheetname):
        """
        filepath: path to the workbook
        sheetname: sheet name
        """
        table = self.setTable(filepath, sheetname)
        for args in range(1, table.nrows):
            # generator: yield one row of values at a time
            yield table.row_values(args)
    # Read a single cell value
    def getcelldata(self, sheetname, RowNum, ColNum, filepath="dataEngine\\data.xls"):
        """
        sheetname: sheet name
        RowNum: row index, starting at 0
        ColNum: column index, starting at 0
        """
        table = self.setTable(filepath, sheetname)
        celldata = table.cell_value(RowNum, ColNum)
        return celldata
    # Read an element's locator strategy and unique identifier
    def locate(self, index, filepath="dataEngine\\data.xls", sheetname="element"):
        """
        filepath: path to the workbook
        sheetname: sheet name
        index: element key
        Returns tuples such as ("id","inputid") or ("xpath","/html/body/header/div[1]/nav")
        """
table = self.setTable(filepath, sheetname)
for i in range(1, table.nrows):
if index in table.row_values(i):
return table.row_values(i)[1:3]
    # savePngName: build the screenshot file name
    def savePngName(self, name):
        """
        name: custom image name
        """
day = time.strftime('%Y-%m-%d', time.localtime(time.time()))
fp = "result\\" + day + "\\image"
tm = self.saveTime()
type = ".png"
        # If the screenshot directory exists, return the full file name; otherwise create the directory first, then return it
if os.path.exists(fp):
filename = str(fp) + "\\" + str(tm) + str(" ") + str(name) + str(type)
print (filename)
# print "True"
return filename
else:
os.makedirs(fp)
filename = str(fp) + "\\" + str(tm) + str(" ") + str(name) + str(type)
print (filename)
# print "False"
return filename
    # Get the current system time
    def saveTime(self):
        """
        Return the current system time formatted as e.g. (2015-11-25 15_21_55)
        """
return time.strftime('%Y-%m-%d %H_%M_%S', time.localtime(time.time()))
    # saveScreenshot: take a screenshot and save it under the given name
    def saveScreenshot(self, driver, name):
        """
        Snapshot screenshot.
        name: image name
        """
        # current working directory
# print os.getcwd()
image = self.driver.save_screenshot(self.savePngName(name))
return image
def save_img(self, img_name):
pic_name = self.savePngName(img_name)
print(pic_name)
#filepath = path.join(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))), pic_name)
#print(filepath)
self.driver.save_screenshot(pic_name)
time.sleep(5)
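# --- Hedged usage sketch (illustrative only; the URL, title and locator keys
# below are made up). Page objects subclass Action and reuse its wrapped
# helpers; locator tuples such as ("id", "inputid") come from the data.xls
# "element" sheet via locate().
class ExampleLoginPage(Action):
    def __init__(self, driver):
        Action.__init__(self, base_url="http://example.com/login",
                        pagetitle=u"Login")
        self.driver = driver
    def do_login(self, username, password):
        self.open()
        self.send_keys(self.locate("username"), username)
        self.send_keys(self.locate("password"), password)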
| gpl-2.0 | -5,157,220,809,121,450,000 | 25.131707 | 99 | 0.673138 | false |
matrix-org/synapse | synapse/metrics/background_process_metrics.py | 1 | 10401 | # Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
from functools import wraps
from typing import TYPE_CHECKING, Dict, Optional, Set, Union
from prometheus_client.core import REGISTRY, Counter, Gauge
from twisted.internet import defer
from synapse.logging.context import LoggingContext, PreserveLoggingContext
from synapse.logging.opentracing import (
SynapseTags,
noop_context_manager,
start_active_span,
)
from synapse.util.async_helpers import maybe_awaitable
if TYPE_CHECKING:
import resource
logger = logging.getLogger(__name__)
_background_process_start_count = Counter(
"synapse_background_process_start_count",
"Number of background processes started",
["name"],
)
_background_process_in_flight_count = Gauge(
"synapse_background_process_in_flight_count",
"Number of background processes in flight",
labelnames=["name"],
)
# we set registry=None in all of these to stop them getting registered with
# the default registry. Instead we collect them all via the CustomCollector,
# which ensures that we can update them before they are collected.
#
_background_process_ru_utime = Counter(
"synapse_background_process_ru_utime_seconds",
"User CPU time used by background processes, in seconds",
["name"],
registry=None,
)
_background_process_ru_stime = Counter(
"synapse_background_process_ru_stime_seconds",
"System CPU time used by background processes, in seconds",
["name"],
registry=None,
)
_background_process_db_txn_count = Counter(
"synapse_background_process_db_txn_count",
"Number of database transactions done by background processes",
["name"],
registry=None,
)
_background_process_db_txn_duration = Counter(
"synapse_background_process_db_txn_duration_seconds",
(
"Seconds spent by background processes waiting for database "
"transactions, excluding scheduling time"
),
["name"],
registry=None,
)
_background_process_db_sched_duration = Counter(
"synapse_background_process_db_sched_duration_seconds",
"Seconds spent by background processes waiting for database connections",
["name"],
registry=None,
)
# map from description to a counter, so that we can name our logcontexts
# incrementally. (It actually duplicates _background_process_start_count, but
# it's much simpler to do so than to try to combine them.)
_background_process_counts = {} # type: Dict[str, int]
# Set of all running background processes that became active since the
# last time metrics were scraped (i.e. background processes that performed some
# work since the last scrape.)
#
# We do it like this to handle the case where we have a large number of
# background processes stacking up behind a lock or linearizer, where we then
# only need to iterate over and update metrics for the process that have
# actually been active and can ignore the idle ones.
_background_processes_active_since_last_scrape = set() # type: Set[_BackgroundProcess]
# A lock that covers the above set and dict
_bg_metrics_lock = threading.Lock()
class _Collector:
"""A custom metrics collector for the background process metrics.
Ensures that all of the metrics are up-to-date with any in-flight processes
before they are returned.
"""
def collect(self):
global _background_processes_active_since_last_scrape
# We swap out the _background_processes set with an empty one so that
# we can safely iterate over the set without holding the lock.
with _bg_metrics_lock:
_background_processes_copy = _background_processes_active_since_last_scrape
_background_processes_active_since_last_scrape = set()
for process in _background_processes_copy:
process.update_metrics()
# now we need to run collect() over each of the static Counters, and
# yield each metric they return.
for m in (
_background_process_ru_utime,
_background_process_ru_stime,
_background_process_db_txn_count,
_background_process_db_txn_duration,
_background_process_db_sched_duration,
):
for r in m.collect():
yield r
REGISTRY.register(_Collector())
class _BackgroundProcess:
def __init__(self, desc, ctx):
self.desc = desc
self._context = ctx
self._reported_stats = None
def update_metrics(self):
"""Updates the metrics with values from this process."""
new_stats = self._context.get_resource_usage()
if self._reported_stats is None:
diff = new_stats
else:
diff = new_stats - self._reported_stats
self._reported_stats = new_stats
_background_process_ru_utime.labels(self.desc).inc(diff.ru_utime)
_background_process_ru_stime.labels(self.desc).inc(diff.ru_stime)
_background_process_db_txn_count.labels(self.desc).inc(diff.db_txn_count)
_background_process_db_txn_duration.labels(self.desc).inc(
diff.db_txn_duration_sec
)
_background_process_db_sched_duration.labels(self.desc).inc(
diff.db_sched_duration_sec
)
def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwargs):
"""Run the given function in its own logcontext, with resource metrics
This should be used to wrap processes which are fired off to run in the
background, instead of being associated with a particular request.
It returns a Deferred which completes when the function completes, but it doesn't
follow the synapse logcontext rules, which makes it appropriate for passing to
clock.looping_call and friends (or for firing-and-forgetting in the middle of a
normal synapse async function).
Args:
desc: a description for this background process type
func: a function, which may return a Deferred or a coroutine
bg_start_span: Whether to start an opentracing span. Defaults to True.
Should only be disabled for processes that will not log to or tag
a span.
args: positional args for func
kwargs: keyword args for func
Returns: Deferred which returns the result of func, but note that it does not
follow the synapse logcontext rules.
"""
async def run():
with _bg_metrics_lock:
count = _background_process_counts.get(desc, 0)
_background_process_counts[desc] = count + 1
_background_process_start_count.labels(desc).inc()
_background_process_in_flight_count.labels(desc).inc()
with BackgroundProcessLoggingContext(desc, count) as context:
try:
if bg_start_span:
ctx = start_active_span(
f"bgproc.{desc}", tags={SynapseTags.REQUEST_ID: str(context)}
)
else:
ctx = noop_context_manager()
with ctx:
return await maybe_awaitable(func(*args, **kwargs))
except Exception:
logger.exception(
"Background process '%s' threw an exception",
desc,
)
finally:
_background_process_in_flight_count.labels(desc).dec()
with PreserveLoggingContext():
# Note that we return a Deferred here so that it can be used in a
# looping_call and other places that expect a Deferred.
return defer.ensureDeferred(run())
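# --- Hedged usage sketch (added for illustration): because the returned
# Deferred deliberately ignores logcontext rules, it can be handed straight
# to scheduling helpers. `clock` is assumed to be a synapse Clock exposing
# looping_call, and `store` to expose an async prune() coroutine.
def _example_schedule_pruning(clock, store):
    return clock.looping_call(
        run_as_background_process, 60 * 1000, "prune_old_entries", store.prune
    )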
def wrap_as_background_process(desc):
"""Decorator that wraps a function that gets called as a background
process.
Equivalent of calling the function with `run_as_background_process`
"""
def wrap_as_background_process_inner(func):
@wraps(func)
def wrap_as_background_process_inner_2(*args, **kwargs):
return run_as_background_process(desc, func, *args, **kwargs)
return wrap_as_background_process_inner_2
return wrap_as_background_process_inner
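# --- Hedged illustration (unused by the module itself): the decorator form
# turns a coroutine into a function returning a metrics-tracked Deferred.
@wrap_as_background_process("example_noop")
async def _example_noop():
    """Calling _example_noop() runs this body as a background process."""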
class BackgroundProcessLoggingContext(LoggingContext):
"""A logging context that tracks in flight metrics for background
processes.
"""
__slots__ = ["_proc"]
def __init__(self, name: str, instance_id: Optional[Union[int, str]] = None):
"""
Args:
name: The name of the background process. Each distinct `name` gets a
separate prometheus time series.
            instance_id: an identifier to add to `name` to distinguish this instance of
the named background process in the logs. If this is `None`, one is
made up based on id(self).
"""
if instance_id is None:
instance_id = id(self)
super().__init__("%s-%s" % (name, instance_id))
self._proc = _BackgroundProcess(name, self)
def start(self, rusage: "Optional[resource._RUsage]"):
"""Log context has started running (again)."""
super().start(rusage)
# We've become active again so we make sure we're in the list of active
# procs. (Note that "start" here means we've become active, as opposed
# to starting for the first time.)
with _bg_metrics_lock:
_background_processes_active_since_last_scrape.add(self._proc)
def __exit__(self, type, value, traceback) -> None:
"""Log context has finished."""
super().__exit__(type, value, traceback)
# The background process has finished. We explicitly remove and manually
# update the metrics here so that if nothing is scraping metrics the set
# doesn't infinitely grow.
with _bg_metrics_lock:
_background_processes_active_since_last_scrape.discard(self._proc)
self._proc.update_metrics()
| apache-2.0 | 8,190,291,921,315,463,000 | 34.742268 | 87 | 0.665128 | false |
projectexpert/pmis | analytic_resource_plan_stock/models/analytic_resource_plan_line.py | 1 | 5846 | # Copyright 2017 Eficent Business and IT Consulting Services S.L.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
import odoo.addons.decimal_precision as dp
class AnalyticResourcePlanLine(models.Model):
_inherit = 'analytic.resource.plan.line'
@api.multi
def _compute_quantities(self):
for line in self:
stock = line.with_context(
analytic_account_id=line.account_id.id).product_id.\
_product_available()
if stock.get(line.product_id.id, False):
line.incoming_qty = stock[line.product_id.id]['incoming_qty']
line.outgoing_qty = stock[line.product_id.id]['outgoing_qty']
line.virtual_available = \
stock[line.product_id.id]['virtual_available']
line.qty_available = stock[line.product_id.id]['qty_available']
else:
line.incoming_qty = 0.0
line.outgoing_qty = 0.0
line.virtual_available = 0.0
line.qty_available = 0.0
@api.multi
def _compute_done_quantities(self):
for line in self:
stock = line.with_context(
analytic_account_id_out=line.account_id.id).product_id.\
_product_available()
if stock.get(line.product_id.id, False):
# available in customer means done
line.outgoing_done_qty = (
stock[line.product_id.id]['qty_available'])
else:
line.outgoing_done_qty = 0.0
line.incoming_done_qty = (line.qty_available - line.outgoing_qty
- line.outgoing_done_qty)
qty_available = fields.Float(
string='Qty Available',
digits=dp.get_precision('Product Unit of Measure'),
compute='_compute_quantities',
help="Current quantity of products. "
"In a context with a single Stock Location, this includes "
"goods stored at this Location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods stored in the Stock Location of this Warehouse, "
"or any of its children. "
"In a context with a single Shop, this includes goods "
"stored in the Stock Location of the Warehouse of this Shop, "
"or any of its children. "
"Otherwise, this includes goods stored in any Stock Location "
"with 'internal' type."
)
virtual_available = fields.Float(
string='Virtually available',
compute='_compute_quantities',
digits=dp.get_precision('Product Unit of Measure'),
help="Forecast quantity (computed as Quantity On Hand "
"- Outgoing + Incoming) "
"In a context with a single Stock Location, this includes "
"goods stored in this location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods stored in the Stock Location of this Warehouse, "
"or any of its children. "
"In a context with a single Shop, this includes goods "
"stored in the Stock Location of the Warehouse of this Shop, "
"or any of its children. "
"Otherwise, this includes goods stored in any Stock Location "
"with 'internal' type."
)
incoming_qty = fields.Float(
string='Qty Incoming',
digits=dp.get_precision('Product Unit of Measure'),
compute='_compute_quantities',
help="Quantity of products that are planned to arrive. "
"In a context with a single Stock Location, this includes "
"goods arriving to this Location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods arriving to the Stock Location of this Warehouse, or "
"any of its children. "
"In a context with a single Shop, this includes goods "
"arriving to the Stock Location of the Warehouse of this "
"Shop, or any of its children. "
"Otherwise, this includes goods arriving to any Stock "
"Location with 'internal' type."
)
outgoing_qty = fields.Float(
string='Outgoing quantity',
default=lambda self: self.unit_amount,
compute='_compute_quantities',
digits=dp.get_precision('Product Unit of Measure'),
help="Quantity of products that are planned to leave. "
"In a context with a single Stock Location, this includes "
"goods leaving this Location, or any of its children. "
"In a context with a single Warehouse, this includes "
"goods leaving the Stock Location of this Warehouse, or "
"any of its children. "
"In a context with a single Shop, this includes goods "
"leaving the Stock Location of the Warehouse of this "
"Shop, or any of its children. "
"Otherwise, this includes goods leaving any Stock "
"Location with 'internal' type."
)
incoming_done_qty = fields.Float(
string='Qty Incoming Done',
digits=dp.get_precision('Product Unit of Measure'),
compute='_compute_done_quantities',
help="Quantity of products that have been produced or have "
"arrived."
)
outgoing_done_qty = fields.Float(
string='Qty Outgoing Done',
default=lambda self: self.unit_amount,
compute='_compute_done_quantities',
digits=dp.get_precision('Product Unit of Measure'),
help="Quantity of products that have been consumed or delivered."
)
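# --- Hedged read-side sketch (illustrative; the record id is made up). Inside
# an Odoo environment the computed quantities read like ordinary fields, with
# the analytic account context driving the stock computation:
#   line = env['analytic.resource.plan.line'].browse(line_id)
#   line.qty_available, line.virtual_available, line.outgoing_done_qty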
| agpl-3.0 | 5,313,668,238,580,680,000 | 46.145161 | 79 | 0.594595 | false |
silkentrance/django-db-mixins | django_mixins/base.py | 1 | 1270 | # -*- coding: utf-8 -*-
#
# Copyright 2014 Carsten Klein <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import six
import django.db.models.base as djbase
from .utils import InstanceDescriptorMixin
from .auth import CurrentUser
class ModelBase(djbase.Model):
class Meta:
abstract = True
class MixinMeta(djbase.ModelBase):
def __new__(cls, name, bases, attrs):
# all mixins are abstract
supernew = super(MixinMeta, cls).__new__
class Meta:
abstract = True
attrs['Meta'] = Meta
return supernew(cls, name, bases, attrs)
class MixinBase(six.with_metaclass(MixinMeta, djbase.Model, InstanceDescriptorMixin)):
class Meta:
abstract = True
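# --- Hedged example (added for illustration): a concrete mixin. MixinMeta
# forces Meta.abstract = True on every subclass, so no table is created and
# the field below is simply inherited by models that mix this in.
from django.db import models as _dj_models
class CreatedMixin(MixinBase):
    created = _dj_models.DateTimeField(auto_now_add=True)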
| apache-2.0 | 6,584,541,726,012,519,000 | 21.678571 | 86 | 0.692913 | false |
ntt-sic/heat | heat/common/identifier.py | 1 | 8227 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import collections
from heat.openstack.common import strutils
from heat.openstack.common.py3kcompat import urlutils
class HeatIdentifier(collections.Mapping):
FIELDS = (
TENANT, STACK_NAME, STACK_ID, PATH
) = (
'tenant', 'stack_name', 'stack_id', 'path'
)
path_re = re.compile(r'stacks/([^/]+)/([^/]+)(.*)')
def __init__(self, tenant, stack_name, stack_id, path=''):
'''
Initialise a HeatIdentifier from a Tenant ID, Stack name, Stack ID
and optional path. If a path is supplied and it does not begin with
"/", a "/" will be prepended.
'''
if path and not path.startswith('/'):
path = '/' + path
if '/' in stack_name:
raise ValueError(_('Stack name may not contain "/"'))
self.identity = {
self.TENANT: tenant,
self.STACK_NAME: stack_name,
self.STACK_ID: str(stack_id),
self.PATH: path,
}
@classmethod
def from_arn(cls, arn):
'''
Return a new HeatIdentifier generated by parsing the supplied ARN.
'''
fields = arn.split(':')
if len(fields) < 6 or fields[0].lower() != 'arn':
raise ValueError(_('"%s" is not a valid ARN') % arn)
id_fragment = ':'.join(fields[5:])
path = cls.path_re.match(id_fragment)
if fields[1] != 'openstack' or fields[2] != 'heat' or not path:
raise ValueError(_('"%s" is not a valid Heat ARN') % arn)
return cls(urlutils.unquote(fields[4]),
urlutils.unquote(path.group(1)),
urlutils.unquote(path.group(2)),
urlutils.unquote(path.group(3)))
@classmethod
def from_arn_url(cls, url):
'''
Return a new HeatIdentifier generated by parsing the supplied URL
The URL is expected to contain a valid arn as part of the path
'''
# Sanity check the URL
urlp = urlutils.urlparse(url)
if (urlp.scheme not in ('http', 'https') or
not urlp.netloc or not urlp.path):
raise ValueError(_('"%s" is not a valid URL') % url)
# Remove any query-string and extract the ARN
arn_url_prefix = '/arn%3Aopenstack%3Aheat%3A%3A'
match = re.search(arn_url_prefix, urlp.path, re.IGNORECASE)
if match is None:
raise ValueError(_('"%s" is not a valid ARN URL') % url)
# the +1 is to skip the leading /
url_arn = urlp.path[match.start() + 1:]
arn = urlutils.unquote(url_arn)
return cls.from_arn(arn)
def arn(self):
'''
Return an ARN of the form:
arn:openstack:heat::<tenant>:stacks/<stack_name>/<stack_id><path>
'''
return 'arn:openstack:heat::%s:%s' % (urlutils.quote(self.tenant, ''),
self._tenant_path())
def arn_url_path(self):
'''
Return an ARN quoted correctly for use in a URL
'''
return '/' + urlutils.quote(self.arn(), '')
def url_path(self):
'''
Return a URL-encoded path segment of a URL in the form:
<tenant>/stacks/<stack_name>/<stack_id><path>
'''
return '/'.join((urlutils.quote(self.tenant, ''), self._tenant_path()))
def _tenant_path(self):
'''
Return a URL-encoded path segment of a URL within a particular tenant,
in the form:
stacks/<stack_name>/<stack_id><path>
'''
return 'stacks/%s/%s%s' % (urlutils.quote(self.stack_name, ''),
urlutils.quote(self.stack_id, ''),
urlutils.quote(strutils.safe_encode(
self.path)))
def _path_components(self):
'''Return a list of the path components.'''
return self.path.lstrip('/').split('/')
def __getattr__(self, attr):
'''
Return one of the components of the identity when accessed as an
attribute.
'''
if attr not in self.FIELDS:
raise AttributeError(_('Unknown attribute "%s"') % attr)
return self.identity[attr]
def __getitem__(self, key):
'''Return one of the components of the identity.'''
if key not in self.FIELDS:
raise KeyError(_('Unknown attribute "%s"') % key)
return self.identity[key]
def __len__(self):
'''Return the number of components in an identity.'''
return len(self.FIELDS)
def __contains__(self, key):
return key in self.FIELDS
def __iter__(self):
return iter(self.FIELDS)
def __repr__(self):
return repr(dict(self))
class ResourceIdentifier(HeatIdentifier):
'''An identifier for a resource.'''
RESOURCE_NAME = 'resource_name'
def __init__(self, tenant, stack_name, stack_id, path,
resource_name=None):
'''
Return a new Resource identifier based on the identifier components of
the owning stack and the resource name.
'''
if resource_name is not None:
if '/' in resource_name:
raise ValueError(_('Resource name may not contain "/"'))
path = '/'.join([path.rstrip('/'), 'resources', resource_name])
super(ResourceIdentifier, self).__init__(tenant,
stack_name,
stack_id,
path)
def __getattr__(self, attr):
'''
Return one of the components of the identity when accessed as an
attribute.
'''
if attr == self.RESOURCE_NAME:
return self._path_components()[-1]
return HeatIdentifier.__getattr__(self, attr)
def stack(self):
'''
Return a HeatIdentifier for the owning stack
'''
return HeatIdentifier(self.tenant, self.stack_name, self.stack_id,
'/'.join(self._path_components()[:-2]))
class EventIdentifier(HeatIdentifier):
'''An identifier for an event.'''
(RESOURCE_NAME, EVENT_ID) = (ResourceIdentifier.RESOURCE_NAME, 'event_id')
def __init__(self, tenant, stack_name, stack_id, path,
event_id=None):
'''
Return a new Event identifier based on the identifier components of
the associated resource and the event ID.
'''
if event_id is not None:
path = '/'.join([path.rstrip('/'), 'events', event_id])
super(EventIdentifier, self).__init__(tenant,
stack_name,
stack_id,
path)
def __getattr__(self, attr):
'''
Return one of the components of the identity when accessed as an
attribute.
'''
if attr == self.RESOURCE_NAME:
return getattr(self.resource(), attr)
if attr == self.EVENT_ID:
return self._path_components()[-1]
return HeatIdentifier.__getattr__(self, attr)
def resource(self):
'''
Return a HeatIdentifier for the owning resource
'''
return ResourceIdentifier(self.tenant, self.stack_name, self.stack_id,
'/'.join(self._path_components()[:-2]))
def stack(self):
'''
Return a HeatIdentifier for the owning stack
'''
return self.resource().stack()
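# --- Hedged self-check (added for illustration; the tenant and stack values
# are made up): round-trip an identifier through its ARN form.
if __name__ == '__main__':
    _ident = HeatIdentifier('t-0001', 'mystack', 'a1b2c3', '/resources/srv')
    assert HeatIdentifier.from_arn(_ident.arn()).identity == _ident.identity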
| apache-2.0 | 7,868,432,910,404,716,000 | 33.279167 | 79 | 0.54467 | false |
kensho-technologies/graphql-compiler | graphql_compiler/backend.py | 1 | 1810 | # Copyright 2019-present Kensho Technologies, LLC.
from collections import namedtuple
from .compiler import (
emit_cypher,
emit_gremlin,
emit_match,
emit_sql,
ir_lowering_cypher,
ir_lowering_gremlin,
ir_lowering_match,
ir_lowering_sql,
)
from .schema import schema_info
# A backend is a compilation target (a language we can compile to)
#
# This class defines all the necessary and sufficient functionality a backend should implement
# in order to fit into our generic testing framework.
Backend = namedtuple(
"Backend",
(
# String, the internal name of this language.
"language",
# The subclass of SchemaInfo appropriate for this backend.
"SchemaInfoClass",
# Given a SchemaInfoClass and an IR that respects its schema, return a lowered IR with
# the same semantics.
"lower_func",
# Given a SchemaInfoClass and a lowered IR that respects its schema, emit a query
# in this language with the same semantics.
"emit_func",
),
)
gremlin_backend = Backend(
language="Gremlin",
SchemaInfoClass=schema_info.CommonSchemaInfo,
lower_func=ir_lowering_gremlin.lower_ir,
emit_func=emit_gremlin.emit_code_from_ir,
)
match_backend = Backend(
language="MATCH",
SchemaInfoClass=schema_info.CommonSchemaInfo,
lower_func=ir_lowering_match.lower_ir,
emit_func=emit_match.emit_code_from_ir,
)
cypher_backend = Backend(
language="Cypher",
SchemaInfoClass=schema_info.CommonSchemaInfo,
lower_func=ir_lowering_cypher.lower_ir,
emit_func=emit_cypher.emit_code_from_ir,
)
sql_backend = Backend(
language="SQL",
SchemaInfoClass=schema_info.SQLAlchemySchemaInfo,
lower_func=ir_lowering_sql.lower_ir,
emit_func=emit_sql.emit_code_from_ir,
)
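# --- Hedged usage sketch (added for illustration): the generic two-phase
# flow each Backend supports, per the field docs above. `schema_info` must be
# an instance of backend.SchemaInfoClass and `ir` an IR respecting its schema.
def compile_with_backend(backend, schema_info, ir):
    lowered_ir = backend.lower_func(schema_info, ir)
    return backend.emit_func(schema_info, lowered_ir)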
| apache-2.0 | -8,970,847,914,919,115,000 | 27.28125 | 94 | 0.699448 | false |
JYamihud/blender-organizer | py_data/modules/itemselector.py | 1 | 11370 | # -*- coding: utf-8 -*-
# system
import os
import sys
import socket
# graphics interface
import gtk
import pango
import cairo
import glib
import datetime
try:
import Image
except:
from PIL import Image
# calculational help
import datetime
# self made modules
import thumbnailer
import dialogs
import checklist
def select(pf, searchitem=""):
window = gtk.Dialog("Choose Item", None, 0, (gtk.STOCK_OK, gtk.RESPONSE_APPLY,
gtk.STOCK_CANCEL, gtk.RESPONSE_CLOSE))
box = window.get_child()
namebox = gtk.HBox(False)
box.pack_start(namebox, False)
namebox.pack_start(gtk.Label(" Search: "), False)
nameentry = gtk.Entry()
nameentry.set_text(searchitem)
namebox.pack_start(nameentry)
finalname = gtk.Entry()
finalname.set_text("")
#namebox.pack_end(finalname, False)
class draw:
def __init__(self, pf, box , win, search, finalname):
self.box = box
self.win = win
self.pf = pf
self.search = search
self.finalname = finalname
self.allowed = True
self.scroll = 0
self.dW = 0
self.DH = 0
self.mpx = 0
self.mpy = 0
self.mpf = ""
self.frame = 0
# LET'S PREPARE ALL THE ITEMS
self.listofitems = []
for CUR in ["chr", "veh", "loc", "obj"]:
print self.pf+"/dev/"+CUR
for i in os.walk(self.pf+"/dev/"+CUR).next()[1]:
self.listofitems.append([CUR,i])
self.objicon = gtk.gdk.pixbuf_new_from_file(self.pf+"/py_data/icons/obj_asset_undone.png")
self.chricon = gtk.gdk.pixbuf_new_from_file(self.pf+"/py_data/icons/chr_asset_undone.png")
self.vehicon = gtk.gdk.pixbuf_new_from_file(self.pf+"/py_data/icons/veh_asset_undone.png")
self.locicon = gtk.gdk.pixbuf_new_from_file(self.pf+"/py_data/icons/loc_asset_undone.png")
self.plus = gtk.gdk.pixbuf_new_from_file(self.pf+"/py_data/icons/plus.png")
def framegraph(widget, event):
self.frame = self.frame + 1
w, h = widget.window.get_size()
xgc = widget.window.new_gc()
mx, my, fx = widget.window.get_pointer()
# GETTING WHETHER THE WINDOW IS ACTIVE
self.winactive = self.win.is_active()
ctx = widget.window.cairo_create()
#ctx.select_font_face("Sawasdee", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
ctx.set_source_rgb(1,1,1)
xgc.line_width = 2
# BACKGROUND COLOR
xgc.set_rgb_fg_color(gtk.gdk.color_parse("#2b2b2b")) ## CHOSE COLOR
widget.window.draw_rectangle(xgc, True, 0, 0, w, h) ## FILL FRAME
#IF WE SEARCH
showlist = self.listofitems
                if len(self.search.get_text()) > 0:
showlist = []
for i in self.listofitems:
if self.search.get_text().lower() in i[0].lower() or self.search.get_text().lower() in i[1].lower():
showlist.append(i)
                # current scroll offset, used to position rows below
                S = self.scroll
# OUTPUTTING THEM TO THE SCREEN
n = 0
i = ["", ""]
sett = True
hoti = 0
al = 0
foundsearch = False
for n, i in enumerate(sorted(showlist)):
if self.search.get_text() in ["chr", "veh", "obj", "loc", i[1]]:
foundsearch = True
if self.search.get_text() == i[1] and self.frame < 3:
self.finalname.set_text("/dev/"+i[0]+"/"+i[1])
hoti = (20*n)+S # HEIGHT OF THIS ITEM
#every even darker
if (n % 2) == 0:
xgc.set_rgb_fg_color(gtk.gdk.color_parse("#262626")) ## CHOSE COLOR
widget.window.draw_rectangle(xgc, True, 0, hoti+2, w, 20)
#mouse over
if my in range(hoti+2, hoti+22) and my in range(0, h) and mx in range(0,w):
xgc.set_rgb_fg_color(gtk.gdk.color_parse("#3f3f3f")) ## CHOSE COLOR
widget.window.draw_rectangle(xgc, True, 0, hoti+2, w, 20)
if "GDK_BUTTON1" in str(fx) and self.allowed and "GDK_BUTTON1" not in str(self.mpf) and win.is_active() and sett: #IF CLICKED
self.finalname.set_text("/dev/"+i[0]+"/"+i[1])
sett = False
# if selected 395384
if "/dev/"+i[0]+"/"+i[1] == self.finalname.get_text():
xgc.set_rgb_fg_color(gtk.gdk.color_parse("#395384")) ## CHOSE COLOR
widget.window.draw_rectangle(xgc, True, 0, hoti+2, w, 20)
ctx.set_font_size(15)
ctx.move_to( 30, hoti+17)
ctx.show_text(i[1])
#drawing icons
if i[0] == "chr":
widget.window.draw_pixbuf(None, self.chricon, 0, 0, 1, hoti+2, -1, -1, gtk.gdk.RGB_DITHER_NONE, 0, 0)
elif i[0] == "veh":
widget.window.draw_pixbuf(None, self.vehicon, 0, 0, 1, hoti+2, -1, -1, gtk.gdk.RGB_DITHER_NONE, 0, 0)
elif i[0] == "loc":
widget.window.draw_pixbuf(None, self.locicon, 0, 0, 1, hoti+2, -1, -1, gtk.gdk.RGB_DITHER_NONE, 0, 0)
elif i[0] == "obj":
widget.window.draw_pixbuf(None, self.objicon, 0, 0, 1, hoti+2, -1, -1, gtk.gdk.RGB_DITHER_NONE, 0, 0)
if n+1 == len(showlist):
hoti = hoti + 20
if len(self.search.get_text()) > 0 and foundsearch == False:
#mouse over
if my in range(hoti+2, hoti+22) and my in range(0, h) and mx in range(0,w):
xgc.set_rgb_fg_color(gtk.gdk.color_parse("#3f3f3f")) ## CHOSE COLOR
widget.window.draw_rectangle(xgc, True, 0, hoti+2, w, 20)
if "GDK_BUTTON1" in str(fx) and self.allowed and "GDK_BUTTON1" not in str(self.mpf) and win.is_active() and sett: #IF CLICKED
def ee():
self.addingnew = dialogs.AddAsset(self.pf, "chr", self.search.get_text())
path = self.addingnew.getpath()
if len(path) > 0:
print path, "PATH"
CUR = path[5:path.rfind("/")]
NAME = path[path.rfind("/")+1:]
print CUR, "CUR"
self.listofitems.append([CUR, NAME])
self.search.set_text(NAME)
self.finalname.set_text("/dev/"+CUR+"/"+NAME)
glib.timeout_add(10, ee)
al = al + 1
ctx.set_font_size(15)
ctx.move_to( 30, hoti+17)
ctx.show_text('Create item "'+self.search.get_text()+'"')
widget.window.draw_pixbuf(None, self.plus, 0, 0, 1, hoti+2, -1, -1, gtk.gdk.RGB_DITHER_NONE, 0, 0)
# SCROLLING IT SELF
# the scroll is done with the middle mouse button
if self.mpy > my and "GDK_BUTTON2" in str(fx) and "GDK_BUTTON2" in str(self.mpf):
self.scroll = self.scroll + (my-self.mpy)
if self.mpy < my and "GDK_BUTTON2" in str(fx) and "GDK_BUTTON2" in str(self.mpf):
self.scroll = self.scroll - (self.mpy-my)
if self.scroll < 0-((n+al)*20)+h-33:
self.scroll = 0-((n+al)*20)+h-33
if self.scroll > 0:
self.scroll = 0
# TESTING SOMETHING
ctx.set_font_size(20)
ctx.move_to( mx, my)
#ctx.show_text(str(mx)+":"+str(my)+" "+str(self.winactive)+" "+str(fx)+" "+self.search.get_text()+" "+self.finalname.get_text())
self.dW = w
self.DH = h
self.mpx = mx
self.mpy = my
self.mpf = fx
def callback():
if self.allowed == True:
widget.queue_draw()
glib.timeout_add(1, callback)
graph = gtk.DrawingArea()
graph.set_size_request(400,400)
self.box.pack_start(graph)
graph.show()
graph.connect("expose-event", framegraph)
drawer = draw(pf, box, window, nameentry, finalname)
box.show_all()
r = window.run()
ret = False
if r == gtk.RESPONSE_APPLY:
ret = finalname.get_text()
window.destroy()
return ret
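# --- Hedged usage example (illustrative; the project path is hypothetical).
# select() blocks on a GTK dialog and returns a "/dev/<type>/<name>" string,
# or False if the user cancelled.
if __name__ == "__main__":
    chosen = select("/path/to/project", searchitem="hero")
    if chosen:
        print chosen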
| gpl-2.0 | 9,167,160,066,668,298,000 | 35.095238 | 151 | 0.385224 | false |
NaN-tic/nereid | nereid/sessions.py | 1 | 3596 | #This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from datetime import datetime # noqa
from flask.sessions import SessionInterface, SessionMixin
from werkzeug.contrib.sessions import Session as SessionBase, SessionStore
from flask.globals import current_app
class Session(SessionBase, SessionMixin):
"Nereid Default Session Object"
class NullSession(Session):
"""
Class used to generate nicer error messages if sessions are not
available. Will still allow read-only access to the empty session
but fail on setting.
"""
def _fail(self, *args, **kwargs):
raise RuntimeError('the session is unavailable because no secret '
'key was set. Set the secret_key on the '
'application to something unique and secret.')
__setitem__ = __delitem__ = clear = pop = popitem = \
update = setdefault = _fail
del _fail
class MemcachedSessionStore(SessionStore):
"""
Session store that stores session on memcached
:param session_class: The session class to use.
Defaults to :class:`Session`.
"""
def __init__(self, session_class=Session):
SessionStore.__init__(self, session_class)
def save(self, session):
"""
Updates the session
"""
current_app.cache.set(
session.sid, dict(session), 30 * 24 * 60 * 60
)
def delete(self, session):
"""
Deletes the session
"""
current_app.cache.delete(session.sid)
def get(self, sid):
"""
Returns session
"""
if not self.is_valid_key(sid):
return self.new()
session_data = current_app.cache.get(sid)
if session_data is None:
session_data = {}
return self.session_class(session_data, sid, False)
def list(self):
"""
Lists all sessions in the store
"""
raise Exception("Not implemented yet")
class NereidSessionInterface(SessionInterface):
"""Session Management Class"""
session_store = MemcachedSessionStore()
null_session_class = NullSession
def open_session(self, app, request):
"""
Creates or opens a new session.
:param request: an instance of :attr:`request_class`.
"""
sid = request.cookies.get(app.session_cookie_name, None)
if sid:
return self.session_store.get(sid)
else:
return self.session_store.new()
def save_session(self, app, session, response):
"""
Saves the session if it needs updates. For the default
implementation, check :meth:`open_session`.
:param session: the session to be saved
:param response: an instance of :attr:`response_class`
"""
if session.should_save:
self.session_store.save(session)
expires = self.get_expiration_time(app, session)
domain = self.get_cookie_domain(app)
from nereid.globals import request
sid = request.cookies.get(app.session_cookie_name, None)
if session.sid != sid:
# The only information in the session is the sid, and the
# only reason why a cookie should be set again is if that
# has changed
response.set_cookie(
app.session_cookie_name, session.sid,
expires=expires, httponly=False, domain=domain
)
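# --- Hedged wiring sketch (added for illustration): Flask-style applications
# adopt the interface by plain assignment; Nereid performs the equivalent
# step while constructing its application object.
def _example_install_session_interface(app):
    app.session_interface = NereidSessionInterface()
    return app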
| gpl-3.0 | 3,200,084,190,623,582,000 | 31.107143 | 77 | 0.606229 | false |
cortedeltimo/SickRage | sickbeard/clients/transmission_client.py | 1 | 5187 | # coding=utf-8
# Author: Mr_Orange <[email protected]>
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import json
import re
from base64 import b64encode
import sickbeard
from sickbeard.clients.generic import GenericClient
class TransmissionAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(TransmissionAPI, self).__init__('Transmission', host, username, password)
self.url = '/'.join((self.host.rstrip('/'), sickbeard.TORRENT_RPCURL.strip('/'), 'rpc'))
def _get_auth(self):
post_data = json.dumps({'method': 'session-get', })
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'), timeout=120,
verify=sickbeard.TORRENT_VERIFY_CERT)
self.auth = re.search(r'X-Transmission-Session-Id:\s*(\w+)', self.response.text).group(1)
except Exception:
return None
self.session.headers.update({'x-transmission-session-id': self.auth})
# Validating Transmission authorization
post_data = json.dumps({'arguments': {},
'method': 'session-get'})
self._request(method='post', data=post_data)
return self.auth
def _add_torrent_uri(self, result):
arguments = {
'filename': result.url,
'paused': int(sickbeard.TORRENT_PAUSED)
}
if sickbeard.TORRENT_PATH:
arguments['download-dir'] = sickbeard.TORRENT_PATH + "/" + result.show.name + "/"
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-add'})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
def _add_torrent_file(self, result):
arguments = {
'metainfo': b64encode(result.content),
'paused': 1 if sickbeard.TORRENT_PAUSED else 0
}
if sickbeard.TORRENT_PATH:
arguments['download-dir'] = sickbeard.TORRENT_PATH + "/" + result.show.name + "/"
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-add'})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
def _set_torrent_ratio(self, result):
ratio = None
if result.ratio:
ratio = result.ratio
mode = 0
if ratio:
if float(ratio) == -1:
ratio = 0
mode = 2
elif float(ratio) >= 0:
ratio = float(ratio)
mode = 1 # Stop seeding at seedRatioLimit
arguments = {'ids': [result.hash],
'seedRatioLimit': ratio,
'seedRatioMode': mode}
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-set'})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
def _set_torrent_seed_time(self, result):
if sickbeard.TORRENT_SEED_TIME and sickbeard.TORRENT_SEED_TIME != -1:
time = int(60 * float(sickbeard.TORRENT_SEED_TIME))
arguments = {'ids': [result.hash],
'seedIdleLimit': time,
'seedIdleMode': 1}
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-set'})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
else:
return True
def _set_torrent_priority(self, result):
arguments = {'ids': [result.hash]}
if result.priority == -1:
arguments['priority-low'] = []
elif result.priority == 1:
# set high priority for all files in torrent
arguments['priority-high'] = []
# move torrent to the top if the queue
arguments['queuePosition'] = 0
if sickbeard.TORRENT_HIGH_BANDWIDTH:
arguments['bandwidthPriority'] = 1
else:
arguments['priority-normal'] = []
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-set'})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
api = TransmissionAPI()
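# --- Hedged usage sketch (added for illustration, never called): pushing a
# magnet URI through the module-level client. The stand-in result only
# carries the attribute _add_torrent_uri reads when sickbeard.TORRENT_PATH is
# unset; real callers pass full SickRage search-result objects.
def _example_send_magnet(magnet_uri):
    from collections import namedtuple
    fake_result = namedtuple('FakeResult', 'url')(url=magnet_uri)
    if api._get_auth():
        return api._add_torrent_uri(fake_result)
    return False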
| gpl-3.0 | 5,219,753,062,150,899,000 | 31.622642 | 101 | 0.572393 | false |
antoinearnoud/openfisca-france | openfisca_france/reforms/plfr2014.py | 1 | 4011 | # -*- coding: utf-8 -*-
from __future__ import division
import os
from ..model.base import *
dir_path = os.path.join(os.path.dirname(__file__), 'parameters')
# TODO: the payroll charge reductions were not coded because they were cancelled
# (wholly or partly?) by the Constitutional Council
class plfr2014(Reform):
name = u'Projet de Loi de Finances Rectificative 2014'
class reduction_impot_exceptionnelle(Variable):
definition_period = YEAR
def formula_2013_01_01(foyer_fiscal, period, parameters):
janvier = period.first_month
nb_adult = foyer_fiscal('nb_adult', period)
nb_parents = foyer_fiscal.declarant_principal.famille('nb_parents', period = janvier)
rfr = foyer_fiscal('rfr', period)
params = parameters(period).plfr2014.reduction_impot_exceptionnelle
plafond = params.seuil * nb_adult + (nb_parents - nb_adult) * 2 * params.majoration_seuil
montant = params.montant_plafond * nb_adult
return min_(max_(plafond + montant - rfr, 0), montant)
class reductions(Variable):
label = u"Somme des réductions d'impôt à intégrer pour l'année 2013"
definition_period = YEAR
def formula_2013_01_01(foyer_fiscal, period, parameters):
accult = foyer_fiscal('accult', period)
adhcga = foyer_fiscal('adhcga', period)
cappme = foyer_fiscal('cappme', period)
creaen = foyer_fiscal('creaen', period)
daepad = foyer_fiscal('daepad', period)
deffor = foyer_fiscal('deffor', period)
dfppce = foyer_fiscal('dfppce', period)
doment = foyer_fiscal('doment', period)
domlog = foyer_fiscal('domlog', period)
donapd = foyer_fiscal('donapd', period)
duflot = foyer_fiscal('duflot', period)
ecpess = foyer_fiscal('ecpess', period)
garext = foyer_fiscal('garext', period)
intagr = foyer_fiscal('intagr', period)
invfor = foyer_fiscal('invfor', period)
invlst = foyer_fiscal('invlst', period)
ip_net = foyer_fiscal('ip_net', period)
locmeu = foyer_fiscal('locmeu', period)
mecena = foyer_fiscal('mecena', period)
mohist = foyer_fiscal('mohist', period)
patnat = foyer_fiscal('patnat', period)
prcomp = foyer_fiscal('prcomp', period)
reduction_impot_exceptionnelle = foyer_fiscal('reduction_impot_exceptionnelle', period)
repsoc = foyer_fiscal('repsoc', period)
resimm = foyer_fiscal('resimm', period)
rsceha = foyer_fiscal('rsceha', period)
saldom = foyer_fiscal('saldom', period)
scelli = foyer_fiscal('scelli', period)
sofica = foyer_fiscal('sofica', period)
spfcpi = foyer_fiscal('spfcpi', period)
total_reductions = accult + adhcga + cappme + creaen + daepad + deffor + dfppce + doment + domlog + \
donapd + duflot + ecpess + garext + intagr + invfor + invlst + locmeu + mecena + mohist + patnat + \
prcomp + repsoc + resimm + rsceha + saldom + scelli + sofica + spfcpi + reduction_impot_exceptionnelle
return min_(ip_net, total_reductions)
def apply(self):
for variable in [self.reduction_impot_exceptionnelle, self.reductions]:
self.update_variable(variable)
self.modify_parameters(modifier_function = modify_parameters)
def modify_parameters(parameters):
file_path = os.path.join(dir_path, 'plfr2014.yaml')
plfr2014_parameters_subtree = load_parameter_file(name='plfr2014', file_path=file_path)
file_path = os.path.join(dir_path, 'plfrss2014.yaml')
plfrss2014_parameters_subtree = load_parameter_file(name='plfrss2014', file_path=file_path)
parameters.add_child('plfr2014', plfr2014_parameters_subtree)
parameters.add_child('plfrss2014', plfrss2014_parameters_subtree)
return parameters
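# --- Hedged usage sketch (added for illustration; imported lazily to avoid a
# circular import at module load): building the reformed system for
# simulations.
def _example_build_reformed_system():
    from openfisca_france import FranceTaxBenefitSystem
    return plfr2014(FranceTaxBenefitSystem())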
| agpl-3.0 | -1,933,542,702,604,743,700 | 44.477273 | 118 | 0.628186 | false |
jelovirt/dita-generator | src/ditagen/web.py | 1 | 5467 | #!/usr/bin/env python
# -*- coding: UTF-8; indent-tabs-mode:nil; tab-width:4 -*-
# This file is part of DITA DTD Generator.
#
# Copyright 2009 Jarno Elovirta <http://www.elovirta.com/>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import cgitb; cgitb.enable()
import ditagen.dita
import ditagen.dtdgen
import ditagen.dita.v1_1
import ditagen.dita.v1_2
import ditagen.generator
def print_error(__msg):
print_response_headers(None, 500, __msg)
print __msg
sys.exit()
def print_response_headers(__file_name, __code=200, __msg="Ok"):
print u"Status: %d %s" % (__code, __msg)
print u"Content-Type: text/plain; charset=UTF-8"
# print u"Content-disposition: attachment; file_name=%s.%s" % (__root, __f)
#print u"Content-disposition: file_name=%s" % __file_name #__dita.getfileName(__type, __root, __f)
print
def main(form):
"""Main method."""
__topic_type = None
__output_type = None
__id = None
__root = None
__owner = None
__nested = None
#__remove = {}
#__global_atts = None
__format = None
__domains = []
#__types = []
__version = "1.1"
    __plugin_name = None
    __plugin_version = None
__stylesheet = None
__title = None
__file = None
try:
# read arguments
if u"version" in form:
__version = form.getfirst(u"version")
if __version not in ("1.1", "1.2"):
raise ValueError()
else:
print_error("version missing")
# get domains
for __d in form.getlist(u"domain"):
if __d in ditagen.DOMAIN_MAP[__version]:
__domains.append(ditagen.DOMAIN_MAP[__version][__d]())
# get type
__t = form.getfirst(u"type")
if __t in ditagen.TOPIC_MAP[__version]:
__topic_type = ditagen.TOPIC_MAP[__version][__t]()
__o = form.getfirst(u"output")
if __o in ditagen.OUTPUT_MAP:
__output_type = ditagen.OUTPUT_MAP[__o]
# get arguments
if u"id" in form:
__id = form.getfirst(u"id")
else:
print_error("id missing")
if u"root" in form:
__root = form.getfirst(u"root")
if u"owner" in form:
__owner = form.getfirst(u"owner")
else:
print_error("owner missing")
if u"title" in form:
__title = form.getfirst(u"title")
else:
print_error("title missing")
#if not __title:
# __title = __id.capitalize()
__nested = u"nested" in form
#__remove = dict([(n, True) for n in form.getlist("remove")])
#__global_atts = None#form.getfirst(u"attribute")
if u"file" in form:
__format = form.getfirst(u"file")
else:
print_error("file missing")
__stylesheet = form.getfirst(u"stylesheet")
__file = __id
#if __id is not None:
__topic_type = __output_type(__id, __title, __topic_type,
__owner, __file)#__root
if __topic_type == ditagen.dita.SpecializationType:
__topic_type.root = ditagen.dita.create_element(__topic_type, __root, __id)
# else would be reshelling
except:
#print u"HTTP/1.1 400 Invalid arguments"
#print
raise
# run generator
if __format== u"plugin" or not __format:
__dita_gen = ditagen.generator.PluginGenerator()
__dita_gen.out = sys.stdout
__dita_gen.topic_type = __topic_type
if not len(__domains) == 0:
__dita_gen.domains = __domains
__dita_gen.nested = __nested
__dita_gen.version = __version
#__dita_gen.set_title(__title)
if __stylesheet:
__dita_gen.set_stylesheet(__stylesheet)
if __plugin_name != None:
__dita_gen.plugin_name = __plugin_name
if __plugin_version != None:
__dita_gen.plugin_version = __plugin_version
__file_name = __dita_gen.get_file_name(__topic_type, __file, "zip")
print u"Status: 200 Ok"
#print u"Content-type: application/zip"
print u"Content-disposition: attachment; filename={0}".format(__file_name)
print
__dita_gen.generate_plugin()
else:
__dita_gen = ditagen.generator.DitaGenerator()
__dita_gen.out = sys.stdout
__dita_gen.topic_type = __topic_type
if not len(__domains) == 0:
__dita_gen.domains = __domains
__dita_gen.nested = __nested
__dita_gen.version = __version
__file_name = __dita_gen.get_file_name(__topic_type, __file, __format)
print_response_headers(__file_name)
if __format == u"dtd":
__dita_gen.generate_dtd()
elif __format == u"mod":
__dita_gen.generate_mod()
elif __format == u"ent":
__dita_gen.generate_ent()
if __name__ == "__main__":
main()
| apache-2.0 | -8,890,596,476,996,714,000 | 33.16875 | 102 | 0.554966 | false |
lukaslundgren/django-rest-framework-json-api | example/tests/test_model_viewsets.py | 1 | 7677 | import json
from django.contrib.auth import get_user_model
from django.utils import encoding
from django.core.urlresolvers import reverse
from django.conf import settings
from example.tests import TestBase
from example.tests.utils import dump_json, redump_json
from example.factories import CommentFactory
class ModelViewSetTests(TestBase):
"""
Test usage with ModelViewSets, also tests pluralization, camelization,
and underscore.
[<RegexURLPattern user-list ^identities/$>,
<RegexURLPattern user-detail ^identities/(?P<pk>[^/]+)/$>]
"""
list_url = reverse('user-list')
def setUp(self):
super(ModelViewSetTests, self).setUp()
self.detail_url = reverse('user-detail', kwargs={'pk': self.miles.pk})
setattr(settings, 'JSON_API_FORMAT_KEYS', 'dasherize')
def tearDown(self):
setattr(settings, 'JSON_API_FORMAT_KEYS', 'camelize')
def test_key_in_list_result(self):
"""
Ensure the result has a 'user' key since that is the name of the model
"""
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, 200)
user = get_user_model().objects.all()[0]
expected = {
'data': [
{
'type': 'users',
'id': encoding.force_text(user.pk),
'attributes': {
'first-name': user.first_name,
'last-name': user.last_name,
'email': user.email
},
}
],
'links': {
'first': 'http://testserver/identities?page=1',
'last': 'http://testserver/identities?page=2',
'next': 'http://testserver/identities?page=2',
'prev': None
},
'meta': {
'pagination': {
'page': 1,
'pages': 2,
'count': 2
}
}
}
content_dump = redump_json(response.content)
expected_dump = dump_json(expected)
assert expected_dump == content_dump
def test_page_two_in_list_result(self):
"""
Ensure that the second page is reachable and is the correct data.
"""
response = self.client.get(self.list_url, {'page': 2})
self.assertEqual(response.status_code, 200)
user = get_user_model().objects.all()[1]
expected = {
'data': [
{
'type': 'users',
'id': encoding.force_text(user.pk),
'attributes': {
'first-name': user.first_name,
'last-name': user.last_name,
'email': user.email
},
}
],
'links': {
'first': 'http://testserver/identities?page=1',
'last': 'http://testserver/identities?page=2',
'next': None,
'prev': 'http://testserver/identities?page=1',
},
'meta': {
'pagination': {
'page': 2,
'pages': 2,
'count': 2
}
}
}
content_dump = redump_json(response.content)
expected_dump = dump_json(expected)
assert expected_dump == content_dump
def test_page_range_in_list_result(self):
"""
Ensure that the range of a page can be changed from the client,
tests pluralization as two objects means it converts ``user`` to
``users``.
"""
response = self.client.get(self.list_url, {'page_size': 2})
self.assertEqual(response.status_code, 200)
users = get_user_model().objects.all()
expected = {
'data': [
{
'type': 'users',
'id': encoding.force_text(users[0].pk),
'attributes': {
'first-name': users[0].first_name,
'last-name': users[0].last_name,
'email': users[0].email
},
},
{
'type': 'users',
'id': encoding.force_text(users[1].pk),
'attributes': {
'first-name': users[1].first_name,
'last-name': users[1].last_name,
'email': users[1].email
},
}
],
'links': {
'first': 'http://testserver/identities?page=1&page_size=2',
'last': 'http://testserver/identities?page=1&page_size=2',
'next': None,
'prev': None
},
'meta': {
'pagination': {
'page': 1,
'pages': 1,
'count': 2
}
}
}
content_dump = redump_json(response.content)
expected_dump = dump_json(expected)
assert expected_dump == content_dump
def test_key_in_detail_result(self):
"""
Ensure the result has a 'user' key.
"""
response = self.client.get(self.detail_url)
self.assertEqual(response.status_code, 200)
expected = {
'data': {
'type': 'users',
'id': encoding.force_text(self.miles.pk),
'attributes': {
'first-name': self.miles.first_name,
'last-name': self.miles.last_name,
'email': self.miles.email
},
}
}
content_dump = redump_json(response.content)
expected_dump = dump_json(expected)
assert expected_dump == content_dump
def test_key_in_post(self):
"""
Ensure a key is in the post.
"""
self.client.login(username='miles', password='pw')
data = {
'data': {
'type': 'users',
'id': encoding.force_text(self.miles.pk),
'attributes': {
'first-name': self.miles.first_name,
'last-name': self.miles.last_name,
'email': '[email protected]'
},
}
}
response = self.client.put(self.detail_url,
content_type='application/vnd.api+json',
data=dump_json(data))
content_dump = redump_json(response.content)
expected_dump = dump_json(data)
assert expected_dump == content_dump
# is it updated?
self.assertEqual(
get_user_model().objects.get(pk=self.miles.pk).email,
'[email protected]')
def test_required_relation(self):
"Author should be required on CommentSerializer"
comment = CommentFactory(author=None)
url = reverse('comment-detail', kwargs={'pk': comment.pk})
self.client.login(username='miles', password='pw')
data = {
'data': {
'type': 'comments',
'id': encoding.force_text(comment.pk),
'attributes': {},
'relationships': {'author': {'data': None}}
}
}
response = self.client.patch(url,
content_type='application/vnd.api+json',
data=dump_json(data))
self.assertEqual(response.status_code, 400)
| bsd-2-clause | 1,623,057,772,902,982,400 | 31.392405 | 78 | 0.466458 | false |
angr/angr | angr/analyses/variable_recovery/engine_vex.py | 1 | 11842 | from typing import TYPE_CHECKING
import claripy
import pyvex
from ...engines.vex.claripy.datalayer import value as claripy_value
from ...engines.light import SimEngineLightVEXMixin
from ..typehoon import typevars, typeconsts
from .engine_base import SimEngineVRBase, RichR
if TYPE_CHECKING:
from .variable_recovery_base import VariableRecoveryStateBase
class SimEngineVRVEX(
SimEngineLightVEXMixin,
SimEngineVRBase,
):
state: 'VariableRecoveryStateBase'
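    # Statement handlers update the abstract state (registers, stack and
    # memory writes); expression handlers return RichR values carrying both
    # the data and any recovered type information.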
# Statement handlers
def _handle_Put(self, stmt):
offset = stmt.offset
r = self._expr(stmt.data)
size = stmt.data.result_size(self.tyenv) // 8
if offset == self.arch.ip_offset:
return
self._assign_to_register(offset, r, size)
def _handle_Store(self, stmt):
addr_r = self._expr(stmt.addr)
size = stmt.data.result_size(self.tyenv) // 8
r = self._expr(stmt.data)
self._store(addr_r, r, size, stmt=stmt)
def _handle_StoreG(self, stmt):
guard = self._expr(stmt.guard)
if guard is True:
addr = self._expr(stmt.addr)
size = stmt.data.result_size(self.tyenv) // 8
data = self._expr(stmt.data)
self._store(addr, data, size, stmt=stmt)
def _handle_LoadG(self, stmt):
guard = self._expr(stmt.guard)
if guard is True:
addr = self._expr(stmt.addr)
if addr is not None:
self.tmps[stmt.dst] = self._load(addr, self.tyenv.sizeof(stmt.dst) // 8)
elif guard is False:
data = self._expr(stmt.alt)
self.tmps[stmt.dst] = data
else:
self.tmps[stmt.dst] = None
def _handle_LLSC(self, stmt: pyvex.IRStmt.LLSC):
if stmt.storedata is None:
# load-link
addr = self._expr(stmt.addr)
size = self.tyenv.sizeof(stmt.result) // self.arch.byte_width
data = self._load(addr, size)
self.tmps[stmt.result] = data
else:
# store-conditional
storedata = self._expr(stmt.storedata)
addr = self._expr(stmt.addr)
size = self.tyenv.sizeof(stmt.storedata.tmp) // self.arch.byte_width
self._store(addr, storedata, size)
self.tmps[stmt.result] = RichR(1)
def _handle_NoOp(self, stmt):
pass
# Expression handlers
def _expr(self, expr) -> RichR:
"""
:param expr:
:return:
:rtype: RichR
"""
r = super()._expr(expr)
if r is None:
bits = expr.result_size(self.tyenv)
return RichR(self.state.top(bits))
return r
def _handle_Get(self, expr):
reg_offset = expr.offset
reg_size = expr.result_size(self.tyenv) // 8
return self._read_from_register(reg_offset, reg_size, expr=expr)
def _handle_Load(self, expr: pyvex.IRExpr.Load) -> RichR:
addr = self._expr(expr.addr)
size = expr.result_size(self.tyenv) // 8
return self._load(addr, size)
def _handle_CCall(self, expr): # pylint:disable=useless-return
# ccalls don't matter
return RichR(self.state.top(expr.result_size(self.tyenv)))
def _handle_Conversion(self, expr: pyvex.IRExpr.Unop) -> RichR:
return RichR(self.state.top(expr.result_size(self.tyenv)))
# Function handlers
def _handle_function(self, func_addr): # pylint:disable=unused-argument,no-self-use,useless-return
return None
def _handle_Const(self, expr):
return RichR(claripy_value(expr.con.type, expr.con.value), typevar=typeconsts.int_type(expr.con.size))
def _handle_Add(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data + r1.data,
typevar=typeconsts.int_type(result_size),
type_constraints=None)
typevar = None
if r0.typevar is not None and r1.data.concrete:
typevar = typevars.DerivedTypeVariable(r0.typevar, typevars.AddN(r1.data._model_concrete.value))
sum_ = r0.data + r1.data
return RichR(sum_,
typevar=typevar,
type_constraints={ typevars.Subtype(r0.typevar, r1.typevar) },
)
def _handle_Sub(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data - r1.data,
typevar=typeconsts.int_type(result_size),
type_constraints=None)
typevar = None
if r0.typevar is not None and r1.data.concrete:
typevar = typevars.DerivedTypeVariable(r0.typevar, typevars.SubN(r1.data._model_concrete.value))
diff = r0.data - r1.data
return RichR(diff,
typevar=typevar,
)
def _handle_And(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data & r1.data)
if self.state.is_stack_address(r0.data):
r = r0.data
elif self.state.is_stack_address(r1.data):
r = r1.data
else:
r = self.state.top(result_size)
return RichR(r)
def _handle_Xor(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data ^ r1.data)
r = self.state.top(result_size)
return RichR(r)
def _handle_Or(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data | r1.data)
r = self.state.top(result_size)
return RichR(r)
def _handle_Not(self, expr):
arg = expr.args[0]
r0 = self._expr(arg)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete:
# constants
return RichR(~r0.data)
r = self.state.top(result_size)
return RichR(r)
def _handle_Mul(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data * r1.data)
r = self.state.top(result_size)
return RichR(r)
def _handle_DivMod(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
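            # VEX DivMod yields a double-width result: remainder in the
            # high half, quotient in the low half.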
try:
signed = "U" in expr.op # Iop_DivModU64to32 vs Iop_DivMod
from_size = r0.data.size()
to_size = r1.data.size()
if signed:
quotient = (r0.data.SDiv(claripy.SignExt(from_size - to_size, r1.data)))
remainder = (r0.data.SMod(claripy.SignExt(from_size - to_size, r1.data)))
quotient_size = to_size
remainder_size = to_size
result = claripy.Concat(
claripy.Extract(remainder_size - 1, 0, remainder),
claripy.Extract(quotient_size - 1, 0, quotient)
)
else:
quotient = (r0.data // claripy.ZeroExt(from_size - to_size, r1.data))
remainder = (r0.data % claripy.ZeroExt(from_size - to_size, r1.data))
quotient_size = to_size
remainder_size = to_size
result = claripy.Concat(
claripy.Extract(remainder_size - 1, 0, remainder),
claripy.Extract(quotient_size - 1, 0, quotient)
)
return RichR(result)
except ZeroDivisionError:
pass
r = self.state.top(result_size)
return RichR(r)
def _handle_Div(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
try:
return RichR(r0.data / r1.data)
except ZeroDivisionError:
pass
r = self.state.top(result_size)
return RichR(r)
def _handle_Shr(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(claripy.LShR(r0.data, r1.data._model_concrete.value),
typevar=typeconsts.int_type(result_size),
type_constraints=None)
r = self.state.top(result_size)
return RichR(r,
typevar=r0.typevar,
)
def _handle_Sar(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data >> r1.data._model_concrete.value,
typevar=typeconsts.int_type(result_size),
type_constraints=None)
r = self.state.top(result_size)
return RichR(r,
typevar=r0.typevar,
)
def _handle_Shl(self, expr):
arg0, arg1 = expr.args
r0 = self._expr(arg0)
r1 = self._expr(arg1)
result_size = expr.result_size(self.tyenv)
if r0.data.concrete and r1.data.concrete:
# constants
return RichR(r0.data << r1.data._model_concrete.value,
typevar=typeconsts.int_type(result_size),
type_constraints=None)
r = self.state.top(result_size)
return RichR(r,
typevar=r0.typevar,
)
def _handle_CmpEQ(self, expr):
arg0, arg1 = expr.args
_ = self._expr(arg0)
_ = self._expr(arg1)
return RichR(self.state.top(1))
def _handle_CmpNE(self, expr):
arg0, arg1 = expr.args
_ = self._expr(arg0)
_ = self._expr(arg1)
return RichR(self.state.top(1))
def _handle_CmpLE(self, expr):
arg0, arg1 = expr.args
_ = self._expr(arg0)
_ = self._expr(arg1)
return RichR(self.state.top(1))
def _handle_CmpLT(self, expr):
arg0, arg1 = expr.args
_ = self._expr(arg0)
_ = self._expr(arg1)
return RichR(self.state.top(1))
def _handle_CmpGE(self, expr):
arg0, arg1 = expr.args
_ = self._expr(arg0)
_ = self._expr(arg1)
return RichR(self.state.top(1))
def _handle_CmpGT(self, expr):
arg0, arg1 = expr.args
_ = self._expr(arg0)
_ = self._expr(arg1)
return RichR(self.state.top(1))
| bsd-2-clause | 2,412,318,608,170,448,400 | 30.163158 | 110 | 0.542476 | false |
lesteve/sphinx-gallery | sphinx_gallery/py_source_parser.py | 1 | 6344 | # -*- coding: utf-8 -*-
r"""
Parser for python source files
==============================
"""
# Created Sun Nov 27 14:03:07 2016
# Author: Óscar Nájera
from __future__ import division, absolute_import, print_function
import ast
from distutils.version import LooseVersion
from io import BytesIO
import re
import sys
import tokenize
from textwrap import dedent
from .sphinx_compatibility import getLogger
logger = getLogger('sphinx-gallery')
SYNTAX_ERROR_DOCSTRING = """
SyntaxError
===========
Example script with invalid Python syntax
"""
def parse_source_file(filename):
"""Parse source file into AST node
Parameters
----------
filename : str
File path
Returns
-------
node : AST node
content : utf-8 encoded string
"""
# can't use codecs.open(filename, 'r', 'utf-8') here b/c ast doesn't
# work with unicode strings in Python2.7 "SyntaxError: encoding
# declaration in Unicode string" In python 2.7 the string can't be
# encoded and have information about its encoding. That is particularly
# problematic since source files include in their header information
# about the file encoding.
# Minimal example to fail: ast.parse(u'# -*- coding: utf-8 -*-')
with open(filename, 'rb') as fid:
content = fid.read()
# change from Windows format to UNIX for uniformity
content = content.replace(b'\r\n', b'\n')
try:
node = ast.parse(content)
return node, content.decode('utf-8')
except SyntaxError:
return None, content.decode('utf-8')
def get_docstring_and_rest(filename):
"""Separate ``filename`` content between docstring and the rest
Strongly inspired from ast.get_docstring.
Returns
-------
docstring : str
docstring of ``filename``
rest : str
``filename`` content without the docstring
"""
node, content = parse_source_file(filename)
if node is None:
return SYNTAX_ERROR_DOCSTRING, content, 1
if not isinstance(node, ast.Module):
raise TypeError("This function only supports modules. "
"You provided {0}".format(node.__class__.__name__))
if not (node.body and isinstance(node.body[0], ast.Expr) and
isinstance(node.body[0].value, ast.Str)):
raise ValueError(('Could not find docstring in file "{0}". '
'A docstring is required by sphinx-gallery '
'unless the file is ignored by "ignore_pattern"')
.format(filename))
if LooseVersion(sys.version) >= LooseVersion('3.7'):
docstring = ast.get_docstring(node)
assert docstring is not None # should be guaranteed above
# This is just for backward compat
if len(node.body[0].value.s) and node.body[0].value.s[0] == '\n':
# just for strict backward compat here
docstring = '\n' + docstring
ts = tokenize.tokenize(BytesIO(content.encode()).readline)
# find the first string according to the tokenizer and get its end row
for tk in ts:
            if tk.exact_type == tokenize.STRING:  # token code 3
lineno, _ = tk.end
break
else:
lineno = 0
else:
# this block can be removed when python 3.6 support is dropped
docstring_node = node.body[0]
docstring = docstring_node.value.s
# python2.7: Code was read in bytes needs decoding to utf-8
# unless future unicode_literals is imported in source which
# make ast output unicode strings
if hasattr(docstring, 'decode') and not isinstance(docstring, unicode):
docstring = docstring.decode('utf-8')
lineno = docstring_node.lineno # The last line of the string.
# This get the content of the file after the docstring last line
# Note: 'maxsplit' argument is not a keyword argument in python2
rest = '\n'.join(content.split('\n')[lineno:])
lineno += 1
return docstring, rest, lineno
def extract_file_config(content):
"""
Pull out the file-specific config specified in the docstring.
"""
prop_pat = re.compile(
r"^\s*#\s*sphinx_gallery_([A-Za-z0-9_]+)\s*=\s*(.+)\s*$",
re.MULTILINE)
file_conf = {}
for match in re.finditer(prop_pat, content):
name = match.group(1)
value = match.group(2)
try:
value = ast.literal_eval(value)
except (SyntaxError, ValueError):
logger.warning(
'Sphinx-gallery option %s was passed invalid value %s',
name, value)
else:
file_conf[name] = value
return file_conf
def split_code_and_text_blocks(source_file):
"""Return list with source file separated into code and text blocks.
Returns
-------
file_conf : dict
File-specific settings given in source file comments as:
``# sphinx_gallery_<name> = <value>``
blocks : list
(label, content, line_number)
List where each element is a tuple with the label ('text' or 'code'),
the corresponding content string of block and the leading line number
"""
docstring, rest_of_content, lineno = get_docstring_and_rest(source_file)
blocks = [('text', docstring, 1)]
file_conf = extract_file_config(rest_of_content)
pattern = re.compile(
r'(?P<header_line>^#{20,}.*)\s(?P<text_content>(?:^#.*\s)*)',
flags=re.M)
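    # A "text block" is a header line of 20 or more '#' characters followed
    # by consecutive '#' comment lines; everything between text blocks is
    # treated as code.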
sub_pat = re.compile('^#', flags=re.M)
pos_so_far = 0
for match in re.finditer(pattern, rest_of_content):
code_block_content = rest_of_content[pos_so_far:match.start()]
if code_block_content.strip():
blocks.append(('code', code_block_content, lineno))
lineno += code_block_content.count('\n')
lineno += 1 # Ignored header line of hashes.
text_content = match.group('text_content')
text_block_content = dedent(re.sub(sub_pat, '', text_content)).lstrip()
if text_block_content.strip():
blocks.append(('text', text_block_content, lineno))
lineno += text_content.count('\n')
pos_so_far = match.end()
remaining_content = rest_of_content[pos_so_far:]
if remaining_content.strip():
blocks.append(('code', remaining_content, lineno))
return file_conf, blocks
| bsd-3-clause | -6,063,947,884,348,162,000 | 32.378947 | 79 | 0.613213 | false |
asteroide/immo_spider | src/apiviewer/apiviewer/wsgi.py | 1 | 2573 | from flask import Flask
from flask_restful import Resource, Api, reqparse
from apiviewer import __version__
import json
import importlib
from cobwebs.config import get_config
global_config = get_config()
driver_module = importlib.import_module(global_config['main']['mq_driver'])
mq_driver = driver_module.driver
app = Flask(__name__)
api = Api(app)
# parser = reqparse.RequestParser()
# parser.add_argument('action', type=str, help="Action to be performed on data")
# parser.add_argument('uuid', type=str, help="UUID of the object")
class Root(Resource):
def get(self):
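        # Build a self-describing index: for each resource class, list the
        # HTTP methods it implements and use its docstring as description.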
class_objects = (Root, Data, Features)
methods = ("get", "post", "put", "head", "delete", "options")
tree = {}
for co in class_objects:
url = "/" + co.__name__.lower()
if co.__name__.lower() == "root": # nosec (not a hardcoded password)
url = "/"
tree[url] = {}
tree[url]["methods"] = list(filter(lambda x: x in dir(co), methods))
description = co.__doc__ or ""
tree[url]["description"] = description.strip()
return {"version": __version__, "tree": tree}
class Data(Resource):
"""
Endpoint to retrieve, synchronize or delete all data
"""
def get(self, uuid=None):
"""
:param uuid:
:return:
"""
if not uuid:
request = {"action": "list", "data": None}
else:
request = {"action": "get", "data": {"uuid": uuid}}
data = mq_driver.rpc.send("db_driver", json.dumps(request), global_config['main']['mq_host'])
return {'action': "list", "length": len(data), "data": data}
def post(self, uuid=None):
"""
Force synchronising the spider
:param uuid: not used here
:return: JSON object representing the result of the synchronisation
"""
# args = parser.parse_args()
# TODO
return {'uuid': uuid}
def delete(self, uuid):
"""
Delete a data object given its UUID
:param uuid:
:return:
"""
# args = parser.parse_args()
# TODO
return {'uuid': uuid}
class Features(Resource):
"""
Endpoint to only retrieve features
"""
def get(self):
return {'hello': 'world'}
api.add_resource(Root, '/')
api.add_resource(Data, '/data', '/data/', '/data/<string:uuid>')
api.add_resource(Features, '/features')
def main():
app.run(debug=True, host="0.0.0.0", port=4000) # nosec
if __name__ == '__main__':
main()
| apache-2.0 | 7,976,772,508,099,624,000 | 25.802083 | 101 | 0.563156 | false |
sserrot/champion_relationships | venv/Lib/site-packages/jupyter_core/application.py | 1 | 8439 | # encoding: utf-8
"""
A base Application class for Jupyter applications.
All Jupyter applications should inherit from this.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
from copy import deepcopy
import logging
import os
import sys
try:
# py3
from shutil import which
except ImportError:
from .utils.shutil_which import which
try:
raw_input
except NameError:
# py3
raw_input = input
from traitlets.config.application import Application, catch_config_error
from traitlets.config.loader import ConfigFileNotFound
from traitlets import Unicode, Bool, List, observe
from .utils import ensure_dir_exists
from ipython_genutils import py3compat
from .paths import (
jupyter_config_dir, jupyter_data_dir, jupyter_runtime_dir,
jupyter_path, jupyter_config_path, allow_insecure_writes,
issue_insecure_write_warning
)
# aliases and flags
base_aliases = {
'log-level' : 'Application.log_level',
'config' : 'JupyterApp.config_file',
}
base_flags = {
'debug': ({'Application' : {'log_level' : logging.DEBUG}},
"set log level to logging.DEBUG (maximize logging output)"),
'generate-config': ({'JupyterApp': {'generate_config': True}},
"generate default config file"),
'y': ({'JupyterApp': {'answer_yes': True}},
"Answer yes to any questions instead of prompting."),
}
class NoStart(Exception):
"""Exception to raise when an application shouldn't start"""
class JupyterApp(Application):
"""Base class for Jupyter applications"""
name = 'jupyter' # override in subclasses
description = "A Jupyter Application"
aliases = base_aliases
flags = base_flags
def _log_level_default(self):
return logging.INFO
jupyter_path = List(Unicode())
def _jupyter_path_default(self):
return jupyter_path()
config_dir = Unicode()
def _config_dir_default(self):
return jupyter_config_dir()
@property
def config_file_paths(self):
path = jupyter_config_path()
if self.config_dir not in path:
path.insert(0, self.config_dir)
path.insert(0, py3compat.getcwd())
return path
data_dir = Unicode()
def _data_dir_default(self):
d = jupyter_data_dir()
ensure_dir_exists(d, mode=0o700)
return d
runtime_dir = Unicode()
def _runtime_dir_default(self):
rd = jupyter_runtime_dir()
ensure_dir_exists(rd, mode=0o700)
return rd
@observe('runtime_dir')
def _runtime_dir_changed(self, change):
ensure_dir_exists(change['new'], mode=0o700)
generate_config = Bool(False, config=True,
help="""Generate default config file."""
)
config_file_name = Unicode(config=True,
help="Specify a config file to load."
)
def _config_file_name_default(self):
if not self.name:
return ''
return self.name.replace('-','_') + u'_config'
config_file = Unicode(config=True,
help="""Full path of a config file.""",
)
answer_yes = Bool(False, config=True,
help="""Answer yes to any prompts."""
)
def write_default_config(self):
"""Write our default config to a .py config file"""
if self.config_file:
config_file = self.config_file
else:
config_file = os.path.join(self.config_dir, self.config_file_name + '.py')
if os.path.exists(config_file) and not self.answer_yes:
answer = ''
def ask():
prompt = "Overwrite %s with default config? [y/N]" % config_file
try:
return raw_input(prompt).lower() or 'n'
except KeyboardInterrupt:
print('') # empty line
return 'n'
answer = ask()
while not answer.startswith(('y', 'n')):
print("Please answer 'yes' or 'no'")
answer = ask()
if answer.startswith('n'):
return
config_text = self.generate_config_file()
if isinstance(config_text, bytes):
config_text = config_text.decode('utf8')
print("Writing default config to: %s" % config_file)
ensure_dir_exists(os.path.abspath(os.path.dirname(config_file)), 0o700)
with open(config_file, mode='w') as f:
f.write(config_text)
def migrate_config(self):
"""Migrate config/data from IPython 3"""
if os.path.exists(os.path.join(self.config_dir, 'migrated')):
# already migrated
return
from .migrate import get_ipython_dir, migrate
# No IPython dir, nothing to migrate
if not os.path.exists(get_ipython_dir()):
return
migrate()
def load_config_file(self, suppress_errors=True):
"""Load the config file.
By default, errors in loading config are handled, and a warning
printed on screen. For testing, the suppress_errors option is set
to False, so errors will make tests fail.
"""
self.log.debug("Searching %s for config files", self.config_file_paths)
base_config = 'jupyter_config'
try:
super(JupyterApp, self).load_config_file(
base_config,
path=self.config_file_paths,
)
except ConfigFileNotFound:
# ignore errors loading parent
self.log.debug("Config file %s not found", base_config)
pass
if self.config_file:
path, config_file_name = os.path.split(self.config_file)
else:
path = self.config_file_paths
config_file_name = self.config_file_name
if not config_file_name or (config_file_name == base_config):
return
try:
super(JupyterApp, self).load_config_file(
config_file_name,
path=path
)
except ConfigFileNotFound:
self.log.debug("Config file not found, skipping: %s", config_file_name)
except Exception:
# Reraise errors for testing purposes, or if set in
# self.raise_config_file_errors
if (not suppress_errors) or self.raise_config_file_errors:
raise
self.log.warning("Error loading config file: %s" %
config_file_name, exc_info=True)
# subcommand-related
def _find_subcommand(self, name):
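        # Subcommands dispatch to external executables named
        # "<app>-<subcommand>" found on PATH (git-style lookup).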
name = '{}-{}'.format(self.name, name)
return which(name)
@property
def _dispatching(self):
"""Return whether we are dispatching to another command
or running ourselves.
"""
return bool(self.generate_config or self.subapp or self.subcommand)
subcommand = Unicode()
@catch_config_error
def initialize(self, argv=None):
# don't hook up crash handler before parsing command-line
if argv is None:
argv = sys.argv[1:]
if argv:
subc = self._find_subcommand(argv[0])
if subc:
self.argv = argv
self.subcommand = subc
return
self.parse_command_line(argv)
cl_config = deepcopy(self.config)
if self._dispatching:
return
self.migrate_config()
self.load_config_file()
# enforce cl-opts override configfile opts:
self.update_config(cl_config)
if allow_insecure_writes:
issue_insecure_write_warning()
def start(self):
"""Start the whole thing"""
if self.subcommand:
os.execv(self.subcommand, [self.subcommand] + self.argv[1:])
raise NoStart()
if self.subapp:
self.subapp.start()
raise NoStart()
if self.generate_config:
self.write_default_config()
raise NoStart()
@classmethod
def launch_instance(cls, argv=None, **kwargs):
"""Launch an instance of a Jupyter Application"""
try:
return super(JupyterApp, cls).launch_instance(argv=argv, **kwargs)
except NoStart:
return
if __name__ == '__main__':
JupyterApp.launch_instance()
| mit | 4,790,358,782,169,673,000 | 29.687273 | 86 | 0.584192 | false |
rjhd2/HadISD_v2 | set_paths_and_vars.py | 1 | 2534 | #!/usr/local/sci/bin/python2.7
#------------------------------------------------------------
# SVN Info
#$Rev:: 84 $: Revision of last commit
#$Author:: rdunn $: Author of last commit
#$Date:: 2015-12-18 16:35:07 +0000 (Fri, 18 Dec 2015) $: Date of last commit
#------------------------------------------------------------
# START
#------------------------------------------------------------
"""
Sets a load of paths and defaults.
Should be the only place to edit these each time around.
"""
import datetime as dt
import os
# File paths to read and use
HADISD_VERSION = "v201_2016p"
PREVIOUS_VERSION = "v200_2015p"
# For /project
ROOT_LOC = "/project/hadobs2/hadisd/{}".format(HADISD_VERSION)
INPUT_FILE_LOCS = "{}/code_{}/input_files/".format(ROOT_LOC, HADISD_VERSION)
# at the moment code and input files all stored on project.
# For SPICE/Slurm
ROOT_LOC = "/scratch/hadobs/{}/".format(HADISD_VERSION)
IMAGE_LOCS = "{}/img_files_{}/".format(ROOT_LOC, HADISD_VERSION)
if not os.path.exists(IMAGE_LOCS): os.mkdir(IMAGE_LOCS)
NETCDF_DATA_LOCS = "{}/netcdf_files_{}/".format(ROOT_LOC, HADISD_VERSION)
if not os.path.exists(NETCDF_DATA_LOCS): os.mkdir(NETCDF_DATA_LOCS)
ISD_DATA_LOCS = "{}/isd_files_{}/".format(ROOT_LOC, HADISD_VERSION)
if not os.path.exists(ISD_DATA_LOCS): os.mkdir(ISD_DATA_LOCS)
LOG_OUTFILE_LOCS = "{}/suppl_files_{}/".format(ROOT_LOC, HADISD_VERSION)
if not os.path.exists(LOG_OUTFILE_LOCS): os.mkdir(LOG_OUTFILE_LOCS)
OLD_ISD_DATA_LOCS = "/project/hadobs2/hadisd/{}/isd_files_{}/".format(PREVIOUS_VERSION, PREVIOUS_VERSION)
OLD_INPUT_FILE_LOCS = "/project/hadobs2/hadisd/{}/code_{}/input_files/".format(PREVIOUS_VERSION, PREVIOUS_VERSION)
# Other settings
DATASTART = dt.datetime(1931,1,1,0,0)
DATAEND = dt.datetime(2017,1,1,0,0)
process_vars = ["temperatures","dewpoints","slp","windspeeds", "winddirs", "total_cloud_cover","low_cloud_cover","mid_cloud_cover","high_cloud_cover"]
carry_thru_vars = ["cloud_base","precip1_depth","precip1_period","wind_gust", "past_sigwx1"]
# print for information each time - enables clearer checking
print "HadISD version: {}".format(HADISD_VERSION)
print "Data location : {}".format(NETCDF_DATA_LOCS)
print "Data range : {} - {}\n".format(dt.datetime.strftime(DATASTART, "%Y-%m-%d"), dt.datetime.strftime(DATAEND, "%Y-%m-%d"))
#------------------------------------------------------------
# END
#------------------------------------------------------------
| bsd-3-clause | -5,510,080,318,746,911,000 | 40.540984 | 150 | 0.59116 | false |
Mushiyo/isso | isso/wsgi.py | 1 | 5736 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import sys
import socket
try:
from urllib.parse import quote, urlparse
from socketserver import ThreadingMixIn
from http.server import HTTPServer
except ImportError:
from urllib import quote
from urlparse import urlparse
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer
from werkzeug.serving import WSGIRequestHandler
from werkzeug.wrappers import Request as _Request
from werkzeug.datastructures import Headers
from isso.compat import string_types
def host(environ): # pragma: no cover
"""
Reconstruct host from environment. A modified version
of http://www.python.org/dev/peps/pep-0333/#url-reconstruction
"""
url = environ['wsgi.url_scheme']+'://'
if environ.get('HTTP_HOST'):
url += environ['HTTP_HOST']
else:
url += environ['SERVER_NAME']
if environ['wsgi.url_scheme'] == 'https':
if environ['SERVER_PORT'] != '443':
url += ':' + environ['SERVER_PORT']
else:
if environ['SERVER_PORT'] != '80':
url += ':' + environ['SERVER_PORT']
return url + quote(environ.get('SCRIPT_NAME', ''))
def urlsplit(name):
"""
Parse :param:`name` into (netloc, port, ssl)
"""
if not (isinstance(name, string_types)):
name = str(name)
if not name.startswith(('http://', 'https://')):
name = 'http://' + name
rv = urlparse(name)
if rv.scheme == 'https' and rv.port is None:
return rv.netloc, 443, True
return rv.netloc.rsplit(':')[0], rv.port or 80, rv.scheme == 'https'
def urljoin(netloc, port, ssl):
"""
Basically the counter-part of :func:`urlsplit`.
"""
rv = ("https" if ssl else "http") + "://" + netloc
if ssl and port != 443 or not ssl and port != 80:
rv += ":%i" % port
return rv
def origin(hosts):
"""
Return a function that returns a valid HTTP Origin or localhost
if none found.
"""
hosts = [urlsplit(h) for h in hosts]
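    # Precompute (netloc, port, ssl) triples once; each request's Origin
    # (or Referer) header is matched against them below.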
def func(environ):
if not hosts:
return "http://invalid.local"
loc = environ.get("HTTP_ORIGIN", environ.get("HTTP_REFERER", None))
if loc is None:
return urljoin(*hosts[0])
for split in hosts:
if urlsplit(loc) == split:
return urljoin(*split)
else:
return urljoin(*hosts[0])
return func
class SubURI(object):
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
return self.app(environ, start_response)
class CORSMiddleware(object):
"""Add Cross-origin resource sharing headers to every request."""
methods = ("HEAD", "GET", "POST", "PUT", "DELETE")
def __init__(self, app, origin, allowed=None, exposed=None):
self.app = app
self.origin = origin
self.allowed = allowed
self.exposed = exposed
def __call__(self, environ, start_response):
def add_cors_headers(status, headers, exc_info=None):
headers = Headers(headers)
headers.add("Access-Control-Allow-Origin", self.origin(environ))
headers.add("Access-Control-Allow-Credentials", "true")
headers.add("Access-Control-Allow-Methods", ", ".join(self.methods))
if self.allowed:
headers.add("Access-Control-Allow-Headers", ", ".join(self.allowed))
if self.exposed:
headers.add("Access-Control-Expose-Headers", ", ".join(self.exposed))
return start_response(status, headers.to_list(), exc_info)
if environ.get("REQUEST_METHOD") == "OPTIONS":
add_cors_headers("200 Ok", [("Content-Type", "text/plain")])
return []
return self.app(environ, add_cors_headers)
class LegacyWerkzeugMiddleware(object):
# Add compatibility with werkzeug 0.8
# -- https://github.com/posativ/isso/pull/170
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
def to_native(x, charset=sys.getdefaultencoding(), errors='strict'):
if x is None or isinstance(x, str):
return x
return x.decode(charset, errors)
def fix_headers(status, headers, exc_info=None):
headers = [(to_native(key), value) for key, value in headers]
return start_response(status, headers, exc_info)
return self.app(environ, fix_headers)
class Request(_Request):
# Assuming UTF-8, comments with 65536 characters would consume
# 128 kb memory. The remaining 128 kb cover additional parameters
# and WSGI headers.
max_content_length = 256 * 1024
class SocketWSGIRequestHandler(WSGIRequestHandler):
def run_wsgi(self):
self.client_address = ("<local>", 0)
super(SocketWSGIRequestHandler, self).run_wsgi()
class SocketHTTPServer(HTTPServer, ThreadingMixIn):
"""
A simple SocketServer to serve werkzeug's WSGIRequesthandler.
"""
multithread = True
multiprocess = False
allow_reuse_address = 1
address_family = socket.AF_UNIX
request_queue_size = 128
def __init__(self, sock, app):
HTTPServer.__init__(self, sock, SocketWSGIRequestHandler)
self.app = app
self.ssl_context = None
self.shutdown_signal = False
| mit | -1,876,850,611,275,330,300 | 26.84466 | 85 | 0.607915 | false |
SurveyMonkey/defrost | tests/test_cli.py | 1 | 3952 | import pytest
from click.testing import CliRunner
@pytest.mark.parametrize("yaml, pipfreeze, output, exit_code", [
('tests/reqs.yml', 'tests/pipfreeze_no_matching_req.txt', '', 0),
('tests/reqs.yml', 'tests/pipfreeze_satisfied_matching_req.txt', '', 0),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_matching_req.txt', 'error: Package(foo==0.1) does not satisfy Requirement(foo>=1.0): upgrade now!\n', 1),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_reqs_and_contains_links.txt', 'error: Package(foo==0.1) does not satisfy Requirement(foo>=1.0): upgrade now!\n', 1),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_reqs_with_severity.txt', 'warn: Package(testseverity==0.1) does not satisfy Requirement(testseverity>=1.0): upgrade now!\n', 0),
])
def test_defrost__cli(yaml, pipfreeze, output, exit_code):
from defrost.cli import defrost
runner = CliRunner()
result = runner.invoke(
defrost, [yaml, pipfreeze], catch_exceptions=False
)
assert result.output == output
assert result.exit_code == exit_code
@pytest.mark.parametrize("exit_mode_option, yaml, pipfreeze, exit_code", [
(exit_mode_option, yaml, pipfreeze, exit_code)
for yaml, pipfreeze, exit_code in [
('tests/reqs.yml', 'tests/pipfreeze_no_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_satisfied_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_reqs_with_severity.txt', 0),
] for exit_mode_option in ('-x', '--exit-mode')
])
def test_defrost__exit_mode_soft(exit_mode_option, yaml, pipfreeze, exit_code):
from defrost.cli import defrost
runner = CliRunner()
result = runner.invoke(
defrost, [exit_mode_option, 'soft', yaml, pipfreeze], catch_exceptions=False
)
assert result.exit_code == exit_code
@pytest.mark.parametrize("exit_mode_option, yaml, pipfreeze, exit_code", [
(exit_mode_option, yaml, pipfreeze, exit_code)
for yaml, pipfreeze, exit_code in [
('tests/reqs.yml', 'tests/pipfreeze_no_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_satisfied_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_matching_req.txt', 1),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_reqs_with_severity.txt', 0),
] for exit_mode_option in ('-x', '--exit-mode')
])
def test_defrost__exit_mode_normal(exit_mode_option, yaml, pipfreeze, exit_code):
from defrost.cli import defrost
runner = CliRunner()
result = runner.invoke(
defrost, [exit_mode_option, 'normal', yaml, pipfreeze], catch_exceptions=False
)
assert result.exit_code == exit_code
@pytest.mark.parametrize("exit_mode_option, yaml, pipfreeze, exit_code", [
(exit_mode_option, yaml, pipfreeze, exit_code)
for yaml, pipfreeze, exit_code in [
('tests/reqs.yml', 'tests/pipfreeze_no_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_satisfied_matching_req.txt', 0),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_matching_req.txt', 1),
('tests/reqs.yml', 'tests/pipfreeze_unsatisfied_reqs_with_severity.txt', 1),
] for exit_mode_option in ('-x', '--exit-mode')
])
def test_defrost__exit_mode_hard(exit_mode_option, yaml, pipfreeze, exit_code):
from defrost.cli import defrost
runner = CliRunner()
result = runner.invoke(
defrost, [exit_mode_option, 'hard', yaml, pipfreeze], catch_exceptions=False
)
assert result.exit_code == exit_code
@pytest.mark.parametrize("yaml, exit_code", [
('tests/reqs.yml', 0),
('tests/reqs_invalid_data.yml', 1),
('tests/reqs_unparsable.yml', 1),
])
def test_defrost_lint(yaml, exit_code):
from defrost.cli import lint
runner = CliRunner()
result = runner.invoke(
lint, [yaml], catch_exceptions=False
)
assert result.exit_code == exit_code
| mit | -5,991,600,713,043,038,000 | 41.956522 | 180 | 0.668775 | false |
kiniou/blender-smooth-slides | tools/lpod/test/test_container.py | 1 | 5651 | # -*- coding: UTF-8 -*-
#
# Copyright (c) 2009 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Authors: Hervé Cauwelier <[email protected]>
# Luis Belmar-Letelier <[email protected]>
# David Versmisse <[email protected]>
#
# This file is part of Lpod (see: http://lpod-project.org).
# Lpod is free software; you can redistribute it and/or modify it under
# the terms of either:
#
# a) the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option)
# any later version.
# Lpod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Lpod. If not, see <http://www.gnu.org/licenses/>.
#
# b) the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Import from the Standard Library
from unittest import TestCase, main
# Import from lpod
from lpod.container import ODF_EXTENSIONS
from lpod.container import odf_get_container
from lpod.container import odf_new_container_from_type
from lpod.container import odf_new_container_from_template
from lpod.vfs import vfs
class NewContainerFromTemplateTestCase(TestCase):
def test_bad_template(self):
self.assertRaises(ValueError, odf_new_container_from_template,
'../templates/notexisting')
def test_text_template(self):
uri = '../templates/text.ott'
self.assert_(odf_new_container_from_template(uri))
def test_spreadsheet_template(self):
uri = '../templates/spreadsheet.ots'
self.assert_(odf_new_container_from_template(uri))
def test_presentation_template(self):
uri = '../templates/presentation.otp'
self.assert_(odf_new_container_from_template(uri))
def test_drawing_template(self):
uri = '../templates/drawing.otg'
self.assert_(odf_new_container_from_template(uri))
class NewContainerFromTypeTestCase(TestCase):
def test_bad_type(self):
self.assertRaises(ValueError, odf_new_container_from_type,
'foobar')
def test_text_type(self):
self.assert_(odf_new_container_from_type('text'))
def test_spreadsheet_type(self):
self.assert_(odf_new_container_from_type('spreadsheet'))
def test_presentation_type(self):
self.assert_(odf_new_container_from_type('presentation'))
def test_drawing_type(self):
self.assert_(odf_new_container_from_type('drawing'))
class GetContainerTestCase(TestCase):
def test_filesystem(self):
path = 'samples/example.odt'
self.assert_(odf_get_container(path))
def test_odf_xml(self):
path = 'samples/example.xml'
self.assert_(odf_get_container(path))
def test_http(self):
uri = 'http://ftp.lpod-project.org/example.odt'
self.assert_(odf_get_container(uri))
def test_ftp(self):
uri = 'ftp://ftp.lpod-project.org/example.odt'
self.assert_(odf_get_container(uri))
class ContainerTestCase(TestCase):
def test_clone(self):
container = odf_new_container_from_type('text')
clone = container.clone()
self.assertEqual(clone.uri, None)
self.assertNotEqual(clone._odf_container__data, None)
def test_get_part_xml(self):
container = odf_get_container('samples/example.odt')
content = container.get_part('content')
xml_decl = '<?xml version="1.0" encoding="UTF-8"?>'
self.assert_(content.startswith(xml_decl))
def test_get_part_mimetype(self):
container = odf_get_container('samples/example.odt')
mimetype = container.get_part('mimetype')
self.assertEqual(mimetype, ODF_EXTENSIONS['odt'])
def test_odf_xml_bad_part(self):
container = odf_get_container('samples/example.xml')
self.assertRaises(ValueError, container.get_part, 'Pictures/a.jpg')
def test_odf_xml_part_xml(self):
container = odf_get_container('samples/example.xml')
meta = container.get_part('meta')
self.assert_(meta.startswith('<office:document-meta>'))
def test_set_part(self):
container = odf_get_container('samples/example.odt')
path = 'Pictures/a.jpg'
data = 'JFIFIThinkImAnImage'
container.set_part(path, data)
self.assertEqual(container.get_part(path), data)
def test_del_part(self):
container = odf_get_container('samples/example.odt')
# Not a realistic test
path = 'content'
container.del_part(path)
self.assertRaises(ValueError, container.get_part, path)
class ContainerSaveTestCase(TestCase):
def setUp(self):
vfs.make_folder('trash')
def tearDown(self):
vfs.remove('trash')
def test_save_zip(self):
"""TODO: 2 cases
1. from "zip" to "zip"
2. from "flat" to "zip"
"""
container = odf_get_container('samples/example.odt')
container.save('trash/example.odt')
# TODO FINISH ME
# XXX We must implement the flat xml part
def xtest_save_flat(self):
"""TODO: 2 cases
1. from "zip" to "flat"
2. from "flat" to "flat"
"""
raise NotImplementedError
if __name__ == '__main__':
main()
| gpl-3.0 | -1,907,470,718,767,404,300 | 27.974359 | 75 | 0.649735 | false |
lidatong/dataclasses-json | dataclasses_json/utils.py | 1 | 4383 | import inspect
import sys
from datetime import datetime, timezone
from typing import Collection, Mapping, Optional, TypeVar, Any
def _get_type_cons(type_):
"""More spaghetti logic for 3.6 vs. 3.7"""
if sys.version_info.minor == 6:
try:
cons = type_.__extra__
except AttributeError:
try:
cons = type_.__origin__
except AttributeError:
cons = type_
else:
cons = type_ if cons is None else cons
        else:
            try:
                cons = type_.__origin__ if cons is None else cons
            except AttributeError:
                cons = type_
    else:
        cons = type_.__origin__
return cons
def _get_type_origin(type_):
"""Some spaghetti logic to accommodate differences between 3.6 and 3.7 in
the typing api"""
try:
origin = type_.__origin__
except AttributeError:
if sys.version_info.minor == 6:
try:
origin = type_.__extra__
except AttributeError:
origin = type_
else:
origin = type_ if origin is None else origin
else:
origin = type_
return origin
def _hasargs(type_, *args):
try:
res = all(arg in type_.__args__ for arg in args)
except AttributeError:
return False
else:
return res
def _isinstance_safe(o, t):
try:
result = isinstance(o, t)
except Exception:
return False
else:
return result
def _issubclass_safe(cls, classinfo):
try:
return issubclass(cls, classinfo)
except Exception:
return (_is_new_type_subclass_safe(cls, classinfo)
if _is_new_type(cls)
else False)
def _is_new_type_subclass_safe(cls, classinfo):
super_type = getattr(cls, "__supertype__", None)
if super_type:
return _is_new_type_subclass_safe(super_type, classinfo)
try:
return issubclass(cls, classinfo)
except Exception:
return False
def _is_new_type(type_):
return inspect.isfunction(type_) and hasattr(type_, "__supertype__")
def _is_optional(type_):
return (_issubclass_safe(type_, Optional) or
_hasargs(type_, type(None)) or
type_ is Any)
def _is_mapping(type_):
return _issubclass_safe(_get_type_origin(type_), Mapping)
def _is_collection(type_):
return _issubclass_safe(_get_type_origin(type_), Collection)
def _is_nonstr_collection(type_):
return (_issubclass_safe(_get_type_origin(type_), Collection)
and not _issubclass_safe(type_, str))
def _timestamp_to_dt_aware(timestamp: float):
tz = datetime.now(timezone.utc).astimezone().tzinfo
dt = datetime.fromtimestamp(timestamp, tz=tz)
return dt
def _undefined_parameter_action_safe(cls):
try:
if cls.dataclass_json_config is None:
return
action_enum = cls.dataclass_json_config['undefined']
except (AttributeError, KeyError):
return
if action_enum is None or action_enum.value is None:
return
return action_enum
def _handle_undefined_parameters_safe(cls, kvs, usage: str):
"""
Checks if an undefined parameters action is defined and performs the
according action.
"""
undefined_parameter_action = _undefined_parameter_action_safe(cls)
usage = usage.lower()
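    # Dispatch on how the caller uses the action: "from"/"to" transform
    # dicts, "dump" serializes the object, "init" wraps __init__.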
if undefined_parameter_action is None:
return kvs if usage != "init" else cls.__init__
if usage == "from":
return undefined_parameter_action.value.handle_from_dict(cls=cls,
kvs=kvs)
elif usage == "to":
return undefined_parameter_action.value.handle_to_dict(obj=cls,
kvs=kvs)
elif usage == "dump":
return undefined_parameter_action.value.handle_dump(obj=cls)
elif usage == "init":
return undefined_parameter_action.value.create_init(obj=cls)
else:
raise ValueError(
f"usage must be one of ['to', 'from', 'dump', 'init'], "
f"but is '{usage}'")
# Define a type for the CatchAll field
# https://stackoverflow.com/questions/59360567/define-a-custom-type-that-behaves-like-typing-any
CatchAllVar = TypeVar("CatchAllVar", bound=Mapping)
| mit | 3,251,248,470,436,851,000 | 27.096154 | 96 | 0.588182 | false |
simonvh/fluff | tests/test_commands.py | 1 | 4188 | import pytest
import os
import urllib.request
import tarfile
from tempfile import NamedTemporaryFile
from sklearn.metrics import v_measure_score
from fluff.parse import parse_cmds
from fluff.commands.profile import profile
from fluff.commands.heatmap import heatmap
from fluff.commands.bandplot import bandplot
@pytest.fixture
def bamfile():
return "tests/data/H3K4me3.bam"
@pytest.fixture
def bwfile():
return "tests/data/profile.bw"
@pytest.fixture
def bedfile():
return "tests/data/profile.bed"
@pytest.fixture
def regionfile():
return "tests/data/profile_region.bed"
@pytest.fixture
def test_data_from_osf():
fnames = [
"tests/data/big/H1_H3K27ac.bam",
"tests/data/big/H1_H3K27ac.bam.bai",
"tests/data/big/mesenchymal_H3K27ac.bam",
"tests/data/big/mesenchymal_H3K27ac.bam.bai",
"tests/data/big/mesendoderm_H3K27ac.bam",
"tests/data/big/mesendoderm_H3K27ac.bam.bai",
"tests/data/big/neuronal_progenitor_H3K27ac.bam",
"tests/data/big/neuronal_progenitor_H3K27ac.bam.bai",
"tests/data/big/trophoblast_H3K27ac.bam",
"tests/data/big/trophoblast_H3K27ac.bam.bai",
"tests/data/big/peaks.bed",
]
download = False
for fname in fnames:
if not os.path.exists(fname):
download = True
break
if download:
# test data tarball on osf.io
url = "https://osf.io/6yftg/download"
tarball = "tests/data/big/test_data.tgz"
urllib.request.urlretrieve(url, tarball)
with tarfile.open(tarball) as tf:
tf.extractall(path="tests/data/big/")
os.unlink(tarball)
clusters = "tests/data/big/clusters.kmeans.euclidean.5.txt"
return fnames[-1], [f for f in fnames if f[-3:] == "bam"], clusters
def test_profile(bamfile):
# Only tests of the command runs successfully,
# doesnt't check the image
with NamedTemporaryFile(prefix="fluff.", suffix=".png") as tmp:
args = ["profile",
"-i", "scaffold_1:44749422-44750067",
"-d", bamfile,
"-o", tmp.name]
args = parse_cmds().parse_args(args)
profile(args)
def test_heatmap(bamfile, bedfile, bwfile, regionfile):
    # Only tests that the command runs successfully,
    # doesn't check the image
with NamedTemporaryFile(prefix="fluff.", suffix=".png") as tmp:
args = ["heatmap",
"-f", regionfile,
"-d", bamfile, bwfile, bedfile,
"-o", tmp.name]
args = parse_cmds().parse_args(args)
heatmap(args)
def test_plots_big(test_data_from_osf):
peaks, bamfiles, clusters = test_data_from_osf
with NamedTemporaryFile(prefix="fluff.", suffix=".png") as f:
args = [
"heatmap",
'-f', peaks,
"-d", *bamfiles,
"-o", f.name,
"-C", "kmeans",
"-k", "5",
]
args = parse_cmds().parse_args(args)
heatmap(args)
# Reading current clusters
fcluster = f.name + "_clusters.bed"
pred_clusters = []
for line in open(fcluster):
vals = line.strip().split("\t")
pred_clusters.append([f"{vals[0]}:{vals[1]}-{vals[2]}", vals[4]])
# Reading reference clusters
cmp_clusters = []
for line in open(clusters):
vals = line.strip().split("\t")
cmp_clusters.append(vals)
# sort by region name
cmp_clusters = [x[1] for x in sorted(cmp_clusters)]
pred_clusters = [x[1] for x in sorted(pred_clusters)]
v = v_measure_score(cmp_clusters, pred_clusters)
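    # V-measure scores the predicted clustering against the reference; the
    # 0.25 threshold is deliberately loose since k-means is not deterministic.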
assert v > 0.25
# Test bandplot
args = [
"bandplot",
'-f', fcluster,
"-d", *bamfiles,
"-o", f.name,
]
args = parse_cmds().parse_args(args)
bandplot(args)
tmpnames = [
f.name + "_clusters.bed",
f.name + "_readCounts.txt",
]
for name in tmpnames:
os.unlink(name)
| mit | -5,502,848,502,794,335,000 | 29.794118 | 77 | 0.567813 | false |
tbursztyka/python-elf | setup.py | 1 | 1070 | ##############
# Setup File #
##############
"""
python-elf - A python library to manipulate ELF format
Copyright (C) 2008 Tomasz Bursztyka
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from distutils.core import setup, Extension
elf_packages = ['elf', 'elf.core']
setup (name = 'elf',
version = '0.1',
description = 'python-elf',
packages = elf_packages,
author = "Tomasz Bursztyka"
)
#######
# EOF #
#######
| lgpl-3.0 | 4,951,030,473,274,327,000 | 28.722222 | 73 | 0.663551 | false |
toabctl/contrail-sandesh | library/python/pysandesh/sandesh_stats.py | 1 | 10781 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
#
# sandesh_stats.py
#
from pysandesh.sandesh_base import Sandesh
from pysandesh.gen_py.sandesh_uve.ttypes import SandeshMessageStats
from pysandesh.gen_py.sandesh.ttypes import SandeshTxDropReason, \
SandeshRxDropReason
class SandeshMessageStatistics(object):
def __init__(self):
self._message_type_stats = {}
self._aggregate_stats = SandeshMessageStats()
# end __init__
def message_type_stats(self):
return self._message_type_stats
# end message_type_stats
def aggregate_stats(self):
return self._aggregate_stats
# end aggregate_stats
def update_tx_stats(self, message_type, nbytes,
drop_reason=SandeshTxDropReason.NoDrop):
if SandeshTxDropReason.MinDropReason < drop_reason < \
SandeshTxDropReason.MaxDropReason:
try:
message_stats = self._message_type_stats[message_type]
except KeyError:
message_stats = SandeshMessageStats()
self._message_type_stats[message_type] = message_stats
finally:
self._update_tx_stats_internal(message_stats, nbytes,
drop_reason)
self._update_tx_stats_internal(self._aggregate_stats, nbytes,
drop_reason)
return True
return False
# end update_tx_stats
def update_rx_stats(self, message_type, nbytes,
drop_reason=SandeshRxDropReason.NoDrop):
if SandeshRxDropReason.MinDropReason < drop_reason < \
SandeshRxDropReason.MaxDropReason:
try:
message_stats = self._message_type_stats[message_type]
except KeyError:
message_stats = SandeshMessageStats()
self._message_type_stats[message_type] = message_stats
finally:
self._update_rx_stats_internal(message_stats, nbytes,
drop_reason)
self._update_rx_stats_internal(self._aggregate_stats, nbytes,
drop_reason)
return True
return False
# end update_rx_stats
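    # The helpers below bump both the per-message-type and aggregate
    # counters; stats fields start out unset (None), hence the
    # first-update branches.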
def _update_tx_stats_internal(self, msg_stats, nbytes, drop_reason):
if drop_reason is SandeshTxDropReason.NoDrop:
try:
msg_stats.messages_sent += 1
msg_stats.bytes_sent += nbytes
except TypeError:
msg_stats.messages_sent = 1
msg_stats.bytes_sent = nbytes
else:
if msg_stats.messages_sent_dropped:
msg_stats.messages_sent_dropped += 1
msg_stats.bytes_sent_dropped += nbytes
else:
msg_stats.messages_sent_dropped = 1
msg_stats.bytes_sent_dropped = nbytes
if drop_reason is SandeshTxDropReason.ValidationFailed:
if msg_stats.messages_sent_dropped_validation_failed:
msg_stats.messages_sent_dropped_validation_failed += 1
msg_stats.bytes_sent_dropped_validation_failed += nbytes
else:
msg_stats.messages_sent_dropped_validation_failed = 1
msg_stats.bytes_sent_dropped_validation_failed = nbytes
elif drop_reason is SandeshTxDropReason.QueueLevel:
if msg_stats.messages_sent_dropped_queue_level:
msg_stats.messages_sent_dropped_queue_level += 1
msg_stats.bytes_sent_dropped_queue_level += nbytes
else:
msg_stats.messages_sent_dropped_queue_level = 1
msg_stats.bytes_sent_dropped_queue_level = nbytes
elif drop_reason is SandeshTxDropReason.NoClient:
if msg_stats.messages_sent_dropped_no_client:
msg_stats.messages_sent_dropped_no_client += 1
msg_stats.bytes_sent_dropped_no_client += nbytes
else:
msg_stats.messages_sent_dropped_no_client = 1
msg_stats.bytes_sent_dropped_no_client = nbytes
elif drop_reason is SandeshTxDropReason.NoSession:
if msg_stats.messages_sent_dropped_no_session:
msg_stats.messages_sent_dropped_no_session += 1
msg_stats.bytes_sent_dropped_no_session += nbytes
else:
msg_stats.messages_sent_dropped_no_session = 1
msg_stats.bytes_sent_dropped_no_session = nbytes
elif drop_reason is SandeshTxDropReason.NoQueue:
if msg_stats.messages_sent_dropped_no_queue:
msg_stats.messages_sent_dropped_no_queue += 1
msg_stats.bytes_sent_dropped_no_queue += nbytes
else:
msg_stats.messages_sent_dropped_no_queue = 1
msg_stats.bytes_sent_dropped_no_queue = nbytes
elif drop_reason is SandeshTxDropReason.ClientSendFailed:
if msg_stats.messages_sent_dropped_client_send_failed:
msg_stats.messages_sent_dropped_client_send_failed += 1
msg_stats.bytes_sent_dropped_client_send_failed += nbytes
else:
msg_stats.messages_sent_dropped_client_send_failed = 1
msg_stats.bytes_sent_dropped_client_send_failed = nbytes
elif drop_reason is SandeshTxDropReason.HeaderWriteFailed:
if msg_stats.messages_sent_dropped_header_write_failed:
msg_stats.messages_sent_dropped_header_write_failed += 1
msg_stats.bytes_sent_dropped_header_write_failed += nbytes
else:
msg_stats.messages_sent_dropped_header_write_failed = 1
msg_stats.bytes_sent_dropped_header_write_failed = nbytes
elif drop_reason is SandeshTxDropReason.WriteFailed:
if msg_stats.messages_sent_dropped_write_failed:
msg_stats.messages_sent_dropped_write_failed += 1
msg_stats.bytes_sent_dropped_write_failed += nbytes
else:
msg_stats.messages_sent_dropped_write_failed = 1
msg_stats.bytes_sent_dropped_write_failed = nbytes
elif drop_reason is SandeshTxDropReason.SessionNotConnected:
if msg_stats.messages_sent_dropped_session_not_connected:
msg_stats.messages_sent_dropped_session_not_connected += 1
msg_stats.bytes_sent_dropped_session_not_connected += nbytes
else:
msg_stats.messages_sent_dropped_session_not_connected = 1
msg_stats.bytes_sent_dropped_session_not_connected = nbytes
elif drop_reason is SandeshTxDropReason.WrongClientSMState:
if msg_stats.messages_sent_dropped_wrong_client_sm_state:
msg_stats.messages_sent_dropped_wrong_client_sm_state += 1
msg_stats.bytes_sent_dropped_wrong_client_sm_state += nbytes
else:
msg_stats.messages_sent_dropped_wrong_client_sm_state = 1
msg_stats.bytes_sent_dropped_wrong_client_sm_state = nbytes
else:
assert 0, 'Unhandled Tx drop reason <%s>' % (str(drop_reason))
# end _update_tx_stats_internal
def _update_rx_stats_internal(self, msg_stats, nbytes, drop_reason):
if drop_reason is SandeshRxDropReason.NoDrop:
if msg_stats.messages_received:
msg_stats.messages_received += 1
msg_stats.bytes_received += nbytes
else:
msg_stats.messages_received = 1
msg_stats.bytes_received = nbytes
else:
if msg_stats.messages_received_dropped:
msg_stats.messages_received_dropped += 1
msg_stats.bytes_received_dropped += nbytes
else:
msg_stats.messages_received_dropped = 1
msg_stats.bytes_received_dropped = nbytes
if drop_reason is SandeshRxDropReason.QueueLevel:
if msg_stats.messages_received_dropped_queue_level:
msg_stats.messages_received_dropped_queue_level += 1
msg_stats.bytes_received_dropped_queue_level += nbytes
else:
msg_stats.messages_received_dropped_queue_level = 1
msg_stats.bytes_received_dropped_queue_level = nbytes
elif drop_reason is SandeshRxDropReason.NoQueue:
if msg_stats.messages_received_dropped_no_queue:
msg_stats.messages_received_dropped_no_queue += 1
msg_stats.bytes_received_dropped_no_queue += nbytes
else:
msg_stats.messages_received_dropped_no_queue = 1
msg_stats.bytes_received_dropped_no_queue = nbytes
elif drop_reason is SandeshRxDropReason.ControlMsgFailed:
if msg_stats.messages_received_dropped_control_msg_failed:
msg_stats.messages_received_dropped_control_msg_failed += 1
msg_stats.bytes_received_dropped_control_msg_failed += nbytes
else:
msg_stats.messages_received_dropped_control_msg_failed = 1
msg_stats.bytes_received_dropped_control_msg_failed = nbytes
elif drop_reason is SandeshRxDropReason.CreateFailed:
if msg_stats.messages_received_dropped_create_failed:
msg_stats.messages_received_dropped_create_failed += 1
msg_stats.bytes_received_dropped_create_failed += nbytes
else:
msg_stats.messages_received_dropped_create_failed = 1
msg_stats.bytes_received_dropped_create_failed = nbytes
elif drop_reason is SandeshRxDropReason.DecodingFailed:
if msg_stats.messages_received_dropped_decoding_failed:
msg_stats.messages_received_dropped_decoding_failed += 1
msg_stats.bytes_received_dropped_decoding_failed += nbytes
else:
msg_stats.messages_received_dropped_decoding_failed = 1
msg_stats.bytes_received_dropped_decoding_failed = nbytes
else:
assert 0, 'Unhandled Rx drop reason <%s>' % (str(drop_reason))
# end _update_rx_stats_internal
# end class SandeshMessageStatistics
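# Hypothetical usage sketch (not part of the original module); the message
# type names and byte counts are illustrative assumptions:
#
#     stats = SandeshMessageStatistics()
#     stats.update_tx_stats('NodeStatusUVE', 512)              # counted as sent
#     stats.update_tx_stats('NodeStatusUVE', 256,
#                           SandeshTxDropReason.NoSession)     # counted as drop
#     stats.update_rx_stats('SandeshCtrlServerToClient', 128)
#     agg = stats.aggregate_stats()
#     print(agg.messages_sent, agg.bytes_sent, agg.messages_sent_dropped)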
| apache-2.0 | 7,344,674,149,269,573,000 | 50.831731 | 81 | 0.581208 | false |
zasdfgbnm/qutip | qutip/control/grape.py | 1 | 19982 | # This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
"""
This module contains functions that implement the GRAPE algorithm for
calculating pulse sequences for quantum systems.
"""
__all__ = ['plot_grape_control_fields',
'grape_unitary', 'cy_grape_unitary', 'grape_unitary_adaptive']
import warnings
import time
import numpy as np
from scipy.interpolate import interp1d
import scipy.sparse as sp
from qutip.qobj import Qobj
from qutip.ui.progressbar import BaseProgressBar
from qutip.control.cy_grape import cy_overlap, cy_grape_inner
from qutip.qip.gates import gate_sequence_product
import qutip.logging
logger = qutip.logging.get_logger()
class GRAPEResult:
"""
Class for representing the result of a GRAPE simulation.
Attributes
----------
u : array
GRAPE control pulse matrix.
H_t : time-dependent Hamiltonian
The time-dependent Hamiltonian that realize the GRAPE pulse sequence.
U_f : Qobj
The final unitary transformation that is realized by the evolution
of the system with the GRAPE generated pulse sequences.
"""
def __init__(self, u=None, H_t=None, U_f=None):
self.u = u
self.H_t = H_t
self.U_f = U_f
def plot_grape_control_fields(times, u, labels, uniform_axes=False):
"""
Plot a series of plots showing the GRAPE control fields given in the
given control pulse matrix u.
Parameters
----------
times : array
Time coordinate array.
u : array
Control pulse matrix.
labels : list
List of labels for each control pulse sequence in the control pulse
matrix.
uniform_axes : bool
Whether or not to plot all pulse sequences using the same y-axis scale.
"""
import matplotlib.pyplot as plt
R, J, M = u.shape
fig, axes = plt.subplots(J, 1, figsize=(8, 2 * J), squeeze=False)
y_max = abs(u).max()
for r in range(R):
for j in range(J):
if r == R - 1:
lw, lc, alpha = 2.0, 'k', 1.0
axes[j, 0].set_ylabel(labels[j], fontsize=18)
axes[j, 0].set_xlabel(r'$t$', fontsize=18)
axes[j, 0].set_xlim(0, times[-1])
else:
lw, lc, alpha = 0.5, 'b', 0.25
axes[j, 0].step(times, u[r, j, :], lw=lw, color=lc, alpha=alpha)
if uniform_axes:
axes[j, 0].set_ylim(-y_max, y_max)
fig.tight_layout()
return fig, axes
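# Hypothetical usage sketch (assumes `res` is a GRAPEResult produced by one
# of the optimizers below and `times` is the grid it was computed on):
#
#     fig, axes = plot_grape_control_fields(times, res.u,
#                                           [r'$u_x$', r'$u_y$'])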
def _overlap(A, B):
    # normalized Hilbert-Schmidt overlap Tr(A^dag B) / N; equal to 1 when
    # A and B are the same unitary on an N-dimensional space
    return (A.dag() * B).tr() / A.shape[0]
    # return cy_overlap(A.data, B.data)
def grape_unitary(U, H0, H_ops, R, times, eps=None, u_start=None,
u_limits=None, interp_kind='linear', use_interp=False,
alpha=None, beta=None, phase_sensitive=True,
progress_bar=BaseProgressBar()):
"""
Calculate control pulses for the Hamiltonian operators in H_ops so that the
unitary U is realized.
Experimental: Work in progress.
Parameters
----------
U : Qobj
Target unitary evolution operator.
H0 : Qobj
Static Hamiltonian (that cannot be tuned by the control fields).
H_ops: list of Qobj
A list of operators that can be tuned in the Hamiltonian via the
control fields.
R : int
Number of GRAPE iterations.
    times : array / list
        Array of time coordinates for control pulse evaluation.
u_start : array
Optional array with initial control pulse values.
Returns
-------
Instance of GRAPEResult, which contains the control pulses calculated
with GRAPE, a time-dependent Hamiltonian that is defined by the
control pulses, as well as the resulting propagator.
"""
if eps is None:
eps = 0.1 * (2 * np.pi) / (times[-1])
M = len(times)
J = len(H_ops)
u = np.zeros((R, J, M))
if u_limits and len(u_limits) != 2:
raise ValueError("u_limits must be a list with two values")
if u_limits:
warnings.warn("Caution: Using experimental feature u_limits")
if u_limits and u_start:
# make sure that no values in u0 violates the u_limits conditions
u_start = np.array(u_start)
u_start[u_start < u_limits[0]] = u_limits[0]
u_start[u_start > u_limits[1]] = u_limits[1]
if u_start is not None:
for idx, u0 in enumerate(u_start):
u[0, idx, :] = u0
if beta:
warnings.warn("Causion: Using experimental feature time-penalty")
progress_bar.start(R)
for r in range(R - 1):
progress_bar.update(r)
dt = times[1] - times[0]
if use_interp:
ip_funcs = [interp1d(times, u[r, j, :], kind=interp_kind,
bounds_error=False, fill_value=u[r, j, -1])
for j in range(J)]
def _H_t(t, args=None):
return H0 + sum([float(ip_funcs[j](t)) * H_ops[j]
for j in range(J)])
U_list = [(-1j * _H_t(times[idx]) * dt).expm()
for idx in range(M-1)]
else:
def _H_idx(idx):
return H0 + sum([u[r, j, idx] * H_ops[j] for j in range(J)])
U_list = [(-1j * _H_idx(idx) * dt).expm() for idx in range(M-1)]
U_f_list = []
U_b_list = []
U_f = 1
U_b = 1
for n in range(M - 1):
U_f = U_list[n] * U_f
U_f_list.append(U_f)
U_b_list.insert(0, U_b)
U_b = U_list[M - 2 - n].dag() * U_b
for j in range(J):
for m in range(M-1):
P = U_b_list[m] * U
Q = 1j * dt * H_ops[j] * U_f_list[m]
if phase_sensitive:
du = - _overlap(P, Q)
else:
du = - 2 * _overlap(P, Q) * _overlap(U_f_list[m], P)
if alpha:
# penalty term for high power control signals u
du += -2 * alpha * u[r, j, m] * dt
if beta:
# penalty term for late control signals u
du += -2 * beta * m * u[r, j, m] * dt
u[r + 1, j, m] = u[r, j, m] + eps * du.real
if u_limits:
if u[r + 1, j, m] < u_limits[0]:
u[r + 1, j, m] = u_limits[0]
elif u[r + 1, j, m] > u_limits[1]:
u[r + 1, j, m] = u_limits[1]
u[r + 1, j, -1] = u[r + 1, j, -2]
if use_interp:
ip_funcs = [interp1d(times, u[R - 1, j, :], kind=interp_kind,
bounds_error=False, fill_value=u[R - 1, j, -1])
for j in range(J)]
H_td_func = [H0] + [[H_ops[j], lambda t, args, j=j: ip_funcs[j](t)]
for j in range(J)]
else:
H_td_func = [H0] + [[H_ops[j], u[-1, j, :]] for j in range(J)]
progress_bar.finished()
# return U_f_list[-1], H_td_func, u
return GRAPEResult(u=u, U_f=U_f_list[-1], H_t=H_td_func)
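# Hypothetical usage sketch (not part of the original module): optimizing
# sigma_x / sigma_y control pulses toward a NOT gate. The drift, target,
# iteration count and time grid below are illustrative assumptions.
#
#     from qutip import sigmax, sigmay, sigmaz
#     H0 = 0.0 * sigmaz()                    # no drift Hamiltonian
#     H_ops = [sigmax(), sigmay()]           # tunable control operators
#     U_target = Qobj([[0, 1], [1, 0]])      # NOT gate
#     times = np.linspace(0.0, 1.0, 100)
#     res = grape_unitary(U_target, H0, H_ops, R=100, times=times)
#     print(abs(_overlap(U_target, res.U_f)))  # approaches 1 on success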
def cy_grape_unitary(U, H0, H_ops, R, times, eps=None, u_start=None,
u_limits=None, interp_kind='linear', use_interp=False,
alpha=None, beta=None, phase_sensitive=True,
progress_bar=BaseProgressBar()):
"""
    Calculate control pulses for the Hamiltonian operators in H_ops so that the
unitary U is realized.
Experimental: Work in progress.
Parameters
----------
U : Qobj
Target unitary evolution operator.
H0 : Qobj
Static Hamiltonian (that cannot be tuned by the control fields).
H_ops: list of Qobj
A list of operators that can be tuned in the Hamiltonian via the
control fields.
R : int
Number of GRAPE iterations.
    times : array / list
        Array of time coordinates for control pulse evaluation.
u_start : array
Optional array with initial control pulse values.
Returns
-------
Instance of GRAPEResult, which contains the control pulses calculated
with GRAPE, a time-dependent Hamiltonian that is defined by the
control pulses, as well as the resulting propagator.
"""
if eps is None:
eps = 0.1 * (2 * np.pi) / (times[-1])
M = len(times)
J = len(H_ops)
u = np.zeros((R, J, M))
H_ops_data = [H_op.data for H_op in H_ops]
if u_limits and len(u_limits) != 2:
raise ValueError("u_limits must be a list with two values")
if u_limits:
warnings.warn("Causion: Using experimental feature u_limits")
if u_limits and u_start:
# make sure that no values in u0 violates the u_limits conditions
u_start = np.array(u_start)
u_start[u_start < u_limits[0]] = u_limits[0]
u_start[u_start > u_limits[1]] = u_limits[1]
if u_limits:
use_u_limits = 1
u_min = u_limits[0]
u_max = u_limits[1]
else:
use_u_limits = 0
u_min = 0.0
u_max = 0.0
if u_start is not None:
for idx, u0 in enumerate(u_start):
u[0, idx, :] = u0
if beta:
warnings.warn("Causion: Using experimental feature time-penalty")
alpha_val = alpha if alpha else 0.0
beta_val = beta if beta else 0.0
progress_bar.start(R)
for r in range(R - 1):
progress_bar.update(r)
dt = times[1] - times[0]
if use_interp:
ip_funcs = [interp1d(times, u[r, j, :], kind=interp_kind,
bounds_error=False, fill_value=u[r, j, -1])
for j in range(J)]
def _H_t(t, args=None):
return H0 + sum([float(ip_funcs[j](t)) * H_ops[j]
for j in range(J)])
U_list = [(-1j * _H_t(times[idx]) * dt).expm().data
for idx in range(M-1)]
else:
def _H_idx(idx):
return H0 + sum([u[r, j, idx] * H_ops[j] for j in range(J)])
U_list = [(-1j * _H_idx(idx) * dt).expm().data
for idx in range(M-1)]
U_f_list = []
U_b_list = []
U_f = 1
U_b = sp.eye(*(U.shape))
for n in range(M - 1):
U_f = U_list[n] * U_f
U_f_list.append(U_f)
U_b_list.insert(0, U_b)
U_b = U_list[M - 2 - n].T.conj().tocsr() * U_b
cy_grape_inner(U.data, u, r, J, M, U_b_list, U_f_list, H_ops_data,
dt, eps, alpha_val, beta_val, phase_sensitive,
use_u_limits, u_min, u_max)
if use_interp:
ip_funcs = [interp1d(times, u[R - 1, j, :], kind=interp_kind,
bounds_error=False, fill_value=u[R - 1, j, -1])
for j in range(J)]
H_td_func = [H0] + [[H_ops[j], lambda t, args, j=j: ip_funcs[j](t)]
for j in range(J)]
else:
H_td_func = [H0] + [[H_ops[j], u[-1, j, :]] for j in range(J)]
progress_bar.finished()
return GRAPEResult(u=u, U_f=Qobj(U_f_list[-1], dims=U.dims),
H_t=H_td_func)
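# Hypothetical note (not in the original source): cy_grape_unitary mirrors
# grape_unitary's signature but performs the inner pulse update in Cython on
# sparse matrix data, so it can be used as a drop-in replacement, e.g.
#
#     res = cy_grape_unitary(U_target, H0, H_ops, R=100, times=times)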
def grape_unitary_adaptive(U, H0, H_ops, R, times, eps=None, u_start=None,
u_limits=None, interp_kind='linear',
use_interp=False, alpha=None, beta=None,
phase_sensitive=False, overlap_terminate=1.0,
progress_bar=BaseProgressBar()):
"""
Calculate control pulses for the Hamiltonian operators in H_ops so that
the unitary U is realized.
Experimental: Work in progress.
Parameters
----------
U : Qobj
Target unitary evolution operator.
H0 : Qobj
Static Hamiltonian (that cannot be tuned by the control fields).
H_ops: list of Qobj
A list of operators that can be tuned in the Hamiltonian via the
control fields.
R : int
Number of GRAPE iterations.
    times : array / list
        Array of time coordinates for control pulse evaluation.
u_start : array
Optional array with initial control pulse values.
Returns
-------
Instance of GRAPEResult, which contains the control pulses calculated
with GRAPE, a time-dependent Hamiltonian that is defined by the
control pulses, as well as the resulting propagator.
"""
if eps is None:
eps = 0.1 * (2 * np.pi) / (times[-1])
eps_vec = np.array([eps / 2, eps, 2 * eps])
eps_log = np.zeros(R)
overlap_log = np.zeros(R)
best_k = 0
_k_overlap = np.array([0.0, 0.0, 0.0])
M = len(times)
J = len(H_ops)
K = len(eps_vec)
Uf = [None for _ in range(K)]
u = np.zeros((R, J, M, K))
if u_limits and len(u_limits) != 2:
raise ValueError("u_limits must be a list with two values")
if u_limits:
warnings.warn("Causion: Using experimental feature u_limits")
if u_limits and u_start:
# make sure that no values in u0 violates the u_limits conditions
u_start = np.array(u_start)
u_start[u_start < u_limits[0]] = u_limits[0]
u_start[u_start > u_limits[1]] = u_limits[1]
if u_start is not None:
for idx, u0 in enumerate(u_start):
for k in range(K):
u[0, idx, :, k] = u0
if beta:
warnings.warn("Causion: Using experimental feature time-penalty")
if phase_sensitive:
_fidelity_function = lambda x: x
else:
_fidelity_function = lambda x: abs(x) ** 2
best_k = 1
_r = 0
_prev_overlap = 0
progress_bar.start(R)
for r in range(R - 1):
progress_bar.update(r)
_r = r
eps_log[r] = eps_vec[best_k]
logger.debug("eps_vec: {}".format(eps_vec))
_t0 = time.time()
dt = times[1] - times[0]
if use_interp:
ip_funcs = [interp1d(times, u[r, j, :, best_k], kind=interp_kind,
bounds_error=False,
fill_value=u[r, j, -1, best_k])
for j in range(J)]
def _H_t(t, args=None):
return H0 + sum([float(ip_funcs[j](t)) * H_ops[j]
for j in range(J)])
U_list = [(-1j * _H_t(times[idx]) * dt).expm()
for idx in range(M-1)]
else:
def _H_idx(idx):
return H0 + sum([u[r, j, idx, best_k] * H_ops[j]
for j in range(J)])
U_list = [(-1j * _H_idx(idx) * dt).expm() for idx in range(M-1)]
logger.debug("Time 1: %fs" % (time.time() - _t0))
_t0 = time.time()
U_f_list = []
U_b_list = []
U_f = 1
U_b = 1
for m in range(M - 1):
U_f = U_list[m] * U_f
U_f_list.append(U_f)
U_b_list.insert(0, U_b)
U_b = U_list[M - 2 - m].dag() * U_b
logger.debug("Time 2: %fs" % (time.time() - _t0))
_t0 = time.time()
for j in range(J):
for m in range(M-1):
P = U_b_list[m] * U
Q = 1j * dt * H_ops[j] * U_f_list[m]
if phase_sensitive:
du = - cy_overlap(P.data, Q.data)
else:
du = (- 2 * cy_overlap(P.data, Q.data) *
cy_overlap(U_f_list[m].data, P.data))
if alpha:
# penalty term for high power control signals u
du += -2 * alpha * u[r, j, m, best_k] * dt
if beta:
# penalty term for late control signals u
                    du += -2 * beta * m * u[r, j, m, best_k] * dt
for k, eps_val in enumerate(eps_vec):
u[r + 1, j, m, k] = u[r, j, m, k] + eps_val * du.real
if u_limits:
if u[r + 1, j, m, k] < u_limits[0]:
u[r + 1, j, m, k] = u_limits[0]
elif u[r + 1, j, m, k] > u_limits[1]:
u[r + 1, j, m, k] = u_limits[1]
u[r + 1, j, -1, :] = u[r + 1, j, -2, :]
logger.debug("Time 3: %fs" % (time.time() - _t0))
_t0 = time.time()
for k, eps_val in enumerate(eps_vec):
def _H_idx(idx):
return H0 + sum([u[r + 1, j, idx, k] * H_ops[j]
for j in range(J)])
U_list = [(-1j * _H_idx(idx) * dt).expm() for idx in range(M-1)]
Uf[k] = gate_sequence_product(U_list)
_k_overlap[k] = _fidelity_function(cy_overlap(Uf[k].data,
U.data)).real
best_k = np.argmax(_k_overlap)
logger.debug("k_overlap: ", _k_overlap, best_k)
if _prev_overlap > _k_overlap[best_k]:
logger.debug("Regression, stepping back with smaller eps.")
u[r + 1, :, :, :] = u[r, :, :, :]
eps_vec /= 2
else:
if best_k == 0:
eps_vec /= 2
elif best_k == 2:
eps_vec *= 2
_prev_overlap = _k_overlap[best_k]
overlap_log[r] = _k_overlap[best_k]
if overlap_terminate < 1.0:
if _k_overlap[best_k] > overlap_terminate:
logger.info("Reached target fidelity, terminating.")
break
logger.debug("Time 4: %fs" % (time.time() - _t0))
_t0 = time.time()
if use_interp:
        ip_funcs = [interp1d(times, u[_r, j, :, best_k], kind=interp_kind,
                             bounds_error=False,
                             fill_value=u[_r, j, -1, best_k])
for j in range(J)]
H_td_func = [H0] + [[H_ops[j], lambda t, args, j=j: ip_funcs[j](t)]
for j in range(J)]
else:
H_td_func = [H0] + [[H_ops[j], u[_r, j, :, best_k]] for j in range(J)]
progress_bar.finished()
result = GRAPEResult(u=u[:_r, :, :, best_k], U_f=Uf[best_k],
H_t=H_td_func)
result.eps = eps_log
result.overlap = overlap_log
return result
| bsd-3-clause | 3,366,839,895,533,289,500 | 29.931889 | 79 | 0.517015 | false |
maximx1/zfscan | zfscan/commands/CommandLineStatements.py | 1 | 1075 | class CommandLineStatements:
"""Batch of command line statements surrounding zfs"""
"""Checks the disks in the system"""
check_disks = "ls -la /dev/disk/by-id"
"""
zpool command to check status
{0} = pool
"""
zpool_status = "sudo zpool status {0}"
"""
zpool command to turn off drive access
{0} = pool
{1} = drive
"""
zpool_offline = "sudo zpool offline {0} {1}"
"""
zpool command to turn on drive access
{0} = pool
{1} = drive
"""
zpool_online = "sudo zpool online {0} {1}"
"""
zpool replace command
{0} = pool
{1} = old
{2} = new
"""
zpool_replace = "sudo zpool replace {0} {1} {2} -f"
"""zpool import command to show available imports"""
zpool_import_list = "sudo zpool import"
"""
zpool import command
{0} = importable pool
"""
zpool_import = "sudo zpool import {0}"
"""zfs list command"""
zfs_list = "sudo zfs list"
"""Disk usage"""
disk_usage = "df -h" | mit | -2,060,873,528,293,840,100 | 21.416667 | 58 | 0.525581 | false |