# junion-org/junion | junion/twitter/util.py | MIT
#!/usr/bin/env python
# coding: utf-8
"""
Utility module for Twitter-related processing
"""
import re
import time
import calendar
import HTMLParser
# Parser used to unescape HTML special chars
parser = HTMLParser.HTMLParser()
def get_text_and_entities(tw):
"""
Given a tweet, return the text with entities removed and the list of entities.
"""
if 'entities' in tw:
return _get_text_and_entities_ent(tw)
else:
return _get_text_and_entities_reg(tw)
def _get_text_and_entities_ent(tw):
"""
Use the entities field to return the text with entities removed and the
list of entities. URLs have been observed in tweet text even though they
were missing from the entities field, so regular expressions are used as well.
"""
raw_text = tw['text']
entities = tw['entities']
indices = []
urls = []
mentions = []
hashtags = []
# Collect the entities
if 'urls' in entities:
for url in entities['urls']:
urls.append(url['url'])
indices.append(url['indices'])
if 'user_mentions' in entities:
for mention in entities['user_mentions']:
mentions.append(mention['screen_name'])
indices.append(mention['indices'])
if 'hashtags' in entities:
for hashtag in entities['hashtags']:
hashtags.append(hashtag['text'])
indices.append(hashtag['indices'])
# Strip the entities from the text
cur = 0
text = ''
for i, indice in enumerate(sorted(indices, key=lambda x:x[0])):
text += raw_text[cur:indice[0]]
cur = indice[1]
text += raw_text[cur:]
# Extract any remaining entities with regular expressions
text, ent_reg = _get_text_and_entities_reg({'text': text})
if 'urls' in ent_reg:
urls += ent_reg['urls']
if 'mentions' in ent_reg:
mentions += ent_reg['mentions']
if 'hashtags' in ent_reg:
hashtags += ent_reg['hashtags']
# Store the entities
entities = {}
if urls:
entities['urls'] = urls
if mentions:
entities['mentions'] = mentions
if hashtags:
entities['hashtags'] = hashtags
return text, entities
def _get_text_and_entities_reg(tw):
"""
Use regular expressions to return the text with entities removed and the list of entities.
"""
text = tw['text'] if tw['text'] else ''
urls = get_urls(text)
mentions = get_mentions(text)
hashtags = get_hashtags(text)
entities = urls + mentions + hashtags
for entity in entities:
text = text.replace(entity, '')
entities = {}
if urls:
entities['urls'] = urls
if mentions:
entities['mentions'] = mentions
if hashtags:
entities['hashtags'] = hashtags
return unescape(text), entities
def get_urls(s):
"""
Extract URLs from a string and return them.
See the following for the characters allowed in URLs:
http://tools.ietf.org/html/rfc2396
http://jbpe.tripod.com/rfcj/rfc2396.ej.sjis.txt (Japanese translation)
See the following for the special characters used in regular expressions:
http://www.python.jp/doc/release/library/re.html#module-re
"""
r = re.compile(r"https?://[\w;/?:@&=+$,\-.!~*'()%]+")
return r.findall(s)
def get_mentions(s):
"""
Extract @screen_name mentions from a string and return them.
A screen_name may contain only alphanumerics and underscores, 15 characters at most.
Names containing 'admin' or 'twitter' are not allowed.
See:
https://support.twitter.com/groups/31-twitter-basics/topics/104-welcome-to-twitter-support/articles/230266-#
"""
r = re.compile(r'@\w+')
return r.findall(s)
def get_hashtags(s):
"""
Extract #hashtags from a string and return them.
If a hashtag contains punctuation ( , . ; ' ? ! etc.), only the characters
before the punctuation are treated as the hashtag.
The character immediately before the hash sign # must be a space.
See:
https://support.twitter.com/articles/450254-#
http://d.hatena.ne.jp/sutara_lumpur/20101012/1286860552
"""
r = re.compile(
u'(^|[^ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9\w&/]+)(' +
u'[##]' +
u'[ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9\w]*' +
u'[ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9a-zA-Z]+' +
u'[ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9\w]*)'
)
return [x[1] for x in r.findall(s)]
def twittertime2unixtime(twitter_time):
"""
Convert a Twitter timestamp to a UNIX timestamp
"""
unix_time = calendar.timegm(time.strptime(twitter_time, '%a %b %d %H:%M:%S +0000 %Y'))
return unix_time
def twittertime2localtime(twitter_time):
"""
Convert a Twitter timestamp to local time (JST)
"""
unix_time = calendar.timegm(time.strptime(twitter_time, '%a %b %d %H:%M:%S +0000 %Y'))
return time.localtime(unix_time)
def unixtime2localtime(unix_time):
"""
Convert a UNIX timestamp to local time (JST)
"""
return time.localtime(unix_time)
def unixtime2twittertime(unix_time):
"""
Convert a UNIX timestamp to a Twitter timestamp
"""
return time.strftime('%a %b %d %H:%M:%S +0000 %Y', time.gmtime(unix_time))
def localtime2unixtime(local_time):
"""
Convert local time (JST) to a UNIX timestamp
"""
return time.mktime(local_time)
def localtime2twittertime(local_time):
"""
Convert local time (JST) to a Twitter timestamp
"""
unix_time = time.mktime(local_time)
return time.strftime('%a %b %d %H:%M:%S +0000 %Y', time.gmtime(unix_time))
def unescape(text):
"""
Unescape HTML special chars and return the original string
"""
return parser.unescape(text)
def unescape_dquote(text):
"""
If a string from the MeCab dictionary is wrapped in double quotes,
strip the quoting and return the plain string.
"""
if text[0] == '"' and text[-1] == '"':
text = text[1:-1].replace('""', '"')
return text
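# --- Illustrative usage (not part of the original module; a minimal sketch) ---
# The tweet below is a hand-written stand-in for a real Twitter API status
# object; its field values (text, indices, screen_name) are assumptions made
# purely for illustration.
if __name__ == '__main__':
    sample_tweet = {
        'text': u'Check this out http://example.com/page #glance @someone',
        'entities': {
            'urls': [{'url': u'http://example.com/page', 'indices': [15, 38]}],
            'hashtags': [{'text': u'glance', 'indices': [39, 46]}],
            'user_mentions': [{'screen_name': u'someone', 'indices': [47, 55]}],
        },
    }
    cleaned_text, found_entities = get_text_and_entities(sample_tweet)
    # cleaned_text   -> the tweet text with the URL, hashtag and mention removed
    # found_entities -> {'urls': [...], 'mentions': [u'someone'], 'hashtags': [u'glance']}
    print cleaned_text
    print found_entities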
# deepmind/lamb | lamb/dyneval.py | Apache-2.0
# Copyright 2018 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Dynamic evaluation."""
# pylint: disable=missing-docstring
# pylint: disable=g-complex-comprehension
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
class Dyneval(object):
def __init__(self, grads_and_vars, learning_rate, decay_rate, epsilon):
with tf.variable_scope('dyneval'):
# convert_to_tensor densifies IndexedSlices
self._grads = [tf.convert_to_tensor(grad) for grad, _ in grads_and_vars]
self._vars = [var for _, var in grads_and_vars]
self._learning_rate = learning_rate
self._decay_rate = decay_rate
def shadow_vars():
return [
tf.get_variable(
var.name.replace('/', '-').replace(':', '-'),
var.get_shape(), initializer=tf.zeros_initializer(),
trainable=False)
for var in self._vars]
with tf.variable_scope('save'):
self._saves = shadow_vars()
with tf.variable_scope('sum_squared_grads'):
self._sum_squared_grads = shadow_vars()
self._save = self._make_save()
self._restore = self._make_restore()
# These are for computing an RMSProplike estimate of the variance of
# minibatch gradients. Here, this quantity is estimated on the training
# set once, while gradient descent happens on validation/test.
self._num_squared_grads = tf.get_variable(
'num_squared_grads', [], initializer=tf.zeros_initializer(),
trainable=False)
self._zero_sum_squared_grads = self._make_zero_sum_squared_grads()
self._add_squared_grads = self._make_add_squared_grads()
self._epsilon = epsilon
self._update = self._make_update()
def _make_save(self):
assignments = []
for save, var in zip(self._saves, self._vars):
assignments.append(save.assign(var))
return tf.group(assignments)
def _make_restore(self):
assignments = []
for save, var in zip(self._saves, self._vars):
assignments.append(var.assign(save))
return tf.group(assignments)
def _make_update(self):
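# Note (added for clarity): each trainable variable is moved by
#   delta = -learning_rate * grad / (RMS(grad) + epsilon)
#           + min(1, decay_rate * RMS(grad) / mean_RMS) * (saved_var - var)
# i.e. RMS-normalised gradient descent on the evaluation data plus a
# per-parameter decay that pulls the weights back toward the values saved
# before dynamic evaluation started.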
mss = []
gsum = 0.0
count = 0
for sum_squared_grads in self._sum_squared_grads:
ms = tf.sqrt(sum_squared_grads / self._num_squared_grads)
gsum += tf.reduce_sum(ms)
count += tf.reduce_sum(tf.ones_like(ms))
mss.append(ms)
gsum = gsum / count
assignments = []
for grad, var, save, sum_squared_grads, ms in zip(
self._grads, self._vars, self._saves, self._sum_squared_grads, mss):
decay_rate = tf.minimum(1.0, self._decay_rate*(ms/gsum))
delta = (-self._learning_rate*grad / (ms + self._epsilon) +
decay_rate*(save-var))
assignments.append(var.assign_add(delta))
return tf.group(assignments)
def _make_add_squared_grads(self):
assignments = []
for sum_squared_grads, grads in zip(self._sum_squared_grads, self._grads):
assignments.append(sum_squared_grads.assign_add(tf.square(grads)))
return tf.group(assignments + [self._num_squared_grads.assign_add(1)])
def _make_zero_sum_squared_grads(self):
assignments = []
for sum_squared_grads in self._sum_squared_grads:
assignments.append(sum_squared_grads.assign(
tf.zeros_like(sum_squared_grads)))
return tf.group(assignments + [self._num_squared_grads.assign(0)])
def save(self):
tf.get_default_session().run(self._save)
def restore(self):
tf.get_default_session().run(self._restore)
def update_op(self):
return self._update
def zero_sum_squared_grads(self):
tf.get_default_session().run(self._zero_sum_squared_grads)
def add_squared_grads_op(self):
return self._add_squared_grads
def __enter__(self):
self.save()
def __exit__(self, type_, value, traceback):
self.restore()
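# --- Illustrative usage (a minimal sketch, not part of the original module) ---
# The names `loss`, `tvars`, `train_batches` and `eval_batches` below are
# hypothetical placeholders; only the Dyneval calls mirror the class above,
# and the hyperparameter values are arbitrary.
#
#   grads_and_vars = list(zip(tf.gradients(loss, tvars), tvars))
#   dyneval = Dyneval(grads_and_vars, learning_rate=0.001,
#                     decay_rate=0.02, epsilon=1e-5)
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       # 1) Estimate gradient second-moment statistics on the training set.
#       dyneval.zero_sum_squared_grads()
#       for batch in train_batches:
#           sess.run(dyneval.add_squared_grads_op(), feed_dict=batch)
#       # 2) Evaluate with dynamic updates; the weights are saved on entry to
#       #    the `with` block and restored on exit.
#       with dyneval:
#           for batch in eval_batches:
#               loss_value, _ = sess.run([loss, dyneval.update_op()],
#                                        feed_dict=batch)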
# SUSE-Cloud/glance | glance/tests/integration/legacy_functional/test_v1_api.py | Apache-2.0
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import hashlib
import json
import os
import tempfile
import testtools
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils
from glance.tests.integration.legacy_functional import base
from glance.tests.utils import minimal_headers
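# Note (added for clarity): minimal_headers(name), imported above, builds the
# smallest header set that the v1 API accepts when creating an image. Judging
# from the assertions in test_get_head_simple_post below (raw disk format, ovf
# container format, public image), it is assumed to be roughly equivalent to:
#     {'Content-Type': 'application/octet-stream',
#      'X-Image-Meta-Name': name,
#      'X-Image-Meta-disk_format': 'raw',
#      'X-Image-Meta-container_format': 'ovf',
#      'X-Image-Meta-Is-Public': 'True'}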
FIVE_KB = 5 * 1024
FIVE_GB = 5 * 1024 * 1024 * 1024
class TestApi(base.ApiTest):
def test_get_head_simple_post(self):
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. GET /images/detail
# Verify no public images
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 2. POST /images with public image named Image1
# attribute and no custom properties. Verify a 200 OK is returned
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 3. HEAD image
# Verify image found now
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
# 4. GET image
# Verify all information on image we just added is correct
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image_headers = {
'x-image-meta-id': image_id,
'x-image-meta-name': 'Image1',
'x-image-meta-is_public': 'True',
'x-image-meta-status': 'active',
'x-image-meta-disk_format': 'raw',
'x-image-meta-container_format': 'ovf',
'x-image-meta-size': str(FIVE_KB)}
expected_std_headers = {
'content-length': str(FIVE_KB),
'content-type': 'application/octet-stream'}
for expected_key, expected_value in expected_image_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
for expected_key, expected_value in expected_std_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
self.assertEqual(content, "*" * FIVE_KB)
self.assertEqual(hashlib.md5(content).hexdigest(),
hashlib.md5("*" * FIVE_KB).hexdigest())
# 5. GET /images
# Verify the public image we just added is returned
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_result = {"images": [
{"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"name": "Image1",
"checksum": "c2e5db72bd7fd153f53ede5da5a06de3",
"size": 5120}]}
self.assertEqual(json.loads(content), expected_result)
# 6. GET /images/detail
# Verify image and all its metadata
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {},
"size": 5120}
image = json.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
image['images'][0][expected_key]))
# 7. PUT image with custom properties of "distro" and "arch"
# Verify 200 returned
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['image']['properties']['arch'], "x86_64")
self.assertEqual(data['image']['properties']['distro'], "Ubuntu")
# 8. GET /images/detail
# Verify image and all its metadata
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {'distro': 'Ubuntu', 'arch': 'x86_64'},
"size": 5120}
image = json.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
image['images'][0][expected_key]))
# 9. PUT image and remove a previously existing property.
headers = {'X-Image-Meta-Property-Arch': 'x86_64'}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 1)
self.assertEqual(data['properties']['arch'], "x86_64")
# 10. PUT image and add a previously deleted property.
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = json.loads(content)
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 2)
self.assertEqual(data['properties']['arch'], "x86_64")
self.assertEqual(data['properties']['distro'], "Ubuntu")
self.assertNotEqual(data['created_at'], data['updated_at'])
# DELETE image
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_queued_process_flow(self):
"""
We test the process flow where a user registers an image
with Glance but does not immediately upload an image file.
Later, the user uploads an image file using a PUT operation.
We track the changing of image status throughout this process.
0. GET /images
- Verify no public images
1. POST /images with public image named Image1 with no location
attribute and no image data.
- Verify 201 returned
2. GET /images
- Verify one public image
3. HEAD image
- Verify image now in queued status
4. PUT image with image data
- Verify 200 returned
5. HEAD images
- Verify image now in active status
6. GET /images
- Verify one public image
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with public image named Image1
# with no location or image data
headers = minimal_headers('Image1')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['checksum'], None)
self.assertEqual(data['image']['size'], 0)
self.assertEqual(data['image']['container_format'], 'ovf')
self.assertEqual(data['image']['disk_format'], 'raw')
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
image_id = data['image']['id']
# 2. GET /images
# Verify 1 public image
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['images'][0]['id'], image_id)
self.assertEqual(data['images'][0]['checksum'], None)
self.assertEqual(data['images'][0]['size'], 0)
self.assertEqual(data['images'][0]['container_format'], 'ovf')
self.assertEqual(data['images'][0]['disk_format'], 'raw')
self.assertEqual(data['images'][0]['name'], "Image1")
# 3. HEAD /images
# Verify status is in queued
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
self.assertEqual(response['x-image-meta-status'], "queued")
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-id'], image_id)
# 4. PUT image with image data, verify 200 returned
image_data = "*" * FIVE_KB
headers = {'Content-Type': 'application/octet-stream'}
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'PUT', headers=headers,
body=image_data)
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 5. HEAD /images
# Verify status is in active
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
self.assertEqual(response['x-image-meta-status'], "active")
# 6. GET /images
# Verify 1 public image still...
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['images'][0]['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['images'][0]['id'], image_id)
self.assertEqual(data['images'][0]['size'], FIVE_KB)
self.assertEqual(data['images'][0]['container_format'], 'ovf')
self.assertEqual(data['images'][0]['disk_format'], 'raw')
self.assertEqual(data['images'][0]['name'], "Image1")
# DELETE image
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_size_greater_2G_mysql(self):
"""
A test against the actual datastore backend for the registry
to ensure that the image size property is not truncated.
:see https://bugs.launchpad.net/glance/+bug/739433
"""
# 1. POST /images with public image named Image1
# attribute and a size of 5G. Use the HTTP engine with an
# X-Image-Meta-Location attribute to make Glance forego
# "adding" the image data.
# Verify a 201 OK is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Location': 'http://example.com/fakeimage',
'X-Image-Meta-Size': str(FIVE_GB),
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD /images
# Verify image size is what was passed in, and not truncated
path = response.get('location')
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], str(FIVE_GB))
self.assertEqual(response['x-image-meta-name'], 'Image1')
self.assertEqual(response['x-image-meta-is_public'], 'True')
def test_v1_not_enabled(self):
self.config(enable_v1_api=False)
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 300)
def test_v1_enabled(self):
self.config(enable_v1_api=True)
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
def test_zero_initial_size(self):
"""
A test to ensure that an image with size explicitly set to zero
has status that immediately transitions to active.
"""
# 1. POST /images with public image named Image1
# attribute and a size of zero.
# Verify a 201 OK is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Size': '0',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD image-location
# Verify image size is zero and the status is active
path = response.get('location')
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-status'], 'active')
# 3. GET image-location
# Verify image content is empty
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(len(content), 0)
def test_traceback_not_consumed(self):
"""
A test that errors coming from the POST API do not
get consumed and print the actual error message, and
not something like <traceback object at 0x1918d40>
:see https://bugs.launchpad.net/glance/+bug/755912
"""
# POST /images with binary data, but not setting
# Content-Type to application/octet-stream, verify a
# 400 returned and that the error is readable.
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
path = "/v1/images"
headers = minimal_headers('Image1')
headers['Content-Type'] = 'not octet-stream'
response, content = self.http.request(path, 'POST',
body=test_data_file.name,
headers=headers)
self.assertEqual(response.status, 400)
expected = "Content-Type must be application/octet-stream"
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
def test_filtered_images(self):
"""
Set up four test images and ensure each query param filter works
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
image_ids = []
# 1. POST /images with three public images, and one private image
# with various attributes
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'True',
'X-Image-Meta-Property-pants': 'are on'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are on")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Image!',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vhd',
'X-Image-Meta-Size': '20',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'False',
'X-Image-Meta-Property-pants': 'are on'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are on")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Image!',
'X-Image-Meta-Status': 'saving',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '21',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'False',
'X-Image-Meta-Property-pants': 'are off'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are off")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Private Image',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '22',
'X-Image-Meta-Is-Public': 'False',
'X-Image-Meta-Protected': 'False'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['is_public'], False)
image_ids.append(data['image']['id'])
# 2. GET /images
# Verify three public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# 3. GET /images with name filter
# Verify correct images returned with name
params = "name=My%20Image!"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['name'], "My Image!")
# 4. GET /images with status filter
# Verify correct images returned with status
params = "status=queued"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
for image in data['images']:
self.assertEqual(image['status'], "queued")
params = "status=active"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# 5. GET /images with container_format filter
# Verify correct images returned with container_format
params = "container_format=ovf"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['container_format'], "ovf")
# 6. GET /images with disk_format filter
# Verify correct images returned with disk_format
params = "disk_format=vdi"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['disk_format'], "vdi")
# 7. GET /images with size_max filter
# Verify correct images returned with size <= expected
params = "size_max=20"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertTrue(image['size'] <= 20)
# 8. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "size_min=20"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertTrue(image['size'] >= 20)
# 9. Get /images with is_public=None filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=None"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 4)
# 10. Get /images with is_public=False filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=False"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['name'], "My Private Image")
# 11. Get /images with is_public=True filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=True"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
for image in data['images']:
self.assertNotEqual(image['name'], "My Private Image")
# 12. Get /images with protected=False filter
# Verify correct images returned with property
params = "protected=False"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertNotEqual(image['name'], "Image1")
# 13. Get /images with protected=True filter
# Verify correct images returned with property
params = "protected=True"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['name'], "Image1")
# 14. GET /images with property filter
# Verify correct images returned with property
params = "property-pants=are%20on"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['properties']['pants'], "are on")
# 15. GET /images with property filter and name filter
# Verify correct images returned with property and name
# Make sure you quote the url when using more than one param!
params = "name=My%20Image!&property-pants=are%20on"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['properties']['pants'], "are on")
self.assertEqual(image['name'], "My Image!")
# 16. GET /images with past changes-since filter
yesterday = timeutils.isotime(timeutils.utcnow() -
datetime.timedelta(1))
params = "changes-since=%s" % yesterday
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# one timezone west of Greenwich equates to an hour ago
# taking care to pre-urlencode '+' as '%2B', otherwise the timezone
# '+' is wrongly decoded as a space
# TODO(eglynn): investigate '+' --> <SPACE> decoding, an artifact
# of WSGI/webob dispatch?
now = timeutils.utcnow()
hour_ago = now.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00')
params = "changes-since=%s" % hour_ago
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# 17. GET /images with future changes-since filter
tomorrow = timeutils.isotime(timeutils.utcnow() +
datetime.timedelta(1))
params = "changes-since=%s" % tomorrow
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# one timezone east of Greenwich equates to an hour from now
now = timeutils.utcnow()
hour_hence = now.strftime('%Y-%m-%dT%H:%M:%S-01:00')
params = "changes-since=%s" % hour_hence
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# 18. GET /images with an invalid size_min filter
# Verify a 400 Bad Request is returned
params = "size_min=-1"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("filter size_min got -1" in content)
# 19. GET /images with an invalid size_max filter
# Verify a 400 Bad Request is returned
params = "size_max=-1"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("filter size_max got -1" in content)
# 20. GET /images with an invalid min_ram filter
# Verify a 400 Bad Request is returned
params = "min_ram=-1"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("Bad value passed to filter min_ram got -1" in content)
# 21. GET /images with an invalid protected filter
# Verify a 400 Bad Request is returned
params = "protected=imalittleteapot"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("protected got imalittleteapot" in content)
# 22. GET /images with an invalid is_public filter
# Verify a 400 Bad Request is returned
params = "is_public=imalittleteapot"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("is_public got imalittleteapot" in content)
def test_limited_images(self):
"""
Ensure marker and limit query params work
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
image_ids = []
# 1. POST /images with three public images with various attributes
headers = minimal_headers('Image1')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = minimal_headers('Image2')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = minimal_headers('Image3')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
# 2. GET /images with all images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 3)
# 3. GET /images with limit of 2
# Verify only two images were returned
params = "limit=2"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 2)
self.assertEqual(data[0]['id'], images[0]['id'])
self.assertEqual(data[1]['id'], images[1]['id'])
# 4. GET /images with marker
# Verify only two images were returned
params = "marker=%s" % images[0]['id']
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 2)
self.assertEqual(data[0]['id'], images[1]['id'])
self.assertEqual(data[1]['id'], images[2]['id'])
# 5. GET /images with marker and limit
# Verify only one image was returned with the correct id
params = "limit=1&marker=%s" % images[1]['id']
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['id'], images[2]['id'])
# 6. GET /images/detail with marker and limit
# Verify only one image was returned with the correct id
params = "limit=1&marker=%s" % images[1]['id']
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['id'], images[2]['id'])
# DELETE images
for image_id in image_ids:
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_ordered_images(self):
"""
Set up three test images and ensure each query param filter works
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with three public images with various attributes
image_ids = []
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'ASDF',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'bare',
'X-Image-Meta-Disk-Format': 'iso',
'X-Image-Meta-Size': '2',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'XYZ',
'X-Image-Meta-Status': 'saving',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '5',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
# 2. GET /images with no query params
# Verify three public images sorted by created_at desc
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[2])
self.assertEqual(data['images'][1]['id'], image_ids[1])
self.assertEqual(data['images'][2]['id'], image_ids[0])
# 3. GET /images sorted by name asc
params = 'sort_key=name&sort_dir=asc'
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[1])
self.assertEqual(data['images'][1]['id'], image_ids[0])
self.assertEqual(data['images'][2]['id'], image_ids[2])
# 4. GET /images sorted by size desc
params = 'sort_key=size&sort_dir=desc'
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[0])
self.assertEqual(data['images'][1]['id'], image_ids[2])
self.assertEqual(data['images'][2]['id'], image_ids[1])
# 5. GET /images sorted by size desc with a marker
params = 'sort_key=size&sort_dir=desc&marker=%s' % image_ids[0]
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
self.assertEqual(data['images'][0]['id'], image_ids[2])
self.assertEqual(data['images'][1]['id'], image_ids[1])
# 6. GET /images sorted by name asc with a marker
params = 'sort_key=name&sort_dir=asc&marker=%s' % image_ids[2]
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# DELETE images
for image_id in image_ids:
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_duplicate_image_upload(self):
"""
Upload initial image, then attempt to upload duplicate image
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with public image named Image1
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image = json.loads(content)['image']
# 2. POST /images with public image named Image1, and ID: 1
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1 Update',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Id': image['id'],
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 409)
def test_delete_not_existing(self):
"""
We test the following:
0. GET /images
- Verify no public images
1. DELETE /images/1
- Verify 404
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. DELETE /images/1
# Verify 404 returned
path = "/v1/images/1"
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 404)
def _do_test_post_image_content_bad_format(self, format):
"""
We test that an invalid container/disk format fails with 400 "Bad Request"
:see https://bugs.launchpad.net/glance/+bug/933702
"""
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 0)
path = "/v1/images"
# POST /images without given format being specified
headers = minimal_headers('Image1')
headers['X-Image-Meta-' + format] = 'bad_value'
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
response, content = self.http.request(path, 'POST',
headers=headers,
body=test_data_file.name)
self.assertEqual(response.status, 400)
type = format.replace('_format', '')
expected = "Invalid %s format 'bad_value' for image" % type
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
# make sure the image was not created
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 0)
def test_post_image_content_bad_container_format(self):
self._do_test_post_image_content_bad_format('container_format')
def test_post_image_content_bad_disk_format(self):
self._do_test_post_image_content_bad_format('disk_format')
def _do_test_put_image_content_missing_format(self, format):
"""
We test that missing container/disk format only fails with
400 "Bad Request" when the image content is PUT (i.e. not
on the original POST of a queued image).
:see https://bugs.launchpad.net/glance/+bug/937216
"""
# POST queued image
path = "/v1/images"
headers = {
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Is-Public': 'True',
}
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
self.addDetail('image_data', testtools.content.json_content(data))
# PUT image content images without given format being specified
path = "/v1/images/%s" % (image_id)
headers = minimal_headers('Image1')
del headers['X-Image-Meta-' + format]
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
response, content = self.http.request(path, 'PUT',
headers=headers,
body=test_data_file.name)
self.assertEqual(response.status, 400)
type = format.replace('_format', '')
expected = "Invalid %s format 'None' for image" % type
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
def test_put_image_content_bad_container_format(self):
self._do_test_put_image_content_missing_format('container_format')
def test_put_image_content_bad_disk_format(self):
self._do_test_put_image_content_missing_format('disk_format')
def _do_test_mismatched_attribute(self, attribute, value):
"""
Test mismatched attribute.
"""
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
headers[attribute] = value
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 400)
images_dir = os.path.join(self.test_dir, 'images')
image_count = len([name for name in os.listdir(images_dir)
if os.path.isfile(os.path.join(images_dir, name))])
self.assertEquals(image_count, 0)
def test_mismatched_size(self):
"""
Test mismatched size.
"""
self._do_test_mismatched_attribute('x-image-meta-size',
str(FIVE_KB + 1))
def test_mismatched_checksum(self):
"""
Test mismatched checksum.
"""
self._do_test_mismatched_attribute('x-image-meta-checksum',
'foobar')
class TestApiWithFakeAuth(base.ApiTest):
def __init__(self, *args, **kwargs):
super(TestApiWithFakeAuth, self).__init__(*args, **kwargs)
self.api_flavor = 'fakeauth'
self.registry_flavor = 'fakeauth'
def test_ownership(self):
# Add an image with admin privileges and ensure the owner
# can be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers = {
'X-Image-Meta-Name': 'MyImage',
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Owner': 'tenant2',
}
create_headers.update(auth_headers)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
# Now add an image without admin privileges and ensure the owner
# cannot be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:role1',
}
create_headers.update(auth_headers)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers.update(auth_headers)
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# Make sure the non-privileged user can't update their owner either
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
'X-Auth-Token': 'user1:tenant1:role1',
}
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'PUT',
headers=update_headers)
self.assertEqual(response.status, 200)
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# An admin user should be able to update the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant3:admin',
}
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
}
update_headers.update(auth_headers)
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'PUT',
headers=update_headers)
self.assertEqual(response.status, 200)
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
def test_image_visibility_to_different_users(self):
owners = ['admin', 'tenant1', 'tenant2', 'none']
visibilities = {'public': 'True', 'private': 'False'}
image_ids = {}
for owner in owners:
for visibility, is_public in visibilities.items():
name = '%s-%s' % (owner, visibility)
headers = {
'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': name,
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Is-Public': is_public,
'X-Image-Meta-Owner': owner,
'X-Auth-Token': 'createuser:createtenant:admin',
}
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_ids[name] = data['image']['id']
def list_images(tenant, role='', is_public=None):
auth_token = 'user:%s:%s' % (tenant, role)
headers = {'X-Auth-Token': auth_token}
path = "/v1/images/detail"
if is_public is not None:
path += '?is_public=%s' % is_public
response, content = self.http.request(path, 'GET', headers=headers)
self.assertEqual(response.status, 200)
return json.loads(content)['images']
# 1. Known user sees public and their own images
images = list_images('tenant1')
self.assertEquals(len(images), 5)
for image in images:
self.assertTrue(image['is_public'] or image['owner'] == 'tenant1')
# 2. Unknown user sees only public images
images = list_images('none')
self.assertEquals(len(images), 4)
for image in images:
self.assertTrue(image['is_public'])
# 3. Unknown admin sees only public images
images = list_images('none', role='admin')
self.assertEquals(len(images), 4)
for image in images:
self.assertTrue(image['is_public'])
# 4. Unknown admin, is_public=none, shows all images
images = list_images('none', role='admin', is_public='none')
self.assertEquals(len(images), 8)
# 5. Unknown admin, is_public=true, shows only public images
images = list_images('none', role='admin', is_public='true')
self.assertEquals(len(images), 4)
for image in images:
self.assertTrue(image['is_public'])
# 6. Unknown admin, is_public=false, sees only private images
images = list_images('none', role='admin', is_public='false')
self.assertEquals(len(images), 4)
for image in images:
self.assertFalse(image['is_public'])
# 7. Known admin sees public and their own images
images = list_images('admin', role='admin')
self.assertEquals(len(images), 5)
for image in images:
self.assertTrue(image['is_public'] or image['owner'] == 'admin')
# 8. Known admin, is_public=none, shows all images
images = list_images('admin', role='admin', is_public='none')
self.assertEquals(len(images), 8)
# 9. Known admin, is_public=true, sees all public and their images
images = list_images('admin', role='admin', is_public='true')
self.assertEquals(len(images), 5)
for image in images:
self.assertTrue(image['is_public'] or image['owner'] == 'admin')
# 10. Known admin, is_public=false, sees all private images
images = list_images('admin', role='admin', is_public='false')
self.assertEquals(len(images), 4)
for image in images:
self.assertFalse(image['is_public'])
def test_property_protections(self):
# Enable property protection
self.config(property_protection_file=self.property_file)
self.init()
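# The protection rules are read from self.property_file, which is supplied by
# the test base class and not shown in this file. Based on the assertions
# below, it is assumed to contain role-based rules roughly of this form (an
# illustration only, not the actual file):
#     [x_owner_foo]
#     create = member
#     read = member
#     update = member
#     delete = member
#
#     [spl_create_prop]
#     create = spl_role
#     [spl_read_prop]
#     read = spl_role
#     [spl_update_prop]
#     update = spl_role
#     [spl_delete_prop]
#     delete = spl_role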
CREATE_HEADERS = {
'X-Image-Meta-Name': 'MyImage',
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Owner': 'tenant2',
}
# Create an image for role member with extra properties
# Raises 403 since user is not allowed to create 'foo'
auth_headers = {
'X-Auth-Token': 'user1:tenant1:member',
}
custom_props = {
'x-image-meta-property-foo': 'bar'
}
auth_headers.update(custom_props)
auth_headers.update(CREATE_HEADERS)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=auth_headers)
self.assertEqual(response.status, 403)
# Create an image for role member without 'foo'
auth_headers = {
'X-Auth-Token': 'user1:tenant1:member',
}
custom_props = {
'x-image-meta-property-x_owner_foo': 'o_s_bar',
}
auth_headers.update(custom_props)
auth_headers.update(CREATE_HEADERS)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=auth_headers)
self.assertEqual(response.status, 201)
# Returned image entity should have 'x_owner_foo'
data = json.loads(content)
self.assertEqual(data['image']['properties']['x_owner_foo'],
'o_s_bar')
# Create an image for role spl_role with extra properties
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_create_prop': 'create_bar',
'X-Image-Meta-Property-spl_read_prop': 'read_bar',
'X-Image-Meta-Property-spl_update_prop': 'update_bar',
'X-Image-Meta-Property-spl_delete_prop': 'delete_bar'
}
auth_headers.update(custom_props)
auth_headers.update(CREATE_HEADERS)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=auth_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
# Attempt to update two properties, one protected (spl_read_prop), the
# other not (spl_update_prop). Request should be forbidden.
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_read_prop': 'r',
'X-Image-Meta-Property-spl_update_prop': 'u',
'X-Glance-Registry-Purge-Props': 'False'
}
auth_headers.update(custom_props)
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT',
headers=auth_headers)
self.assertEqual(response.status, 403)
# Attempt to create properties which are forbidden
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_new_prop': 'new',
'X-Glance-Registry-Purge-Props': 'True'
}
auth_headers.update(custom_props)
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT',
headers=auth_headers)
self.assertEqual(response.status, 403)
# Attempt to update, create and delete properties
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_create_prop': 'create_bar',
'X-Image-Meta-Property-spl_read_prop': 'read_bar',
'X-Image-Meta-Property-spl_update_prop': 'u',
'X-Glance-Registry-Purge-Props': 'True'
}
auth_headers.update(custom_props)
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT',
headers=auth_headers)
self.assertEqual(response.status, 200)
# Returned image entity should reflect the changes
image = json.loads(content)
# 'spl_update_prop' has update permission for spl_role
# hence the value has changed
self.assertEqual('u', image['image']['properties']['spl_update_prop'])
# 'spl_delete_prop' has delete permission for spl_role
# hence the property has been deleted
self.assertTrue('spl_delete_prop' not in image['image']['properties'])
# 'spl_create_prop' has create permission for spl_role
# hence the property has been created
self.assertEqual('create_bar',
image['image']['properties']['spl_create_prop'])
# Image Deletion should work
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'DELETE',
headers=auth_headers)
self.assertEqual(response.status, 200)
        # This image should no longer be directly accessible
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 404)
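# For reference, a property-protections file matching the behaviour exercised
# above would look roughly like the sketch below (illustrative only; the
# actual fixture is whatever self.property_file points at):
#
#   [x_owner_foo]
#   create = member
#   read = member
#   update = member
#   delete = member
#
#   [spl_create_prop]
#   create = spl_role
#   read = spl_role
#
#   [spl_read_prop]
#   read = spl_role
#
#   [spl_update_prop]
#   read = spl_role
#   update = spl_role
#
#   [spl_delete_prop]
#   read = spl_role
#   delete = spl_role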
| apache-2.0 | -2,538,296,298,288,473,000 | 41.339648 | 79 | 0.566205 | false |
Vagab0nd/SiCKRAGE | lib3/twilio/rest/events/v1/subscription/subscribed_event.py | 1 | 14449 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class SubscribedEventList(ListResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version, subscription_sid):
"""
Initialize the SubscribedEventList
:param Version version: Version that contains the resource
:param subscription_sid: Subscription SID.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventList
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventList
"""
super(SubscribedEventList, self).__init__(version)
# Path Solution
self._solution = {'subscription_sid': subscription_sid, }
self._uri = '/Subscriptions/{subscription_sid}/SubscribedEvents'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams SubscribedEventInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, limit=None, page_size=None):
"""
Lists SubscribedEventInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of SubscribedEventInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return SubscribedEventPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of SubscribedEventInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return SubscribedEventPage(self._version, response, self._solution)
def create(self, type, version=values.unset):
"""
Create the SubscribedEventInstance
:param unicode type: Type of event being subscribed to.
:param unicode version: The schema version that the subscription should use.
:returns: The created SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
data = values.of({'Type': type, 'Version': version, })
payload = self._version.create(method='POST', uri=self._uri, data=data, )
return SubscribedEventInstance(
self._version,
payload,
subscription_sid=self._solution['subscription_sid'],
)
def get(self, type):
"""
Constructs a SubscribedEventContext
:param type: Type of event being subscribed to.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
return SubscribedEventContext(
self._version,
subscription_sid=self._solution['subscription_sid'],
type=type,
)
def __call__(self, type):
"""
Constructs a SubscribedEventContext
:param type: Type of event being subscribed to.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
return SubscribedEventContext(
self._version,
subscription_sid=self._solution['subscription_sid'],
type=type,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Events.V1.SubscribedEventList>'
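# Usage sketch (illustrative only; this module is auto-generated, so the client
# wiring and the subscription SID below are assumptions, not part of the file):
#
#   from twilio.rest import Client
#   client = Client(account_sid, auth_token)
#   subscribed_events = client.events.v1.subscriptions('DFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').subscribed_events
#   for record in subscribed_events.stream(limit=20):
#       print(record.type, record.version)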
class SubscribedEventPage(Page):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version, response, solution):
"""
Initialize the SubscribedEventPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param subscription_sid: Subscription SID.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
"""
super(SubscribedEventPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of SubscribedEventInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
return SubscribedEventInstance(
self._version,
payload,
subscription_sid=self._solution['subscription_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Events.V1.SubscribedEventPage>'
class SubscribedEventContext(InstanceContext):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version, subscription_sid, type):
"""
Initialize the SubscribedEventContext
:param Version version: Version that contains the resource
:param subscription_sid: Subscription SID.
:param type: Type of event being subscribed to.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
super(SubscribedEventContext, self).__init__(version)
# Path Solution
self._solution = {'subscription_sid': subscription_sid, 'type': type, }
self._uri = '/Subscriptions/{subscription_sid}/SubscribedEvents/{type}'.format(**self._solution)
def update(self, version):
"""
Update the SubscribedEventInstance
:param unicode version: The schema version that the subscription should use.
:returns: The updated SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
data = values.of({'Version': version, })
payload = self._version.update(method='POST', uri=self._uri, data=data, )
return SubscribedEventInstance(
self._version,
payload,
subscription_sid=self._solution['subscription_sid'],
type=self._solution['type'],
)
def delete(self):
"""
Deletes the SubscribedEventInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Events.V1.SubscribedEventContext {}>'.format(context)
class SubscribedEventInstance(InstanceResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version, payload, subscription_sid, type=None):
"""
Initialize the SubscribedEventInstance
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
super(SubscribedEventInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload.get('account_sid'),
'type': payload.get('type'),
'version': deserialize.integer(payload.get('version')),
'subscription_sid': payload.get('subscription_sid'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {'subscription_sid': subscription_sid, 'type': type or self._properties['type'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: SubscribedEventContext for this SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
if self._context is None:
self._context = SubscribedEventContext(
self._version,
subscription_sid=self._solution['subscription_sid'],
type=self._solution['type'],
)
return self._context
@property
def account_sid(self):
"""
:returns: Account SID.
:rtype: unicode
"""
return self._properties['account_sid']
@property
def type(self):
"""
:returns: Type of event being subscribed to.
:rtype: unicode
"""
return self._properties['type']
@property
def version(self):
"""
:returns: The schema version that the subscription should use.
:rtype: unicode
"""
return self._properties['version']
@property
def subscription_sid(self):
"""
:returns: Subscription SID.
:rtype: unicode
"""
return self._properties['subscription_sid']
@property
def url(self):
"""
:returns: The URL of this resource.
:rtype: unicode
"""
return self._properties['url']
def update(self, version):
"""
Update the SubscribedEventInstance
:param unicode version: The schema version that the subscription should use.
:returns: The updated SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
return self._proxy.update(version, )
def delete(self):
"""
Deletes the SubscribedEventInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Events.V1.SubscribedEventInstance {}>'.format(context)
| gpl-3.0 | 4,776,578,688,993,812,000 | 35.579747 | 107 | 0.639006 | false |
mosaic-cloud/mosaic-components-httpg | applications/mosaic-httpg/sources/mosaic_httpg_tester.py | 1 | 20676 |
import json
import os
import pprint
import random
import string
import struct
import sys
import time
import pika
_verbose = True
_broker_host = "127.0.0.1"
_broker_port = 21688
_broker_user = "guest"
_broker_password = "guest"
_broker_virtual_host = "/"
_handlers_exchange_identifier = "mosaic-http-requests"
_handlers_queue_identifier = "mosaic-http-requests"
_handlers_queue_routing_key = "#"
_reconnect_sleep = 1
_consume_sleep = 1
_glitch_probability_ = 0.0
def _loop () :
while True :
_connection = None
_channel = None
try :
if _verbose : print >> sys.stderr, "[ ] connecting..."
_connection = pika.BlockingConnection (pika.ConnectionParameters (
_broker_host, port = _broker_port, virtual_host = _broker_virtual_host,
credentials = pika.PlainCredentials (_broker_user, _broker_password)))
_channel = _connection.channel ()
except Exception as _error :
if _connection is not None :
try :
_connection.close ()
except :
pass
del _connection
del _channel
if _verbose : print >> sys.stderr, "[ee] failed while connecting: %r; sleeping and then reconnecting..." % (_error,)
time.sleep (_reconnect_sleep)
continue
try :
if _verbose : print >> sys.stderr, "[ ] declaring..."
_channel.exchange_declare (
exchange = _handlers_exchange_identifier, type = "topic",
durable = False, auto_delete = False)
_channel.queue_declare (
queue = _handlers_queue_identifier,
exclusive = False, durable = False, auto_delete = False)
_channel.queue_bind (
queue = _handlers_queue_identifier, exchange = _handlers_exchange_identifier,
routing_key = _handlers_queue_routing_key)
except Exception as _error :
print >> sys.stderr, "[ee] failed while declaring: %r; aborting!" % (_error,)
exit (1)
def _handle (_channel, _method, _properties, _body) :
if _verbose : print >> sys.stderr, "[ ] handling..."
_request_data = _body
_request_content_type = _properties.content_type
_request_content_encoding = _properties.content_encoding
_response_data, _response_content_type, _response_content_encoding, _callback_exchange, _callback_routing_key \
= _handle_message (_request_data, _request_content_type, _request_content_encoding)
if _verbose : print >> sys.stderr, "[ ] publishing: `%s` <- `%s`..." % (_callback_exchange, _callback_routing_key)
_channel.basic_publish (
_callback_exchange, _callback_routing_key, _response_data,
properties = pika.BasicProperties (content_type = _response_content_type, content_encoding = _response_content_encoding),
mandatory = False, immediate = False)
_channel.basic_ack (delivery_tag = _method.delivery_tag, multiple = False)
return
# _channel.basic_qos (prefetch_size = 0, prefetch_count = 16, global_ = False)
if False :
# while _connection.is_alive () :
while True :
_outcome = None
try :
if _verbose : print >> sys.stderr, "[ ] polling..."
_outcome = _channel.basic_get (queue = _handlers_queue_identifier)
except Exception as _error :
del _outcome
if _verbose : print >> sys.stderr, "[ee] failed while polling: %r; exiting loop..." % (_error,)
break
if isinstance (_outcome, pika.spec.Basic.GetOk) :
_handle (_channel, _outcome, _outcome.get_properties (), _outcome.get_body ())
elif isinstance (_outcome, pika.spec.Basic.GetEmpty) :
if _verbose : print >> sys.stderr, "[ ] nothing; sleeping..."
time.sleep (_consume_sleep)
else :
print >> sys.stderr, "[ee] unexpected polling outcome: %r; ignoring" % (_outcome,)
del _outcome
else :
_channel.basic_consume (_handle, queue = _handlers_queue_identifier, exclusive = False, no_ack = False)
_channel.start_consuming ()
try :
_channel.close ()
except :
pass
try :
_connection.close ()
except :
pass
del _connection
del _channel
return
def _handle_message (_request_data, _request_content_type, _request_content_encoding) :
_request, _callback_identifier, _callback_exchange, _callback_routing_key \
= _decode_request_message_body (_request_data, _request_content_type, _request_content_encoding)
_response = _process (_request)
_response_data, _response_content_type, _response_content_encoding \
= _encode_response_message_body (_response, _callback_identifier)
_glitch = _maybe_glitch (_response, _callback_identifier, _response_data, _response_content_type, _response_content_encoding)
if _glitch is not None :
_response_data, _response_content_type, _response_content_encoding = _glitch
return (_response_data, _response_content_type, _response_content_encoding, _callback_exchange, _callback_routing_key)
def _encode_response_message_body (_response, _callback_identifier) :
if _verbose : print >> sys.stderr, "[ ] encoding message:"
_decoded_headers = {
"version" : 1,
"callback-identifier" : _callback_identifier,
"http-version" : _response.http_version,
"http-code" : _response.http_code,
"http-status" : _response.http_status,
"http-headers" : _response.http_headers,
"http-body" : "following"
}
if _verbose : print >> sys.stderr, "[ ] -> decoded headers:"
if _verbose : pprint.pprint (_decoded_headers, sys.stderr)
_decoded_body = _response.http_body
if _verbose : print >> sys.stderr, "[ ] -> decoded body:"
	if _verbose : print >> sys.stderr, _decoded_body
_encoded_headers = json.dumps (_decoded_headers, False, True, False, True, None, None, None, 'utf-8')
_encoded_headers_size = len (_encoded_headers)
if _verbose : print >> sys.stderr, "[ ] -> encoded headers size: %d" % (_encoded_headers_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded headers: %r" % (_encoded_headers,)
_encoded_body = _response.http_body
_encoded_body_size = len (_encoded_body)
if _verbose : print >> sys.stderr, "[ ] -> encoded body size: %d" % (_encoded_body_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded body: %r" % (_encoded_body,)
_data = ''.join ([
struct.pack (">L", _encoded_headers_size),
_encoded_headers,
struct.pack (">L", _encoded_body_size),
_encoded_body])
_data_size = len (_data)
if _verbose : print >> sys.stderr, "[ ] -> data size: %d" % (_data_size)
if _verbose : print >> sys.stderr, "[ ] -> data: %r" % (_data,)
_content_type = 'application/octet-stream'
_content_encoding = 'binary'
if _verbose : print >> sys.stderr, "[ ] -> content type: %r;" % (_content_type,)
if _verbose : print >> sys.stderr, "[ ] -> content encoding: %r;" % (_content_encoding,)
return (_data, _content_type, _content_encoding)
def _decode_request_message_body (_data, _content_type, _content_encoding) :
if _verbose : print >> sys.stderr, "[ ] decoding message:"
if _verbose : print >> sys.stderr, "[ ] -> content type: %r;" % (_content_type,)
if _verbose : print >> sys.stderr, "[ ] -> content encoding: %r;" % (_content_encoding,)
_data_size = len (_data)
if _verbose : print >> sys.stderr, "[ ] -> data size: %d;" % (_data_size,)
if _verbose : print >> sys.stderr, "[ ] -> data: %r;" % (_data,)
assert _content_type == 'application/octet-stream'
assert _content_encoding == 'binary'
assert _data_size >= 4
_encoded_headers_size = struct.unpack (">L", _data[0:4]) [0]
_encoded_headers_offset = 4
_encoded_headers_limit = _encoded_headers_offset + _encoded_headers_size
assert _data_size >= _encoded_headers_limit
_encoded_headers = _data[_encoded_headers_offset : _encoded_headers_limit]
if _verbose : print >> sys.stderr, "[ ] -> encoded headers size: %d;" % (_encoded_headers_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded headers: %r;" % (_encoded_headers,)
_decoded_headers = json.loads (_encoded_headers, 'utf-8')
if _verbose : print >> sys.stderr, "[ ] -> decoded headers: %r;" % (_decoded_headers,)
if _verbose : print >> sys.stderr, "[ ] -> decoded headers:"
if _verbose : pprint.pprint (_decoded_headers, sys.stderr)
assert _decoded_headers.get ('version') == 1
_http_body_type = _decoded_headers.get ('http-body')
if _http_body_type == 'empty' :
assert _data_size == _encoded_headers_limit
_encoded_body = ''
_encoded_body_size = len (_encoded_body)
elif _http_body_type == 'embedded' :
assert _data_size == _encoded_headers_limit
_encoded_body = _decoded_headers.get ('http-body-content')
_encoded_body_size = len (_encoded_body)
elif _http_body_type == 'following' :
assert _data_size >= _encoded_headers_limit + 4
_encoded_body_size = struct.unpack (">L", _data[_encoded_headers_limit : _encoded_headers_limit + 4]) [0]
_encoded_body_offset = _encoded_headers_limit + 4
_encoded_body_limit = _encoded_body_offset + _encoded_body_size
assert _data_size == _encoded_body_limit
		_encoded_body = _data[_encoded_body_offset : _encoded_body_limit]
else :
assert False
if _verbose : print >> sys.stderr, "[ ] -> encoded body size: %d;" % (_encoded_body_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded body: %r;" % (_encoded_body,)
_decoded_body = _encoded_body
if _verbose : print >> sys.stderr, "[ ] -> decoded body:"
if _verbose : print >> sys.stderr, _decoded_body
_request = _Request (
socket_remote_ip = _decoded_headers.get ('socket-remote-ip'),
socket_remote_port = _decoded_headers.get ('socket-remote-port'),
socket_remote_fqdn = _decoded_headers.get ('socket-remote-fqdn'),
socket_local_ip = _decoded_headers.get ('socket-local-ip'),
socket_local_port = _decoded_headers.get ('socket-local-port'),
socket_local_fqdn = _decoded_headers.get ('socket-local-fqdn'),
http_version = _decoded_headers.get ('http-version'),
http_method = _decoded_headers.get ('http-method'),
http_uri = _decoded_headers.get ('http-uri'),
http_headers = _decoded_headers.get ('http-headers'),
http_body = _decoded_body)
_callback_identifier = str (_decoded_headers.get ('callback-identifier'))
_callback_exchange = str (_decoded_headers.get ('callback-exchange'))
_callback_routing_key = str (_decoded_headers.get ('callback-routing-key'))
if _verbose : print >> sys.stderr, "[ ] -> callback identifier: %r;" % (_callback_identifier,)
if _verbose : print >> sys.stderr, "[ ] -> callback exchange: %r;" % (_callback_exchange,)
if _verbose : print >> sys.stderr, "[ ] -> callback routing key: %r;" % (_callback_routing_key,)
return (_request, _callback_identifier, _callback_exchange, _callback_routing_key)
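# Wire format used by the encode/decode helpers above (sizes are 4-byte
# big-endian unsigned ints, matching the ">L" struct packs):
#   [headers size][JSON headers][body size][body]
# Minimal sketch of building such a frame by hand, e.g. for tests:
#   _headers = json.dumps ({"version" : 1, "http-body" : "following"})
#   _frame = (struct.pack (">L", len (_headers)) + _headers
#             + struct.pack (">L", len ("Ok")) + "Ok")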
class _Request (object) :
def __init__ (self,
socket_remote_ip = None, socket_remote_port = None, socket_remote_fqdn = None,
socket_local_ip = None, socket_local_port = None, socket_local_fqdn = None,
http_version = None, http_method = None, http_uri = None,
http_headers = None, http_body = None) :
self.socket_remote_ip = socket_remote_ip
self.socket_remote_port = socket_remote_port
self.socket_remote_fqdn = socket_remote_fqdn
self.socket_local_ip = socket_local_ip
self.socket_local_port = socket_local_port
self.socket_local_fqdn = socket_local_fqdn
self.http_version = http_version
self.http_method = http_method
self.http_uri = http_uri
self.http_headers = http_headers
self.http_body = http_body
return
class _Response (object) :
def __init__ (self,
http_version = None, http_code = None, http_status = None,
http_headers = None, http_body = None) :
self.http_version = http_version
self.http_code = http_code
self.http_status = http_status
self.http_headers = http_headers
self.http_body = http_body
return
def _process (_request) :
if _verbose : print >> sys.stderr, "[ ] processing:"
if _verbose : print >> sys.stderr, "[ ] -> method: %s" % (_request.http_method,)
if _verbose : print >> sys.stderr, "[ ] -> uri: %s" % (_request.http_uri,)
_body = "Ok: pid = %d, time = %f" % (os.getpid (), time.time ())
_response = _Response (
http_version = _request.http_version,
http_code = 200,
http_status = "Ok",
http_headers = {
"Content-Length" : str (len (_body)),
"Content-Type" : "text/plain",
},
http_body = _body)
return _response
def _maybe_glitch (_response_, _callback_identifier_, _response_data_, _response_content_type_, _response_content_encoding_) :
global _glitch_probability_
if random.random () > _glitch_probability_ :
sys.stderr.write ('.')
return None
sys.stderr.write ('!')
_response_data = None
_response_content_type = None
_response_content_encoding = None
_response_headers_data = None
_response_headers_size = None
_response_body_data = None
_response_body_size = None
_response_headers = {
"version" : 1,
"callback-identifier" : _callback_identifier_,
"http-version" : _response_.http_version,
"http-code" : _response_.http_code,
"http-status" : _response_.http_status,
"http-headers" : _response_.http_headers,
"http-body" : "following"}
_response_body = None
if not hasattr (_maybe_glitch, "_glitches") :
_glitches = [
('content-type/none', 0.1), ('content-type/random', 0.1), ('content-type/garbage', 0.1),
('content-encoding/none', 0.1), ('content-encoding/random', 0.1), ('content-encoding/garbage', 0.1),
('response-headers/version', 0.1), ('response-headers/callback-identifier', 0.1),
('response-headers/http-version', 0.1), ('response-headers/http-code', 0.0), ('response-headers/http-status', 0.1),
('response-headers/http-headers', 0.1), ('response-headers/http-body', 0.1), ('response-headers/http-body-content', 0.1),
('response-body/none', 0.01), ('response-body/random', 0.01), ('response-body/garbage', 0.01),
('response-data/none', 0.01), ('response-data/random', 0.01), ('response-data/garbage', 0.01),
('response-headers-data/none', 0.01), ('response-headers-data/random', 0.01), ('response-headers-data/garbage', 0.01),
('response-headers-data/size', 0.01), ('response-body-data/size', 0.01)]
_sum = 0.0
for _glitch_identifier, _glitch_probability in _glitches :
_sum += _glitch_probability
for _glitch_index in xrange (len (_glitches)) :
_glitches[_glitch_index] = (_glitches[_glitch_index][0], _glitches[_glitch_index][1] / _sum)
_maybe_glitch._glitches = _glitches
else :
_glitches = _maybe_glitch._glitches
while True :
_glitch = None
_glitch_remaining_probability = 1.0
for _glitch_identifier, _glitch_probability in _glitches :
if random.random () <= (_glitch_probability / _glitch_remaining_probability) :
_glitch = _glitch_identifier
break
_glitch_remaining_probability -= _glitch_probability
assert _glitch is not None
if _glitch == 'content-type/none' :
if _response_content_type is not None :
continue
_response_content_type = ''
elif _glitch == 'content-type/random' :
if _response_content_type is not None :
continue
_response_content_type = _generate_printable_string (1, 64)
elif _glitch == 'content-type/garbage' :
if _response_content_type is not None :
continue
_response_content_type = _generate_garbage_string (1, 64)
elif _glitch == 'content-encoding/none' :
if _response_content_encoding is not None :
continue
_response_content_encoding = ''
elif _glitch == 'content-encoding/random' :
if _response_content_encoding is not None :
continue
_response_content_encoding = _generate_printable_string (1, 64)
elif _glitch == 'content-encoding/garbage' :
if _response_content_encoding is not None :
continue
_response_content_encoding = _generate_garbage_string (1, 64)
elif _glitch == 'response-data/none' :
if _response_data is not None :
continue
_response_data = ''
elif _glitch == 'response-data/random' :
if _response_data is not None :
continue
_response_data = _generate_printable_string (1, 128)
elif _glitch == 'response-data/garbage' :
if _response_data is not None :
continue
_response_data = _generate_garbage_string (1, 128)
elif _glitch == 'response-headers-data/none' :
if _response_headers_data is not None :
continue
_response_headers_data = ''
elif _glitch == 'response-headers-data/random' :
if _response_headers_data is not None :
continue
_response_headers_data = _generate_printable_string (1, 128)
elif _glitch == 'response-headers-data/garbage' :
if _response_headers_data is not None :
continue
_response_headers_data = _generate_garbage_string (1, 128)
elif _glitch == 'response-headers-data/size' :
if _response_headers_size is not None :
continue
			_response_headers_size = random.randint (0, (1 << 32) - 1)
elif _glitch == 'response-body-data/size' :
			if _response_body_size is not None :
continue
			_response_body_size = random.randint (0, (1 << 32) - 1)
elif _glitch == 'response-headers/version' :
_response_headers['version'] = _generate_random_json ()
elif _glitch == 'response-headers/callback-identifier' :
_response_headers['callback-identifier'] = _generate_random_json ()
elif _glitch == 'response-headers/http-version' :
_response_headers['http-version'] = _generate_random_json ()
elif _glitch == 'response-headers/http-code' :
_response_headers['http-code'] = _generate_random_json ()
elif _glitch == 'response-headers/http-status' :
_response_headers['http-status'] = _generate_random_json ()
elif _glitch == 'response-headers/http-headers' :
_response_headers['http-headers'] = _generate_random_json ()
elif _glitch == 'response-headers/http-body' :
_response_headers['http-body'] = _generate_random_json ()
elif _glitch == 'response-headers/http-body-content' :
_response_headers['http-body-content'] = _generate_random_json ()
elif _glitch == 'response-body/none' :
if _response_body is not None :
continue
_response_body = ''
elif _glitch == 'response-body/random' :
if _response_body is not None :
continue
_response_body = _generate_printable_string (1, 128)
elif _glitch == 'response-body/garbage' :
if _response_body is not None :
continue
_response_body = _generate_garbage_string (1, 128)
else :
print >> sys.stderr, '[ee] unknown glitch: ' + _glitch
if _response_data is not None :
break
		if random.random () > 0.2 :
break
if _response_data is None :
if _response_headers_data is None :
_response_headers_data = json.dumps (_response_headers, False, True, False, True, None, None, None, 'utf-8')
if _response_headers_size is None :
_response_headers_size = len (_response_headers_data)
_response_headers_data = struct.pack (">L", _response_headers_size) + _response_headers_data
if _response_body_data is None :
if _response_body is None :
_response_body = _response_.http_body
_response_body_data = _response_body
if _response_body_size is None :
_response_body_size = len (_response_body_data)
_response_body_data = struct.pack (">L", _response_body_size) + _response_body_data
_response_data = _response_headers_data + _response_body_data
if _response_content_type is None :
_response_content_type = _response_content_type_
if _response_content_encoding is None :
_response_content_encoding = _response_content_encoding_
return _response_data, _response_content_type, _response_content_encoding
def _generate_printable_string (_min_length, _max_length) :
return ''.join ([chr (random.randint (32, 127)) for i in xrange (random.randint (_min_length, _max_length))])
def _generate_garbage_string (_min_length, _max_length) :
return ''.join ([chr (random.randint (0, 255)) for i in xrange (random.randint (_min_length, _max_length))])
def _generate_random_json (_depth_probability = 1.0) :
if random.random () < _depth_probability :
_choice = random.randint (0, 5)
else :
_choice = random.randint (0, 3)
if _choice == 0 :
return _generate_printable_string (1, 32)
elif _choice == 1 :
		return random.randint (-(1 << 31), (1 << 31) - 1)
elif _choice == 2 :
		return random.random () * random.randint (-(1 << 31), (1 << 31) - 1)
elif _choice == 3 :
return random.choice ([True, False, None])
elif _choice == 4 :
return [_generate_random_json (_depth_probability * 0.01) for i in xrange (0, 128)]
elif _choice == 5 :
_dict = {}
for i in xrange (0, 128) :
_dict[_generate_printable_string (1, 32)] = _generate_random_json (_depth_probability * 0.01)
return _dict
else :
assert False
return None
if __name__ == '__main__' :
assert len (sys.argv) == 1
_loop ()
| apache-2.0 | -3,355,641,044,008,359,000 | 34.895833 | 126 | 0.653415 | false |
doctori/PythonTDD | lists/tests/test_forms.py | 1 | 1907 | from django.test import TestCase
from lists.forms import (
DUPLICATE_ITEM_ERROR,EMPTY_ITEM_ERROR,
ExistingListItemForm,ItemForm
)
from lists.models import Item, List
class ItemFormTest(TestCase):
def test_form_renders_item_text_input(self):
form = ItemForm()
self.assertIn('placeholder="Enter a to-do item"', form.as_p())
self.assertIn('class="form-control input-lg"', form.as_p())
def test_form_validation_for_blank_items(self):
form = ItemForm(data={'text':''})
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['text'],
[EMPTY_ITEM_ERROR]
)
def test_form_save_handles_saving_item_to_a_list(self):
list_ = List.objects.create()
form = ItemForm(data={'text':'save me'})
new_item = form.save(for_list=list_)
self.assertEqual(new_item, Item.objects.first())
self.assertEqual(new_item.text, 'save me')
self.assertEqual(new_item.list, list_)
class ExistingListItemFormTest(TestCase):
def test_form_renders_item_text_input(self):
list_ = List.objects.create()
form = ExistingListItemForm(for_list=list_)
self.assertIn('placeholder="Enter a to-do item"', form.as_p())
def test_form_save(self):
list_ = List.objects.create()
form = ExistingListItemForm(for_list=list_, data={'text':'save me!'})
new_item = form.save()
self.assertEqual(new_item, Item.objects.all()[0])
def test_form_validation_for_blank_items(self):
list_ = List.objects.create()
form = ExistingListItemForm(for_list=list_,data={'text':''})
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['text'],
[EMPTY_ITEM_ERROR]
)
def test_form_validation_for_duplicate_items(self):
list_ = List.objects.create()
Item.objects.create(list=list_,text='Am I Unique ?')
form = ExistingListItemForm(for_list=list_,data={'text':'Am I Unique ?'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],[DUPLICATE_ITEM_ERROR])
| gpl-2.0 | -2,520,031,414,164,869,600 | 33.053571 | 75 | 0.706869 | false |
pferreir/indico-backup | indico/MaKaC/plugins/Collaboration/ravem.py | 1 | 3751 | # -*- coding: utf-8 -*-
##
## $id$
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from MaKaC.plugins.Collaboration.collaborationTools import CollaborationTools
from indico.core.logger import Logger
from requests.auth import HTTPDigestAuth
import requests
from urllib import urlencode
class RavemClient(object):
""" Singleton for the client for RAVEM API
"""
_instance = None
def __init__(self, username, password, url):
self._username = username
self._password = password
self._url = url
def performOperation(self, operation):
data = requests.get(self._url + operation, auth=HTTPDigestAuth(self._username, self._password), verify=False)
return data
@classmethod
def getInstance(cls, ravem_api_url=None, username=None, password=None):
if cls._instance is None or (ravem_api_url is not None or username is not None or password is not None):
if ravem_api_url is None:
ravem_api_url = CollaborationTools.getCollaborationOptionValue('ravemAPIURL')
if username is None:
username = CollaborationTools.getCollaborationOptionValue('ravemUsername')
if password is None:
password = CollaborationTools.getCollaborationOptionValue('ravemPassword')
try:
cls._instance = RavemClient(username, password, ravem_api_url)
except Exception:
Logger.get("Ravem").exception("Problem building RavemClient")
raise
return cls._instance
class RavemApi(object):
""" This class performs low-level operations by getting the corresponding
client and calling a service.
"""
@classmethod
def _api_operation(cls, service, *args, **kwargs):
try:
url = "/%s?%s" % (service, urlencode(kwargs))
ravemClient = RavemClient.getInstance()
return ravemClient.performOperation(url)
except Exception, e:
            Logger.get('Ravem').exception("""Ravem API's '%s' operation not successful: %s""" % (service, e.message))
raise
@classmethod
def isLegacyEndpointConnected(cls, room_ip):
return cls._api_operation("getstatus", where="vc_endpoint_legacy_ip", value=room_ip)
@classmethod
def isVidyoPanoramaConnected(cls, vidyo_panorama_id):
return cls._api_operation("getstatus", where="vc_endpoint_vidyo_username", value=vidyo_panorama_id)
@classmethod
def disconnectLegacyEndpoint(cls, room_ip, service_type, room_name):
return cls._api_operation("videoconference/disconnect", type=service_type, where="vc_endpoint_legacy_ip",
value=room_ip, vidyo_room_name=room_name)
@classmethod
def disconnectVidyoPanorama(cls, vidyo_panorama_id, service_type, room_name):
return cls._api_operation("videoconference/disconnect", type=service_type, where="vc_endpoint_vidyo_username",
value=vidyo_panorama_id, vidyo_room_name=room_name)
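# Usage sketch (illustrative; the credentials and API URL are read from the
# Collaboration plugin options inside getInstance, and the endpoint IP below is
# a placeholder). Each helper returns the `requests` Response object:
#
#   response = RavemApi.isLegacyEndpointConnected("137.138.0.1")
#   if response.ok:
#       print response.json()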
| gpl-3.0 | 205,378,133,880,764,830 | 39.333333 | 118 | 0.672354 | false |
helixyte/TheLMA | thelma/repositories/rdb/schema/tables/samplemolecule.py | 1 | 1342 | """
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Sample molecule table.
"""
from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Float
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Table
__docformat__ = 'reStructuredText en'
__all__ = ['create_table']
def create_table(metadata, sample_tbl, molecule_tbl):
"Table factory."
tbl = Table('sample_molecule', metadata,
Column('sample_id', Integer,
ForeignKey(sample_tbl.c.sample_id,
onupdate='CASCADE', ondelete='CASCADE'),
primary_key=True, index=True),
Column('molecule_id', Integer,
ForeignKey(molecule_tbl.c.molecule_id,
onupdate='CASCADE', ondelete='RESTRICT'),
primary_key=True, index=True),
Column('concentration', Float, CheckConstraint('concentration>=0.0')),
Column('freeze_thaw_cycles', Integer,
CheckConstraint('freeze_thaw_cycles IS NULL OR '
'freeze_thaw_cycles >= 0')),
Column('checkout_date', DateTime(timezone=True)),
)
return tbl
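# Usage sketch (illustrative; the sample and molecule tables come from the
# sibling table factories of this package, the names below are assumptions):
#
#   metadata = MetaData()
#   sample_molecule_tbl = create_table(metadata, sample_tbl, molecule_tbl)
#   metadata.create_all(engine)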
| mit | -2,361,379,783,342,005,000 | 35.27027 | 80 | 0.646796 | false |
avedaee/DIRAC | Core/Utilities/InstallTools.py | 1 | 84293 | ########################################################################
# $HeadURL$
# File : InstallTools.py
# Author : Ricardo Graciani
########################################################################
"""
Collection of Tools for installation of DIRAC components: MySQL, DBs, Services, Agents
It only makes use of defaults in LocalInstallation Section in dirac.cfg
The Following Options are used:
/DIRAC/Setup: Setup to be used for any operation
/LocalInstallation/InstanceName: Name of the Instance for the current Setup (default /DIRAC/Setup)
/LocalInstallation/LogLevel: LogLevel set in "run" script for all components installed
/LocalInstallation/RootPath: Used instead of rootPath in "run" script if defined (if links are used to named versions)
/LocalInstallation/InstancePath: Location where runit and startup directories are created (default rootPath)
/LocalInstallation/UseVersionsDir: DIRAC is installed under versions/<Versioned Directory> with a link from pro
(This option overwrites RootPath and InstancePath)
/LocalInstallation/Host: Used when building the URL to be published for the installed service (default: socket.getfqdn())
/LocalInstallation/RunitDir: Location where runit directory is created (default InstancePath/runit)
/LocalInstallation/StartupDir: Location where startup directory is created (default InstancePath/startup)
/LocalInstallation/MySQLDir: Location where mysql databases are created (default InstancePath/mysql)
/LocalInstallation/Database/User: (default Dirac)
/LocalInstallation/Database/Password: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/RootPwd: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/Host: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/MySQLSmallMem: Configure a MySQL with small memory requirements for testing purposes innodb_buffer_pool_size=200MB
/LocalInstallation/Database/MySQLLargeMem: Configure a MySQL with high memory requirements for production purposes innodb_buffer_pool_size=10000MB
The setupSite method (used by the dirac-setup-site command) will use the following info:
/LocalInstallation/Systems: List of Systems to be defined for this instance in the CS (default: Configuration, Framework)
/LocalInstallation/Databases: List of Databases to be installed and configured
/LocalInstallation/Services: List of System/ServiceName to be setup
/LocalInstallation/Agents: List of System/AgentName to be setup
/LocalInstallation/WebPortal: Boolean to setup the Web Portal (default no)
/LocalInstallation/ConfigurationMaster: Boolean, requires Configuration/Server to be given in the list of Services (default: no)
/LocalInstallation/PrivateConfiguration: Boolean, requires Configuration/Server to be given in the list of Services (default: no)
If a Master Configuration Server is being installed the following Options can be used:
/LocalInstallation/ConfigurationName: Name of the Configuration (default: Setup )
/LocalInstallation/AdminUserName: Name of the Admin user (default: None )
/LocalInstallation/AdminUserDN: DN of the Admin user certificate (default: None )
/LocalInstallation/AdminUserEmail: Email of the Admin user (default: None )
/LocalInstallation/AdminGroupName: Name of the Admin group (default: dirac_admin )
/LocalInstallation/HostDN: DN of the host certificate (default: None )
/LocalInstallation/VirtualOrganization: Name of the main Virtual Organization (default: None)
"""
__RCSID__ = "$Id$"
#
import os, re, glob, stat, time, shutil, socket
gDefaultPerms = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
import DIRAC
from DIRAC import rootPath
from DIRAC import gLogger
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities.CFG import CFG
from DIRAC.Core.Utilities.Version import getVersion
from DIRAC.Core.Utilities.Subprocess import systemCall
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.ConfigurationSystem.Client.Helpers import cfgPath, cfgPathToList, cfgInstallPath, \
cfgInstallSection, ResourcesDefaults, CSGlobals
from DIRAC.Core.Security.Properties import ALARMS_MANAGEMENT, SERVICE_ADMINISTRATOR, \
CS_ADMINISTRATOR, JOB_ADMINISTRATOR, \
FULL_DELEGATION, PROXY_MANAGEMENT, OPERATOR, \
NORMAL_USER, TRUSTED_HOST
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.Base.private.ModuleLoader import ModuleLoader
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Base.ExecutorModule import ExecutorModule
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities.PrettyPrint import printTable
from DIRAC.Core.Utilities.Platform import getPlatformString
# On command line tools this can be set to True to abort after the first error.
exitOnError = False
# First some global defaults
gLogger.debug( 'DIRAC Root Path =', rootPath )
def loadDiracCfg( verbose = False ):
"""
Read again defaults from dirac.cfg
"""
global localCfg, cfgFile, setup, instance, logLevel, linkedRootPath, host
global basePath, instancePath, runitDir, startDir
global db, mysqlDir, mysqlDbDir, mysqlLogDir, mysqlMyOrg, mysqlMyCnf, mysqlStartupScript
global mysqlRootPwd, mysqlUser, mysqlPassword, mysqlHost, mysqlMode
global mysqlSmallMem, mysqlLargeMem, mysqlPort, mysqlRootUser
from DIRAC.Core.Utilities.Network import getFQDN
localCfg = CFG()
cfgFile = os.path.join( rootPath, 'etc', 'dirac.cfg' )
try:
localCfg.loadFromFile( cfgFile )
except Exception:
gLogger.always( "Can't load ", cfgFile )
gLogger.always( "Might be OK if setting up the site" )
setup = localCfg.getOption( cfgPath( 'DIRAC', 'Setup' ), '' )
instance = localCfg.getOption( cfgInstallPath( 'InstanceName' ), setup )
logLevel = localCfg.getOption( cfgInstallPath( 'LogLevel' ), 'INFO' )
linkedRootPath = localCfg.getOption( cfgInstallPath( 'RootPath' ), rootPath )
useVersionsDir = localCfg.getOption( cfgInstallPath( 'UseVersionsDir' ), False )
host = localCfg.getOption( cfgInstallPath( 'Host' ), getFQDN() )
basePath = os.path.dirname( rootPath )
instancePath = localCfg.getOption( cfgInstallPath( 'InstancePath' ), rootPath )
if useVersionsDir:
# This option takes precedence
instancePath = os.path.dirname( os.path.dirname( rootPath ) )
linkedRootPath = os.path.join( instancePath, 'pro' )
if verbose:
gLogger.notice( 'Using Instance Base Dir at', instancePath )
runitDir = os.path.join( instancePath, 'runit' )
runitDir = localCfg.getOption( cfgInstallPath( 'RunitDir' ), runitDir )
if verbose:
gLogger.notice( 'Using Runit Dir at', runitDir )
startDir = os.path.join( instancePath, 'startup' )
startDir = localCfg.getOption( cfgInstallPath( 'StartupDir' ), startDir )
if verbose:
gLogger.notice( 'Using Startup Dir at', startDir )
# Now some MySQL default values
db = {}
mysqlDir = os.path.join( instancePath, 'mysql' )
mysqlDir = localCfg.getOption( cfgInstallPath( 'MySQLDir' ), mysqlDir )
if verbose:
gLogger.notice( 'Using MySQL Dir at', mysqlDir )
mysqlDbDir = os.path.join( mysqlDir, 'db' )
mysqlLogDir = os.path.join( mysqlDir, 'log' )
mysqlMyOrg = os.path.join( rootPath, 'mysql', 'etc', 'my.cnf' )
mysqlMyCnf = os.path.join( mysqlDir, '.my.cnf' )
mysqlStartupScript = os.path.join( rootPath, 'mysql', 'share', 'mysql', 'mysql.server' )
mysqlRootPwd = localCfg.getOption( cfgInstallPath( 'Database', 'RootPwd' ), mysqlRootPwd )
if verbose and mysqlRootPwd:
gLogger.notice( 'Reading Root MySQL Password from local configuration' )
mysqlUser = localCfg.getOption( cfgInstallPath( 'Database', 'User' ), '' )
if mysqlUser:
if verbose:
gLogger.notice( 'Reading MySQL User from local configuration' )
else:
mysqlUser = 'Dirac'
mysqlPassword = localCfg.getOption( cfgInstallPath( 'Database', 'Password' ), mysqlPassword )
if verbose and mysqlPassword:
gLogger.notice( 'Reading %s MySQL Password from local configuration ' % mysqlUser )
mysqlHost = localCfg.getOption( cfgInstallPath( 'Database', 'Host' ), '' )
if mysqlHost:
if verbose:
gLogger.notice( 'Using MySQL Host from local configuration', mysqlHost )
else:
# if it is not defined use the same as for dirac services
mysqlHost = host
mysqlPort = localCfg.getOption( cfgInstallPath( 'Database', 'Port' ), 0 )
if mysqlPort:
if verbose:
gLogger.notice( 'Using MySQL Port from local configuration ', mysqlPort )
else:
# if it is not defined use the same as for dirac services
mysqlPort = 3306
mysqlRootUser = localCfg.getOption( cfgInstallPath( 'Database', 'RootUser' ), '' )
if mysqlRootUser:
if verbose:
gLogger.notice( 'Using MySQL root user from local configuration ', mysqlRootUser )
else:
# if it is not defined use root
mysqlRootUser = 'root'
mysqlMode = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLMode' ), '' )
if verbose and mysqlMode:
gLogger.notice( 'Configuring MySQL server as %s' % mysqlMode )
mysqlSmallMem = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLSmallMem' ), False )
if verbose and mysqlSmallMem:
    gLogger.notice( 'Configuring MySQL server for Low Memory usage' )
mysqlLargeMem = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLLargeMem' ), False )
if verbose and mysqlLargeMem:
    gLogger.notice( 'Configuring MySQL server for Large Memory usage' )
# FIXME: we probably need a better way to do this
mysqlRootPwd = ''
mysqlPassword = ''
mysqlMode = ''
localCfg = None
cfgFile = ''
setup = ''
instance = ''
logLevel = ''
linkedRootPath = ''
host = ''
basePath = ''
instancePath = ''
runitDir = ''
startDir = ''
db = {}
mysqlDir = ''
mysqlDbDir = ''
mysqlLogDir = ''
mysqlMyOrg = ''
mysqlMyCnf = ''
mysqlStartupScript = ''
mysqlUser = ''
mysqlHost = ''
mysqlPort = ''
mysqlRootUser = ''
mysqlSmallMem = ''
mysqlLargeMem = ''
loadDiracCfg()
def getInfo( extensions ):
result = getVersion()
if not result['OK']:
return result
rDict = result['Value']
if setup:
rDict['Setup'] = setup
else:
rDict['Setup'] = 'Unknown'
return S_OK( rDict )
def getExtensions():
"""
Get the list of installed extensions
"""
initList = glob.glob( os.path.join( rootPath, '*DIRAC', '__init__.py' ) )
extensions = [ os.path.basename( os.path.dirname( k ) ) for k in initList]
try:
extensions.remove( 'DIRAC' )
except Exception:
error = 'DIRAC is not properly installed'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
return S_OK( extensions )
def _addCfgToDiracCfg( cfg, verbose = False ):
"""
Merge cfg into existing dirac.cfg file
"""
global localCfg
if str( localCfg ):
newCfg = localCfg.mergeWith( cfg )
else:
newCfg = cfg
result = newCfg.writeToFile( cfgFile )
if not result:
return result
loadDiracCfg( verbose )
return result
def _addCfgToCS( cfg ):
"""
Merge cfg into central CS
"""
cfgClient = CSAPI()
result = cfgClient.downloadCSData()
if not result['OK']:
return result
result = cfgClient.mergeFromCFG( cfg )
if not result['OK']:
return result
result = cfgClient.commit()
return result
def _addCfgToLocalCS( cfg ):
"""
Merge cfg into local CS
"""
csName = localCfg.getOption( cfgPath( 'DIRAC', 'Configuration', 'Name' ) , '' )
if not csName:
error = 'Missing %s' % cfgPath( 'DIRAC', 'Configuration', 'Name' )
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
csCfg = CFG()
csFile = os.path.join( rootPath, 'etc', '%s.cfg' % csName )
if os.path.exists( csFile ):
csCfg.loadFromFile( csFile )
if str( csCfg ):
newCfg = csCfg.mergeWith( cfg )
else:
newCfg = cfg
return newCfg.writeToFile( csFile )
def _getCentralCfg( installCfg ):
"""
Create the skeleton of central Cfg for an initial Master CS
"""
# First copy over from installation cfg
centralCfg = CFG()
# DIRAC/Extensions
extensions = localCfg.getOption( cfgInstallPath( 'Extensions' ), [] )
while 'Web' in list( extensions ):
extensions.remove( 'Web' )
centralCfg.createNewSection( 'DIRAC', '' )
if extensions:
centralCfg['DIRAC'].addKey( 'Extensions', ','.join( extensions ), '' )
vo = localCfg.getOption( cfgInstallPath( 'VirtualOrganization' ), '' )
if vo:
centralCfg['DIRAC'].addKey( 'VirtualOrganization', vo, '' )
for section in [ 'Systems', 'Resources',
'Resources/Sites', 'Resources/Domains',
'Operations', 'Website', 'Registry' ]:
if installCfg.isSection( section ):
centralCfg.createNewSection( section, contents = installCfg[section] )
# Now try to add things from the Installation section
# Registry
adminUserName = localCfg.getOption( cfgInstallPath( 'AdminUserName' ), '' )
adminUserDN = localCfg.getOption( cfgInstallPath( 'AdminUserDN' ), '' )
adminUserEmail = localCfg.getOption( cfgInstallPath( 'AdminUserEmail' ), '' )
adminGroupName = localCfg.getOption( cfgInstallPath( 'AdminGroupName' ), 'dirac_admin' )
hostDN = localCfg.getOption( cfgInstallPath( 'HostDN' ), '' )
defaultGroupName = 'user'
adminGroupProperties = [ ALARMS_MANAGEMENT, SERVICE_ADMINISTRATOR,
CS_ADMINISTRATOR, JOB_ADMINISTRATOR,
FULL_DELEGATION, PROXY_MANAGEMENT, OPERATOR ]
defaultGroupProperties = [ NORMAL_USER ]
defaultHostProperties = [ TRUSTED_HOST, CS_ADMINISTRATOR,
JOB_ADMINISTRATOR, FULL_DELEGATION,
PROXY_MANAGEMENT, OPERATOR ]
for section in ( cfgPath( 'Registry' ),
cfgPath( 'Registry', 'Users' ),
cfgPath( 'Registry', 'Groups' ),
cfgPath( 'Registry', 'Hosts' ) ):
if not centralCfg.isSection( section ):
centralCfg.createNewSection( section )
if adminUserName:
if not ( adminUserDN and adminUserEmail ):
      gLogger.error( 'AdminUserName is given but DN or Email is missing, it will not be configured' )
else:
for section in [ cfgPath( 'Registry', 'Users', adminUserName ),
cfgPath( 'Registry', 'Groups', defaultGroupName ),
cfgPath( 'Registry', 'Groups', adminGroupName ) ]:
if not centralCfg.isSection( section ):
centralCfg.createNewSection( section )
if centralCfg['Registry'].existsKey( 'DefaultGroup' ):
centralCfg['Registry'].deleteKey( 'DefaultGroup' )
centralCfg['Registry'].addKey( 'DefaultGroup', defaultGroupName, '' )
if centralCfg['Registry']['Users'][adminUserName].existsKey( 'DN' ):
centralCfg['Registry']['Users'][adminUserName].deleteKey( 'DN' )
centralCfg['Registry']['Users'][adminUserName].addKey( 'DN', adminUserDN, '' )
if centralCfg['Registry']['Users'][adminUserName].existsKey( 'Email' ):
centralCfg['Registry']['Users'][adminUserName].deleteKey( 'Email' )
centralCfg['Registry']['Users'][adminUserName].addKey( 'Email' , adminUserEmail, '' )
# Add Admin User to Admin Group and default group
for group in [adminGroupName, defaultGroupName]:
if not centralCfg['Registry']['Groups'][group].isOption( 'Users' ):
centralCfg['Registry']['Groups'][group].addKey( 'Users', '', '' )
users = centralCfg['Registry']['Groups'][group].getOption( 'Users', [] )
if adminUserName not in users:
centralCfg['Registry']['Groups'][group].appendToOption( 'Users', ', %s' % adminUserName )
if not centralCfg['Registry']['Groups'][group].isOption( 'Properties' ):
centralCfg['Registry']['Groups'][group].addKey( 'Properties', '', '' )
properties = centralCfg['Registry']['Groups'][adminGroupName].getOption( 'Properties', [] )
for prop in adminGroupProperties:
if prop not in properties:
properties.append( prop )
centralCfg['Registry']['Groups'][adminGroupName].appendToOption( 'Properties', ', %s' % prop )
properties = centralCfg['Registry']['Groups'][defaultGroupName].getOption( 'Properties', [] )
for prop in defaultGroupProperties:
if prop not in properties:
properties.append( prop )
centralCfg['Registry']['Groups'][defaultGroupName].appendToOption( 'Properties', ', %s' % prop )
# Add the master Host description
if hostDN:
hostSection = cfgPath( 'Registry', 'Hosts', host )
if not centralCfg.isSection( hostSection ):
centralCfg.createNewSection( hostSection )
if centralCfg['Registry']['Hosts'][host].existsKey( 'DN' ):
centralCfg['Registry']['Hosts'][host].deleteKey( 'DN' )
centralCfg['Registry']['Hosts'][host].addKey( 'DN', hostDN, '' )
if not centralCfg['Registry']['Hosts'][host].isOption( 'Properties' ):
centralCfg['Registry']['Hosts'][host].addKey( 'Properties', '', '' )
properties = centralCfg['Registry']['Hosts'][host].getOption( 'Properties', [] )
for prop in defaultHostProperties:
if prop not in properties:
properties.append( prop )
centralCfg['Registry']['Hosts'][host].appendToOption( 'Properties', ', %s' % prop )
# Operations
if adminUserEmail:
operationsCfg = __getCfg( cfgPath( 'Operations', 'Defaults', 'EMail' ), 'Production', adminUserEmail )
centralCfg = centralCfg.mergeWith( operationsCfg )
operationsCfg = __getCfg( cfgPath( 'Operations', 'Defaults', 'EMail' ), 'Logging', adminUserEmail )
centralCfg = centralCfg.mergeWith( operationsCfg )
# Website
websiteCfg = __getCfg( cfgPath( 'Website', 'Authorization',
'systems', 'configuration' ), 'Default', 'all' )
websiteCfg['Website'].addKey( 'DefaultGroups',
', '.join( ['visitor', defaultGroupName, adminGroupName] ), '' )
websiteCfg['Website'].addKey( 'DefaultSetup', setup, '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showHistory' ,
'CSAdministrator' , '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'commitConfiguration' ,
'CSAdministrator' , '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showCurrentDiff' ,
'CSAdministrator' , '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showDiff' ,
'CSAdministrator' , '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'rollbackToVersion' ,
'CSAdministrator' , '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'manageRemoteConfig' ,
'CSAdministrator' , '' )
websiteCfg['Website']['Authorization']['systems']['configuration'].appendToOption( 'manageRemoteConfig' ,
', ServiceAdministrator' )
centralCfg = centralCfg.mergeWith( websiteCfg )
return centralCfg
def __getCfg( section, option = '', value = '' ):
"""
Create a new Cfg with given info
"""
if not section:
return None
cfg = CFG()
sectionList = []
for sect in cfgPathToList( section ):
if not sect:
continue
sectionList.append( sect )
cfg.createNewSection( cfgPath( *sectionList ) )
if not sectionList:
return None
if option and value:
sectionList.append( option )
cfg.setOption( cfgPath( *sectionList ), value )
return cfg
def addOptionToDiracCfg( option, value ):
"""
Add Option to dirac.cfg
"""
optionList = cfgPathToList( option )
optionName = optionList[-1]
section = cfgPath( *optionList[:-1] )
cfg = __getCfg( section, optionName, value )
if not cfg:
return S_ERROR( 'Wrong option: %s = %s' % ( option, value ) )
if _addCfgToDiracCfg( cfg ):
return S_OK()
return S_ERROR( 'Could not merge %s=%s with local configuration' % ( option, value ) )
def addDefaultOptionsToCS( gConfig, componentType, systemName,
component, extensions, mySetup = setup,
specialOptions = {}, overwrite = False,
addDefaultOptions = True ):
""" Add the section with the component options to the CS
"""
system = systemName.replace( 'System', '' )
instanceOption = cfgPath( 'DIRAC', 'Setups', mySetup, system )
if gConfig:
compInstance = gConfig.getValue( instanceOption, '' )
else:
compInstance = localCfg.getOption( instanceOption, '' )
if not compInstance:
return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
sectionName = "Agents"
if componentType == 'service':
sectionName = "Services"
elif componentType == 'executor':
sectionName = "Executors"
# Check if the component CS options exist
addOptions = True
componentSection = cfgPath( 'Systems', system, compInstance, sectionName, component )
if not overwrite:
if gConfig:
result = gConfig.getOptions( componentSection )
if result['OK']:
addOptions = False
if not addOptions:
return S_OK( 'Component options already exist' )
# Add the component options now
result = getComponentCfg( componentType, system, component, compInstance, extensions, specialOptions, addDefaultOptions )
if not result['OK']:
return result
compCfg = result['Value']
gLogger.notice( 'Adding to CS', '%s %s/%s' % ( componentType, system, component ) )
resultAddToCFG = _addCfgToCS( compCfg )
if componentType == 'executor':
# Is it a container ?
execList = compCfg.getOption( '%s/Load' % componentSection, [] )
for element in execList:
result = addDefaultOptionsToCS( gConfig, componentType, systemName, element, extensions, setup,
{}, overwrite )
resultAddToCFG.setdefault( 'Modules', {} )
resultAddToCFG['Modules'][element] = result['OK']
return resultAddToCFG
def addDefaultOptionsToComponentCfg( componentType, systemName, component, extensions ):
"""
  Add default component options to the local component cfg
"""
system = systemName.replace( 'System', '' )
instanceOption = cfgPath( 'DIRAC', 'Setups', setup, system )
compInstance = localCfg.getOption( instanceOption, '' )
if not compInstance:
return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
# Add the component options now
result = getComponentCfg( componentType, system, component, compInstance, extensions )
if not result['OK']:
return result
compCfg = result['Value']
compCfgFile = os.path.join( rootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
return compCfg.writeToFile( compCfgFile )
def addCfgToComponentCfg( componentType, systemName, component, cfg ):
"""
Add some extra configuration to the local component cfg
"""
sectionName = 'Services'
if componentType == 'agent':
sectionName = 'Agents'
if not cfg:
return S_OK()
system = systemName.replace( 'System', '' )
instanceOption = cfgPath( 'DIRAC', 'Setups', setup, system )
compInstance = localCfg.getOption( instanceOption, '' )
if not compInstance:
return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
compCfgFile = os.path.join( rootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
compCfg = CFG()
if os.path.exists( compCfgFile ):
compCfg.loadFromFile( compCfgFile )
sectionPath = cfgPath( 'Systems', system, compInstance, sectionName )
newCfg = __getCfg( sectionPath )
newCfg.createNewSection( cfgPath( sectionPath, component ), 'Added by InstallTools', cfg )
if newCfg.writeToFile( compCfgFile ):
return S_OK( compCfgFile )
error = 'Can not write %s' % compCfgFile
gLogger.error( error )
return S_ERROR( error )
def getComponentCfg( componentType, system, component, compInstance, extensions,
specialOptions = {}, addDefaultOptions = True ):
"""
Get the CFG object of the component configuration
"""
sectionName = 'Services'
if componentType == 'agent':
sectionName = 'Agents'
if componentType == 'executor':
sectionName = 'Executors'
componentModule = component
if "Module" in specialOptions:
componentModule = specialOptions['Module']
compCfg = CFG()
if addDefaultOptions:
extensionsDIRAC = [ x + 'DIRAC' for x in extensions ] + extensions
for ext in extensionsDIRAC + ['DIRAC']:
cfgTemplatePath = os.path.join( rootPath, ext, '%sSystem' % system, 'ConfigTemplate.cfg' )
if os.path.exists( cfgTemplatePath ):
gLogger.notice( 'Loading configuration template', cfgTemplatePath )
# Look up the component in this template
loadCfg = CFG()
loadCfg.loadFromFile( cfgTemplatePath )
compCfg = loadCfg.mergeWith( compCfg )
compPath = cfgPath( sectionName, componentModule )
if not compCfg.isSection( compPath ):
error = 'Can not find %s in template' % compPath
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
compCfg = compCfg[sectionName][componentModule]
# Delete Dependencies section if any
compCfg.deleteKey( 'Dependencies' )
sectionPath = cfgPath( 'Systems', system, compInstance, sectionName )
cfg = __getCfg( sectionPath )
cfg.createNewSection( cfgPath( sectionPath, component ), '', compCfg )
for option, value in specialOptions.items():
cfg.setOption( cfgPath( sectionPath, component, option ), value )
# Add the service URL
if componentType == "service":
port = compCfg.getOption( 'Port' , 0 )
if port and host:
urlsPath = cfgPath( 'Systems', system, compInstance, 'URLs' )
cfg.createNewSection( urlsPath )
cfg.setOption( cfgPath( urlsPath, component ),
'dips://%s:%d/%s/%s' % ( host, port, system, component ) )
return S_OK( cfg )
def addDatabaseOptionsToCS( gConfig, systemName, dbName, mySetup = setup, overwrite = False ):
"""
Add the section with the database options to the CS
"""
system = systemName.replace( 'System', '' )
instanceOption = cfgPath( 'DIRAC', 'Setups', mySetup, system )
if gConfig:
compInstance = gConfig.getValue( instanceOption, '' )
else:
compInstance = localCfg.getOption( instanceOption, '' )
if not compInstance:
return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
# Check if the component CS options exist
addOptions = True
if not overwrite:
databasePath = cfgPath( 'Systems', system, compInstance, 'Databases', dbName )
result = gConfig.getOptions( databasePath )
if result['OK']:
addOptions = False
if not addOptions:
return S_OK( 'Database options already exist' )
# Add the component options now
result = getDatabaseCfg( system, dbName, compInstance )
if not result['OK']:
return result
databaseCfg = result['Value']
gLogger.notice( 'Adding to CS', '%s/%s' % ( system, dbName ) )
return _addCfgToCS( databaseCfg )
def getDatabaseCfg( system, dbName, compInstance ):
"""
Get the CFG object of the database configuration
"""
databasePath = cfgPath( 'Systems', system, compInstance, 'Databases', dbName )
cfg = __getCfg( databasePath, 'DBName', dbName )
cfg.setOption( cfgPath( databasePath, 'Host' ), mysqlHost )
return S_OK( cfg )
def addSystemInstance( systemName, compInstance, mySetup = setup, myCfg = False ):
"""
Add a new system instance to dirac.cfg and CS
"""
system = systemName.replace( 'System', '' )
gLogger.notice( 'Adding %s system as %s instance for %s setup to dirac.cfg and CS' % ( system, compInstance, mySetup ) )
cfg = __getCfg( cfgPath( 'DIRAC', 'Setups', mySetup ), system, compInstance )
if myCfg:
if not _addCfgToDiracCfg( cfg ):
return S_ERROR( 'Failed to add system instance to dirac.cfg' )
return _addCfgToCS( cfg )
def printStartupStatus( rDict ):
"""
Print in nice format the return dictionary from getStartupComponentStatus
(also returned by runsvctrlComponent)
"""
fields = ['Name','Runit','Uptime','PID']
records = []
try:
for comp in rDict:
records.append( [comp, rDict[comp]['RunitStatus'], rDict[comp]['Timeup'], rDict[comp]['PID'] ] )
printTable( fields, records )
except Exception, x:
print "Exception while gathering data for printing: %s" % str( x )
return S_OK()
def printOverallStatus( rDict ):
"""
Print in nice format the return dictionary from getOverallStatus
"""
fields = ['System','Name','Type','Setup','Installed','Runit','Uptime','PID']
records = []
try:
for compType in rDict:
for system in rDict[compType]:
for component in rDict[compType][system]:
record = [ system, component, compType.lower()[:-1] ]
if rDict[compType][system][component]['Setup']:
record.append( 'SetUp' )
else:
record.append( 'NotSetUp' )
if rDict[compType][system][component]['Installed']:
record.append( 'Installed' )
else:
record.append( 'NotInstalled' )
record.append( str( rDict[compType][system][component]['RunitStatus'] ) )
record.append( str( rDict[compType][system][component]['Timeup'] ) )
record.append( str( rDict[compType][system][component]['PID'] ) )
records.append( record )
printTable( fields, records )
except Exception, x:
print "Exception while gathering data for printing: %s" % str( x )
return S_OK()
def getAvailableSystems( extensions ):
""" Get the list of all systems (in all given extensions) locally available
"""
systems = []
for extension in extensions:
extensionPath = os.path.join( DIRAC.rootPath, extension, '*System' )
for system in [ os.path.basename( k ).split( 'System' )[0] for k in glob.glob( extensionPath ) ]:
if system not in systems:
systems.append( system )
return systems
def getSoftwareComponents( extensions ):
""" Get the list of all the components ( services and agents ) for which the software
is installed on the system
"""
# The Gateway does not need a handler
services = { 'Framework' : ['Gateway'] }
agents = {}
executors = {}
for extension in ['DIRAC'] + [ x + 'DIRAC' for x in extensions]:
if not os.path.exists( os.path.join( rootPath, extension ) ):
# Not all the extensions are necessarily installed in this instance
continue
systemList = os.listdir( os.path.join( rootPath, extension ) )
for sys in systemList:
system = sys.replace( 'System', '' )
try:
agentDir = os.path.join( rootPath, extension, sys, 'Agent' )
agentList = os.listdir( agentDir )
for agent in agentList:
if agent[-3:] == ".py":
agentFile = os.path.join( agentDir, agent )
afile = open( agentFile, 'r' )
body = afile.read()
afile.close()
if body.find( 'AgentModule' ) != -1 or body.find( 'OptimizerModule' ) != -1:
if not agents.has_key( system ):
agents[system] = []
agents[system].append( agent.replace( '.py', '' ) )
except OSError:
pass
try:
serviceDir = os.path.join( rootPath, extension, sys, 'Service' )
serviceList = os.listdir( serviceDir )
for service in serviceList:
if service.find( 'Handler' ) != -1 and service[-3:] == '.py':
if not services.has_key( system ):
services[system] = []
if system == 'Configuration' and service == 'ConfigurationHandler.py':
service = 'ServerHandler.py'
services[system].append( service.replace( '.py', '' ).replace( 'Handler', '' ) )
except OSError:
pass
try:
executorDir = os.path.join( rootPath, extension, sys, 'Executor' )
executorList = os.listdir( executorDir )
for executor in executorList:
if executor[-3:] == ".py":
executorFile = os.path.join( executorDir, executor )
afile = open( executorFile, 'r' )
body = afile.read()
afile.close()
if body.find( 'OptimizerExecutor' ) != -1:
if not executors.has_key( system ):
executors[system] = []
executors[system].append( executor.replace( '.py', '' ) )
except OSError:
pass
resultDict = {}
resultDict['Services'] = services
resultDict['Agents'] = agents
resultDict['Executors'] = executors
return S_OK( resultDict )
def getInstalledComponents():
"""
Get the list of all the components ( services and agents )
installed on the system in the runit directory
"""
services = {}
agents = {}
executors = {}
systemList = os.listdir( runitDir )
for system in systemList:
systemDir = os.path.join( runitDir, system )
components = os.listdir( systemDir )
for component in components:
try:
runFile = os.path.join( systemDir, component, 'run' )
rfile = open( runFile, 'r' )
body = rfile.read()
rfile.close()
if body.find( 'dirac-service' ) != -1:
if not services.has_key( system ):
services[system] = []
services[system].append( component )
elif body.find( 'dirac-agent' ) != -1:
if not agents.has_key( system ):
agents[system] = []
agents[system].append( component )
elif body.find( 'dirac-executor' ) != -1:
if not executors.has_key( system ):
executors[system] = []
executors[system].append( component )
except IOError:
pass
resultDict = {}
resultDict['Services'] = services
resultDict['Agents'] = agents
resultDict['Executors'] = executors
return S_OK( resultDict )
def getSetupComponents():
""" Get the list of all the components ( services and agents )
set up for running with runsvdir in startup directory
"""
services = {}
agents = {}
executors = {}
if not os.path.isdir( startDir ):
    return S_ERROR( 'Startup Directory does not exist: %s' % startDir )
componentList = os.listdir( startDir )
for component in componentList:
try:
runFile = os.path.join( startDir, component, 'run' )
rfile = open( runFile, 'r' )
body = rfile.read()
rfile.close()
if body.find( 'dirac-service' ) != -1:
system, service = component.split( '_' )[0:2]
if not services.has_key( system ):
services[system] = []
services[system].append( service )
elif body.find( 'dirac-agent' ) != -1:
system, agent = component.split( '_' )[0:2]
if not agents.has_key( system ):
agents[system] = []
agents[system].append( agent )
elif body.find( 'dirac-executor' ) != -1:
system, executor = component.split( '_' )[0:2]
if not executors.has_key( system ):
executors[system] = []
executors[system].append( executor )
except IOError:
pass
resultDict = {}
resultDict['Services'] = services
resultDict['Agents'] = agents
resultDict['Executors'] = executors
return S_OK( resultDict )
def getStartupComponentStatus( componentTupleList ):
""" Get the list of all the components ( services and agents )
set up for running with runsvdir in startup directory
"""
try:
if componentTupleList:
cList = []
for componentTuple in componentTupleList:
cList.extend( glob.glob( os.path.join( startDir, '_'.join( componentTuple ) ) ) )
else:
cList = glob.glob( os.path.join( startDir, '*' ) )
except Exception:
error = 'Failed to parse List of Components'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
result = execCommand( 0, ['runsvstat'] + cList )
if not result['OK']:
return result
output = result['Value'][1].strip().split( '\n' )
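  # Each runsvstat output line is expected to look roughly like (illustrative):
  #   /opt/dirac/startup/Framework_SystemAdministrator: run (pid 12345) 67890 seconds
  # or '...: down 42 seconds' / '...: runsv not running'; the regexes below parse these fields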
componentDict = {}
for line in output:
if not line:
continue
cname, routput = line.split( ':' )
cname = cname.replace( '%s/' % startDir, '' )
run = False
reResult = re.search( '^ run', routput )
if reResult:
run = True
down = False
reResult = re.search( '^ down', routput )
if reResult:
down = True
reResult = re.search( '([0-9]+) seconds', routput )
timeup = 0
if reResult:
timeup = reResult.group( 1 )
reResult = re.search( 'pid ([0-9]+)', routput )
pid = 0
if reResult:
pid = reResult.group( 1 )
runsv = "Not running"
if run or down:
runsv = "Running"
reResult = re.search( 'runsv not running', routput )
if reResult:
runsv = "Not running"
runDict = {}
runDict['Timeup'] = timeup
runDict['PID'] = pid
runDict['RunitStatus'] = "Unknown"
if run:
runDict['RunitStatus'] = "Run"
if down:
runDict['RunitStatus'] = "Down"
if runsv == "Not running":
runDict['RunitStatus'] = "NoRunitControl"
componentDict[cname] = runDict
return S_OK( componentDict )
def getComponentModule( gConfig, system, component, compType ):
""" Get the component software module
"""
setup = CSGlobals.getSetup()
instance = gConfig.getValue( cfgPath( 'DIRAC', 'Setups', setup, system ), '' )
if not instance:
return S_OK( component )
module = gConfig.getValue( cfgPath( 'Systems', system, instance, compType, component, 'Module' ), '' )
if not module:
module = component
return S_OK( module )
def getOverallStatus( extensions ):
""" Get the list of all the components ( services and agents )
set up for running with runsvdir in startup directory
"""
result = getSoftwareComponents( extensions )
if not result['OK']:
return result
softDict = result['Value']
result = getSetupComponents()
if not result['OK']:
return result
setupDict = result['Value']
result = getInstalledComponents()
if not result['OK']:
return result
installedDict = result['Value']
result = getStartupComponentStatus( [] )
if not result['OK']:
return result
runitDict = result['Value']
# Collect the info now
resultDict = {'Services':{}, 'Agents':{}, 'Executors':{} }
for compType in ['Services', 'Agents', 'Executors' ]:
    if softDict.has_key( compType ):
for system in softDict[compType]:
resultDict[compType][system] = {}
for component in softDict[compType][system]:
if system == 'Configuration' and component == 'Configuration':
# Fix to avoid missing CS due to different between Service name and Handler name
component = 'Server'
resultDict[compType][system][component] = {}
resultDict[compType][system][component]['Setup'] = False
resultDict[compType][system][component]['Installed'] = False
resultDict[compType][system][component]['RunitStatus'] = 'Unknown'
resultDict[compType][system][component]['Timeup'] = 0
resultDict[compType][system][component]['PID'] = 0
# TODO: why do we need a try here?
try:
if component in setupDict[compType][system]:
resultDict[compType][system][component]['Setup'] = True
except Exception:
pass
try:
if component in installedDict[compType][system]:
resultDict[compType][system][component]['Installed'] = True
except Exception:
pass
try:
compDir = system + '_' + component
if runitDict.has_key( compDir ):
resultDict[compType][system][component]['RunitStatus'] = runitDict[compDir]['RunitStatus']
resultDict[compType][system][component]['Timeup'] = runitDict[compDir]['Timeup']
resultDict[compType][system][component]['PID'] = runitDict[compDir]['PID']
except Exception, x:
#print str(x)
pass
    # Installed components may not be the same as in the software list
    if installedDict.has_key( compType ):
for system in installedDict[compType]:
for component in installedDict[compType][system]:
if compType in resultDict:
if system in resultDict[compType]:
if component in resultDict[compType][system]:
continue
resultDict[compType][system][component] = {}
resultDict[compType][system][component]['Setup'] = False
resultDict[compType][system][component]['Installed'] = True
resultDict[compType][system][component]['RunitStatus'] = 'Unknown'
resultDict[compType][system][component]['Timeup'] = 0
resultDict[compType][system][component]['PID'] = 0
# TODO: why do we need a try here?
try:
if component in setupDict[compType][system]:
resultDict[compType][system][component]['Setup'] = True
except Exception:
pass
try:
compDir = system + '_' + component
if runitDict.has_key( compDir ):
resultDict[compType][system][component]['RunitStatus'] = runitDict[compDir]['RunitStatus']
resultDict[compType][system][component]['Timeup'] = runitDict[compDir]['Timeup']
resultDict[compType][system][component]['PID'] = runitDict[compDir]['PID']
except Exception, x:
#print str(x)
pass
return S_OK( resultDict )
def checkComponentModule( componentType, system, module ):
""" Check existence of the given module
and if it inherits from the proper class
"""
if componentType == 'agent':
loader = ModuleLoader( "Agent", PathFinder.getAgentSection, AgentModule )
elif componentType == 'service':
loader = ModuleLoader( "Service", PathFinder.getServiceSection,
RequestHandler, moduleSuffix = "Handler" )
elif componentType == 'executor':
loader = ModuleLoader( "Executor", PathFinder.getExecutorSection, ExecutorModule )
else:
return S_ERROR( 'Unknown component type %s' % componentType )
return loader.loadModule( "%s/%s" % ( system, module ) )
def checkComponentSoftware( componentType, system, component, extensions ):
""" Check the component software
"""
result = getSoftwareComponents( extensions )
if not result['OK']:
return result
if componentType == 'service':
softDict = result['Value']['Services']
elif componentType == 'agent':
softDict = result['Value']['Agents']
else:
return S_ERROR( 'Unknown component type %s' % componentType )
if system in softDict and component in softDict[system]:
return S_OK()
return S_ERROR( 'Unknown Component %s/%s' % ( system, component ) )
def runsvctrlComponent( system, component, mode ):
"""
Execute runsvctrl and check status of the specified component
"""
if not mode in ['u', 'd', 'o', 'p', 'c', 'h', 'a', 'i', 'q', '1', '2', 't', 'k', 'x', 'e']:
return S_ERROR( 'Unknown runsvctrl mode "%s"' % mode )
startCompDirs = glob.glob( os.path.join( startDir, '%s_%s' % ( system, component ) ) )
# Make sure that the Configuration server restarts first and the SystemAdmin restarts last
tmpList = list( startCompDirs )
for comp in tmpList:
if "Framework_SystemAdministrator" in comp:
startCompDirs.append( startCompDirs.pop( startCompDirs.index( comp ) ) )
if "Configuration_Server" in comp:
startCompDirs.insert( 0, startCompDirs.pop( startCompDirs.index( comp ) ) )
startCompList = [ [k] for k in startCompDirs]
for startComp in startCompList:
result = execCommand( 0, ['runsvctrl', mode] + startComp )
if not result['OK']:
return result
time.sleep( 1 )
# Check the runsv status
if system == '*' or component == '*':
time.sleep( 5 )
# Final check
result = getStartupComponentStatus( [( system, component )] )
if not result['OK']:
return S_ERROR( 'Failed to start the component' )
return result
def getLogTail( system, component, length = 100 ):
"""
Get the tail of the component log file
"""
retDict = {}
for startCompDir in glob.glob( os.path.join( startDir, '%s_%s' % ( system, component ) ) ):
compName = os.path.basename( startCompDir )
logFileName = os.path.join( startCompDir, 'log', 'current' )
if not os.path.exists( logFileName ):
retDict[compName] = 'No log file found'
else:
logFile = open( logFileName, 'r' )
lines = [ line.strip() for line in logFile.readlines() ]
logFile.close()
if len( lines ) < length:
retDict[compName] = '\n'.join( lines )
else:
retDict[compName] = '\n'.join( lines[-length:] )
return S_OK( retDict )
def setupSite( scriptCfg, cfg = None ):
"""
Setup a new site using the options defined
"""
# First we need to find out what needs to be installed
# by default use dirac.cfg, but if a cfg is given use it and
# merge it into the dirac.cfg
diracCfg = CFG()
installCfg = None
if cfg:
try:
installCfg = CFG()
installCfg.loadFromFile( cfg )
for section in ['DIRAC', 'LocalSite', cfgInstallSection]:
if installCfg.isSection( section ):
diracCfg.createNewSection( section, contents = installCfg[section] )
if instancePath != basePath:
if not diracCfg.isSection( 'LocalSite' ):
diracCfg.createNewSection( 'LocalSite' )
diracCfg.setOption( cfgPath( 'LocalSite', 'InstancePath' ), instancePath )
_addCfgToDiracCfg( diracCfg, verbose = True )
except Exception:
error = 'Failed to load %s' % cfg
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
# Now get the necessary info from localCfg
setupSystems = localCfg.getOption( cfgInstallPath( 'Systems' ), ['Configuration', 'Framework'] )
setupDatabases = localCfg.getOption( cfgInstallPath( 'Databases' ), [] )
setupServices = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Services' ), [] ) ]
setupAgents = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Agents' ), [] ) ]
setupExecutors = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Executors' ), [] ) ]
setupWeb = localCfg.getOption( cfgInstallPath( 'WebPortal' ), False )
setupConfigurationMaster = localCfg.getOption( cfgInstallPath( 'ConfigurationMaster' ), False )
setupPrivateConfiguration = localCfg.getOption( cfgInstallPath( 'PrivateConfiguration' ), False )
setupConfigurationName = localCfg.getOption( cfgInstallPath( 'ConfigurationName' ), setup )
setupAddConfiguration = localCfg.getOption( cfgInstallPath( 'AddConfiguration' ), True )
for serviceTuple in setupServices:
error = ''
if len( serviceTuple ) != 2:
error = 'Wrong service specification: system/service'
# elif serviceTuple[0] not in setupSystems:
# error = 'System %s not available' % serviceTuple[0]
if error:
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
serviceSysInstance = serviceTuple[0]
if not serviceSysInstance in setupSystems:
setupSystems.append( serviceSysInstance )
for agentTuple in setupAgents:
error = ''
if len( agentTuple ) != 2:
error = 'Wrong agent specification: system/agent'
# elif agentTuple[0] not in setupSystems:
# error = 'System %s not available' % agentTuple[0]
if error:
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
agentSysInstance = agentTuple[0]
if not agentSysInstance in setupSystems:
setupSystems.append( agentSysInstance )
for executorTuple in setupExecutors:
error = ''
if len( executorTuple ) != 2:
error = 'Wrong executor specification: system/executor'
if error:
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
executorSysInstance = executorTuple[0]
if not executorSysInstance in setupSystems:
setupSystems.append( executorSysInstance )
# And to find out the available extensions
result = getExtensions()
if not result['OK']:
return result
extensions = [ k.replace( 'DIRAC', '' ) for k in result['Value']]
# Make sure the necessary directories are there
if basePath != instancePath:
if not os.path.exists( instancePath ):
try:
os.makedirs( instancePath )
except Exception:
error = 'Can not create directory for instance %s' % instancePath
if exitOnError:
gLogger.exception( error )
DIRAC.exit( -1 )
return S_ERROR( error )
if not os.path.isdir( instancePath ):
error = 'Instance directory %s is not valid' % instancePath
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
instanceEtcDir = os.path.join( instancePath, 'etc' )
etcDir = os.path.dirname( cfgFile )
if not os.path.exists( instanceEtcDir ):
try:
os.symlink( etcDir, instanceEtcDir )
except Exception:
error = 'Can not create link to configuration %s' % instanceEtcDir
if exitOnError:
gLogger.exception( error )
DIRAC.exit( -1 )
return S_ERROR( error )
if os.path.realpath( instanceEtcDir ) != os.path.realpath( etcDir ):
error = 'Instance etc (%s) is not the same as DIRAC etc (%s)' % ( instanceEtcDir, etcDir )
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
  # if any server or agent needs to be installed we need the startup directory and runsvdir running
if setupServices or setupAgents or setupExecutors or setupWeb:
if not os.path.exists( startDir ):
try:
os.makedirs( startDir )
except Exception:
error = 'Can not create %s' % startDir
if exitOnError:
gLogger.exception( error )
DIRAC.exit( -1 )
return S_ERROR( error )
# And need to make sure runsvdir is running
result = execCommand( 0, ['ps', '-ef'] )
if not result['OK']:
if exitOnError:
gLogger.error( result['Message'] )
DIRAC.exit( -1 )
return S_ERROR( result['Message'] )
processList = result['Value'][1].split( '\n' )
cmd = 'runsvdir %s' % startDir
cmdFound = False
for process in processList:
if process.find( cmd ) != -1:
cmdFound = True
if not cmdFound:
gLogger.notice( 'Starting runsvdir ...' )
os.system( "runsvdir %s 'log: DIRAC runsv' &" % startDir )
if ['Configuration', 'Server'] in setupServices and setupConfigurationMaster:
# This server hosts the Master of the CS
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
gLogger.notice( 'Installing Master Configuration Server' )
cfg = __getCfg( cfgPath( 'DIRAC', 'Setups', setup ), 'Configuration', instance )
_addCfgToDiracCfg( cfg )
cfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Master' , 'yes' )
cfg.setOption( cfgPath( 'DIRAC', 'Configuration', 'Name' ) , setupConfigurationName )
serversCfgPath = cfgPath( 'DIRAC', 'Configuration', 'Servers' )
if not localCfg.getOption( serversCfgPath , [] ):
serverUrl = 'dips://%s:9135/Configuration/Server' % host
cfg.setOption( serversCfgPath, serverUrl )
gConfigurationData.setOptionInCFG( serversCfgPath, serverUrl )
instanceOptionPath = cfgPath( 'DIRAC', 'Setups', setup )
instanceCfg = __getCfg( instanceOptionPath, 'Configuration', instance )
cfg = cfg.mergeWith( instanceCfg )
_addCfgToDiracCfg( cfg )
result = getComponentCfg( 'service', 'Configuration', 'Server', instance, extensions, addDefaultOptions = True )
if not result['OK']:
if exitOnError:
DIRAC.exit( -1 )
else:
return result
compCfg = result['Value']
cfg = cfg.mergeWith( compCfg )
gConfigurationData.mergeWithLocal( cfg )
addDefaultOptionsToComponentCfg( 'service', 'Configuration', 'Server', [] )
if installCfg:
centralCfg = _getCentralCfg( installCfg )
else:
centralCfg = _getCentralCfg( localCfg )
_addCfgToLocalCS( centralCfg )
setupComponent( 'service', 'Configuration', 'Server', [], checkModule = False )
runsvctrlComponent( 'Configuration', 'Server', 't' )
while ['Configuration', 'Server'] in setupServices:
setupServices.remove( ['Configuration', 'Server'] )
time.sleep( 5 )
  # Now need to check if there is a valid CS to register the info
result = scriptCfg.enableCS()
if not result['OK']:
if exitOnError:
DIRAC.exit( -1 )
return result
cfgClient = CSAPI()
if not cfgClient.initialize():
error = 'Configuration Server not defined'
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
  # We need to make sure components are connecting to the Master CS, which is the only one being updated
from DIRAC import gConfig
localServers = localCfg.getOption( cfgPath( 'DIRAC', 'Configuration', 'Servers' ) )
masterServer = gConfig.getValue( cfgPath( 'DIRAC', 'Configuration', 'MasterServer' ), '' )
initialCfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Servers' , localServers )
masterCfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Servers' , masterServer )
_addCfgToDiracCfg( masterCfg )
# 1.- Setup the instances in the CS
# If the Configuration Server used is not the Master, it can take some time for this
  # info to be propagated; this may cause the later setup to fail
if setupAddConfiguration:
gLogger.notice( 'Registering System instances' )
for system in setupSystems:
addSystemInstance( system, instance, setup, True )
for system, service in setupServices:
if not addDefaultOptionsToCS( None, 'service', system, service, extensions, overwrite = True )['OK']:
# If we are not allowed to write to the central CS, add the configuration to the local file
addDefaultOptionsToComponentCfg( 'service', system, service, extensions )
for system, agent in setupAgents:
if not addDefaultOptionsToCS( None, 'agent', system, agent, extensions, overwrite = True )['OK']:
# If we are not allowed to write to the central CS, add the configuration to the local file
addDefaultOptionsToComponentCfg( 'agent', system, agent, extensions )
for system, executor in setupExecutors:
if not addDefaultOptionsToCS( None, 'executor', system, executor, extensions, overwrite = True )['OK']:
# If we are not allowed to write to the central CS, add the configuration to the local file
addDefaultOptionsToComponentCfg( 'executor', system, executor, extensions )
else:
gLogger.warn( 'Configuration parameters definition is not requested' )
if ['Configuration', 'Server'] in setupServices and setupPrivateConfiguration:
cfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'AutoPublish' , 'no' )
_addCfgToDiracCfg( cfg )
# 2.- Check if MySQL is required
if setupDatabases:
gLogger.notice( 'Installing MySQL' )
getMySQLPasswords()
installMySQL()
# 3.- And install requested Databases
result = getDatabases()
if not result['OK']:
if exitOnError:
gLogger.error( result['Message'] )
DIRAC.exit( -1 )
return result
installedDatabases = result['Value']
for dbName in setupDatabases:
if dbName not in installedDatabases:
extension, system = installDatabase( dbName )['Value']
gLogger.notice( 'Database %s from %s/%s installed' % ( dbName, extension, system ) )
result = addDatabaseOptionsToCS( None, system, dbName, overwrite = True )
if not result['OK']:
gLogger.error( 'Database %s CS registration failed: %s' % ( dbName, result['Message'] ) )
gLogger.notice( 'Database %s already installed' % dbName )
if mysqlPassword:
if not _addMySQLToDiracCfg():
error = 'Failed to add MySQL user password to local configuration'
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
  # 4.- Then install the requested services
for system, service in setupServices:
setupComponent( 'service', system, service, extensions )
# 5.- Now the agents
for system, agent in setupAgents:
setupComponent( 'agent', system, agent, extensions )
# 6.- Now the executors
for system, executor in setupExecutors:
setupComponent( 'executor', system, executor, extensions )
# 7.- And finally the Portal
if setupWeb:
setupPortal()
if localServers != masterServer:
_addCfgToDiracCfg( initialCfg )
for system, service in setupServices:
runsvctrlComponent( system, service, 't' )
for system, agent in setupAgents:
runsvctrlComponent( system, agent, 't' )
for system, executor in setupExecutors:
runsvctrlComponent( system, executor, 't' )
return S_OK()
def _createRunitLog( runitCompDir ):
logDir = os.path.join( runitCompDir, 'log' )
os.makedirs( logDir )
logConfigFile = os.path.join( logDir, 'config' )
fd = open( logConfigFile, 'w' )
fd.write(
"""s10000000
n20
""" )
fd.close()
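  # svlogd configuration written above: 's10000000' rotates the current log file at ~10 MB,
  # 'n20' keeps at most 20 rotated log files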
logRunFile = os.path.join( logDir, 'run' )
fd = open( logRunFile, 'w' )
fd.write(
"""#!/bin/bash
#
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec svlogd .
""" % { 'bashrc' : os.path.join( instancePath, 'bashrc' ) } )
fd.close()
os.chmod( logRunFile, gDefaultPerms )
def installComponent( componentType, system, component, extensions, componentModule = '', checkModule = True ):
""" Install runit directory for the specified component
"""
# Check if the component is already installed
runitCompDir = os.path.join( runitDir, system, component )
if os.path.exists( runitCompDir ):
msg = "%s %s_%s already installed" % ( componentType, system, component )
gLogger.notice( msg )
return S_OK( runitCompDir )
# Check that the software for the component is installed
# Any "Load" or "Module" option in the configuration defining what modules the given "component"
# needs to load will be taken care of by checkComponentModule.
if checkModule:
result = checkComponentModule( componentType, system, component )
if not result['OK']:
# cModule = componentModule
# if not cModule:
# cModule = component
# if not checkComponentSoftware( componentType, system, cModule, extensions )['OK'] and componentType != 'executor':
error = 'Software for %s %s/%s is not installed' % ( componentType, system, component )
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
gLogger.notice( 'Installing %s %s/%s' % ( componentType, system, component ) )
# Now do the actual installation
try:
componentCfg = os.path.join( linkedRootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
if not os.path.exists( componentCfg ):
fd = open( componentCfg, 'w' )
fd.close()
_createRunitLog( runitCompDir )
runFile = os.path.join( runitCompDir, 'run' )
fd = open( runFile, 'w' )
fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
[ "%(componentType)s" = "agent" ] && renice 20 -p $$
#
exec python $DIRAC/DIRAC/Core/scripts/dirac-%(componentType)s.py %(system)s/%(component)s %(componentCfg)s < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
'componentType': componentType,
'system' : system,
'component': component,
'componentCfg': componentCfg } )
fd.close()
os.chmod( runFile, gDefaultPerms )
except Exception:
error = 'Failed to prepare setup for %s %s/%s' % ( componentType, system, component )
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
result = execCommand( 5, [runFile] )
gLogger.notice( result['Value'][1] )
return S_OK( runitCompDir )
def setupComponent( componentType, system, component, extensions, componentModule = '', checkModule = True ):
"""
Install and create link in startup
"""
result = installComponent( componentType, system, component, extensions, componentModule, checkModule )
if not result['OK']:
return result
# Create the startup entry now
runitCompDir = result['Value']
startCompDir = os.path.join( startDir, '%s_%s' % ( system, component ) )
if not os.path.exists( startDir ):
os.makedirs( startDir )
if not os.path.lexists( startCompDir ):
gLogger.notice( 'Creating startup link at', startCompDir )
os.symlink( runitCompDir, startCompDir )
time.sleep( 10 )
# Check the runsv status
start = time.time()
while ( time.time() - 20 ) < start:
result = getStartupComponentStatus( [ ( system, component )] )
if not result['OK']:
continue
if result['Value'] and result['Value']['%s_%s' % ( system, component )]['RunitStatus'] == "Run":
break
time.sleep( 1 )
# Final check
result = getStartupComponentStatus( [( system, component )] )
if not result['OK']:
return S_ERROR( 'Failed to start the component %s_%s' % ( system, component ) )
resDict = {}
resDict['ComponentType'] = componentType
resDict['RunitStatus'] = result['Value']['%s_%s' % ( system, component )]['RunitStatus']
return S_OK( resDict )
def unsetupComponent( system, component ):
"""
Remove link from startup
"""
for startCompDir in glob.glob( os.path.join( startDir, '%s_%s' % ( system, component ) ) ):
try:
os.unlink( startCompDir )
except Exception:
gLogger.exception()
return S_OK()
def uninstallComponent( system, component ):
"""
Remove startup and runit directories
"""
result = runsvctrlComponent( system, component, 'd' )
if not result['OK']:
pass
result = unsetupComponent( system, component )
for runitCompDir in glob.glob( os.path.join( runitDir, system, component ) ):
try:
shutil.rmtree( runitCompDir )
except Exception:
gLogger.exception()
return S_OK()
def installPortal():
"""
Install runit directories for the Web Portal
"""
# Check that the software for the Web Portal is installed
error = ''
webDir = os.path.join( linkedRootPath, 'Web' )
if not os.path.exists( webDir ):
error = 'Web extension not installed at %s' % webDir
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
  # First the lighttpd server
# Check if the component is already installed
runitHttpdDir = os.path.join( runitDir, 'Web', 'httpd' )
runitPasterDir = os.path.join( runitDir, 'Web', 'paster' )
if os.path.exists( runitHttpdDir ):
msg = "lighthttpd already installed"
gLogger.notice( msg )
else:
gLogger.notice( 'Installing Lighttpd' )
# Now do the actual installation
try:
_createRunitLog( runitHttpdDir )
runFile = os.path.join( runitHttpdDir, 'run' )
fd = open( runFile, 'w' )
fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
exec lighttpdSvc.sh < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ), } )
fd.close()
os.chmod( runFile, gDefaultPerms )
except Exception:
error = 'Failed to prepare setup for lighttpd'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
result = execCommand( 5, [runFile] )
gLogger.notice( result['Value'][1] )
# Second the Web portal
# Check if the component is already installed
if os.path.exists( runitPasterDir ):
msg = "Web Portal already installed"
gLogger.notice( msg )
else:
gLogger.notice( 'Installing Web Portal' )
# Now do the actual installation
try:
_createRunitLog( runitPasterDir )
runFile = os.path.join( runitPasterDir, 'run' )
fd = open( runFile, 'w' )
fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
cd %(DIRAC)s/Web
exec paster serve --reload production.ini < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
'DIRAC': linkedRootPath} )
fd.close()
os.chmod( runFile, gDefaultPerms )
except Exception:
error = 'Failed to prepare setup for Web Portal'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
result = execCommand( 5, [runFile] )
gLogger.notice( result['Value'][1] )
return S_OK( [runitHttpdDir, runitPasterDir] )
def setupPortal():
"""
Install and create link in startup
"""
result = installPortal()
if not result['OK']:
return result
# Create the startup entries now
runitCompDir = result['Value']
startCompDir = [ os.path.join( startDir, 'Web_httpd' ),
os.path.join( startDir, 'Web_paster' ) ]
if not os.path.exists( startDir ):
os.makedirs( startDir )
for i in range( 2 ):
if not os.path.lexists( startCompDir[i] ):
gLogger.notice( 'Creating startup link at', startCompDir[i] )
os.symlink( runitCompDir[i], startCompDir[i] )
time.sleep( 1 )
time.sleep( 5 )
# Check the runsv status
start = time.time()
while ( time.time() - 10 ) < start:
result = getStartupComponentStatus( [ ( 'Web', 'httpd' ), ( 'Web', 'paster' ) ] )
if not result['OK']:
return S_ERROR( 'Failed to start the Portal' )
if result['Value'] and \
result['Value']['%s_%s' % ( 'Web', 'httpd' )]['RunitStatus'] == "Run" and \
result['Value']['%s_%s' % ( 'Web', 'paster' )]['RunitStatus'] == "Run" :
break
time.sleep( 1 )
# Final check
return getStartupComponentStatus( [ ( 'Web', 'httpd' ), ( 'Web', 'paster' ) ] )
def fixMySQLScripts( startupScript = mysqlStartupScript ):
"""
Edit MySQL scripts to point to desired locations for db and my.cnf
"""
gLogger.verbose( 'Updating:', startupScript )
try:
fd = open( startupScript, 'r' )
orgLines = fd.readlines()
fd.close()
fd = open( startupScript, 'w' )
for line in orgLines:
if line.find( 'export HOME' ) == 0:
continue
if line.find( 'datadir=' ) == 0:
line = 'datadir=%s\n' % mysqlDbDir
gLogger.debug( line )
line += 'export HOME=%s\n' % mysqlDir
if line.find( 'basedir=' ) == 0:
platform = getPlatformString()
line = 'basedir=%s\n' % os.path.join( rootPath, platform )
fd.write( line )
fd.close()
except Exception:
error = 'Failed to Update MySQL startup script'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
return S_OK()
def mysqlInstalled( doNotExit = False ):
"""
Check if MySQL is already installed
"""
if os.path.exists( mysqlDbDir ) or os.path.exists( mysqlLogDir ):
return S_OK()
if doNotExit:
return S_ERROR()
error = 'MySQL not properly Installed'
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
def getMySQLPasswords():
"""
Get MySQL passwords from local configuration or prompt
"""
import getpass
global mysqlRootPwd, mysqlPassword
if not mysqlRootPwd:
mysqlRootPwd = getpass.getpass( 'MySQL root password: ' )
if not mysqlPassword:
# Take it if it is already defined
mysqlPassword = localCfg.getOption( '/Systems/Databases/Password', '' )
if not mysqlPassword:
mysqlPassword = getpass.getpass( 'MySQL Dirac password: ' )
return S_OK()
def setMySQLPasswords( root = '', dirac = '' ):
"""
Set MySQL passwords
"""
global mysqlRootPwd, mysqlPassword
if root:
mysqlRootPwd = root
if dirac:
mysqlPassword = dirac
return S_OK()
def startMySQL():
"""
Start MySQL server
"""
result = mysqlInstalled()
if not result['OK']:
return result
return execCommand( 0, [mysqlStartupScript, 'start'] )
def stopMySQL():
"""
Stop MySQL server
"""
result = mysqlInstalled()
if not result['OK']:
return result
return execCommand( 0, [mysqlStartupScript, 'stop'] )
def installMySQL():
"""
Attempt an installation of MySQL
mode:
Master
Slave
None
"""
fixMySQLScripts()
if mysqlInstalled( doNotExit = True )['OK']:
gLogger.notice( 'MySQL already installed' )
return S_OK()
if mysqlMode.lower() not in [ '', 'master', 'slave' ]:
error = 'Unknown MySQL server Mode'
if exitOnError:
gLogger.fatal( error, mysqlMode )
DIRAC.exit( -1 )
gLogger.error( error, mysqlMode )
return S_ERROR( error )
if mysqlHost:
gLogger.notice( 'Installing MySQL server at', mysqlHost )
if mysqlMode:
    gLogger.notice( 'This is a MySQL %s server' % mysqlMode )
try:
os.makedirs( mysqlDbDir )
os.makedirs( mysqlLogDir )
except Exception:
error = 'Can not create MySQL dirs'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
try:
fd = open( mysqlMyOrg, 'r' )
myOrg = fd.readlines()
fd.close()
fd = open( mysqlMyCnf, 'w' )
for line in myOrg:
if line.find( '[mysqld]' ) == 0:
line += '\n'.join( [ 'innodb_file_per_table', '' ] )
elif line.find( 'innodb_log_arch_dir' ) == 0:
line = ''
elif line.find( 'innodb_data_file_path' ) == 0:
line = line.replace( '2000M', '200M' )
elif line.find( 'server-id' ) == 0 and mysqlMode.lower() == 'master':
# MySQL Configuration for Master Server
line = '\n'.join( ['server-id = 1',
'# DIRAC Master-Server',
'sync-binlog = 1',
'replicate-ignore-table = mysql.MonitorData',
'# replicate-ignore-db=db_name',
'log-bin = mysql-bin',
'log-slave-updates', '' ] )
elif line.find( 'server-id' ) == 0 and mysqlMode.lower() == 'slave':
# MySQL Configuration for Slave Server
line = '\n'.join( ['server-id = %s' % int( time.time() ),
'# DIRAC Slave-Server',
'sync-binlog = 1',
'replicate-ignore-table = mysql.MonitorData',
'# replicate-ignore-db=db_name',
'log-bin = mysql-bin',
'log-slave-updates', '' ] )
elif line.find( '/opt/dirac/mysql' ) > -1:
line = line.replace( '/opt/dirac/mysql', mysqlDir )
if mysqlSmallMem:
if line.find( 'innodb_buffer_pool_size' ) == 0:
line = 'innodb_buffer_pool_size = 200M\n'
elif mysqlLargeMem:
if line.find( 'innodb_buffer_pool_size' ) == 0:
line = 'innodb_buffer_pool_size = 10G\n'
fd.write( line )
fd.close()
except Exception:
error = 'Can not create my.cnf'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
gLogger.notice( 'Initializing MySQL...' )
result = execCommand( 0, ['mysql_install_db',
'--defaults-file=%s' % mysqlMyCnf,
'--datadir=%s' % mysqlDbDir ] )
if not result['OK']:
return result
gLogger.notice( 'Starting MySQL...' )
result = startMySQL()
if not result['OK']:
return result
gLogger.notice( 'Setting MySQL root password' )
result = execCommand( 0, ['mysqladmin', '-u', 'root', 'password', mysqlRootPwd] )
if not result['OK']:
return result
  # MySQL tends to define the root user as root@<short host name> rather than root@<fully qualified host name>
hostName = mysqlHost.split('.')[0]
result = execMySQL( "UPDATE user SET Host='%s' WHERE Host='%s'" % (mysqlHost,hostName),
localhost=True )
if not result['OK']:
return result
result = execMySQL( "FLUSH PRIVILEGES" )
if not result['OK']:
return result
if mysqlHost and socket.gethostbyname( mysqlHost ) != '127.0.0.1' :
result = execCommand( 0, ['mysqladmin', '-u', 'root', '-h', mysqlHost, 'password', mysqlRootPwd] )
if not result['OK']:
return result
result = execMySQL( "DELETE from user WHERE Password=''", localhost=True )
if not _addMySQLToDiracCfg():
return S_ERROR( 'Failed to add MySQL user password to local configuration' )
return S_OK()
def getMySQLStatus():
"""
Get the status of the MySQL database installation
"""
result = execCommand( 0, ['mysqladmin', 'status' ] )
if not result['OK']:
return result
output = result['Value'][1]
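  # Typical 'mysqladmin status' output, a single line (illustrative; format may vary by MySQL version):
  #   Uptime: 2615  Threads: 1  Questions: 200  Slow queries: 0  Opens: 25  Flush tables: 1  Open tables: 18  Queries per second avg: 0.076
  # The colon-based split below relies on this field order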
d1, uptime, nthreads, nquestions, nslow, nopens, nflash, nopen, nqpersec = output.split( ':' )
resDict = {}
resDict['UpTime'] = uptime.strip().split()[0]
resDict['NumberOfThreads'] = nthreads.strip().split()[0]
resDict['NumberOfQuestions'] = nquestions.strip().split()[0]
resDict['NumberOfSlowQueries'] = nslow.strip().split()[0]
resDict['NumberOfOpens'] = nopens.strip().split()[0]
resDict['OpenTables'] = nopen.strip().split()[0]
resDict['FlushTables'] = nflash.strip().split()[0]
resDict['QueriesPerSecond'] = nqpersec.strip().split()[0]
return S_OK( resDict )
def getAvailableDatabases( extensions ):
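  """
  Get the databases for which an SQL definition is available in the installed software,
  e.g. (illustrative): { 'JobDB': { 'Extension': '', 'System': 'WorkloadManagement' } }
  """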
dbDict = {}
for extension in extensions + ['']:
databases = glob.glob( os.path.join( rootPath, '%sDIRAC' % extension, '*', 'DB', '*.sql' ) )
for dbPath in databases:
dbName = os.path.basename( dbPath ).replace( '.sql', '' )
dbDict[dbName] = {}
dbDict[dbName]['Extension'] = extension
dbDict[dbName]['System'] = dbPath.split( '/' )[-3].replace( 'System', '' )
return S_OK( dbDict )
def getDatabases():
"""
Get the list of installed databases
"""
result = execMySQL( 'SHOW DATABASES' )
if not result['OK']:
return result
dbList = []
for dbName in result['Value']:
if not dbName[0] in ['Database', 'information_schema', 'mysql', 'test']:
dbList.append( dbName[0] )
return S_OK( dbList )
def installDatabase( dbName ):
"""
Install requested DB in MySQL server
"""
global mysqlRootPwd, mysqlPassword
result = mysqlInstalled()
if not result['OK']:
return result
if not mysqlRootPwd:
rootPwdPath = cfgInstallPath( 'Database', 'RootPwd' )
return S_ERROR( 'Missing %s in %s' % ( rootPwdPath, cfgFile ) )
if not mysqlPassword:
mysqlPassword = localCfg.getOption( cfgPath( 'Systems', 'Databases', 'Password' ), mysqlPassword )
if not mysqlPassword:
mysqlPwdPath = cfgPath( 'Systems', 'Databases', 'Password' )
return S_ERROR( 'Missing %s in %s' % ( mysqlPwdPath, cfgFile ) )
gLogger.notice( 'Installing', dbName )
dbFile = glob.glob( os.path.join( rootPath, '*', '*', 'DB', '%s.sql' % dbName ) )
if not dbFile:
error = 'Database %s not found' % dbName
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
dbFile = dbFile[0]
try:
fd = open( dbFile )
dbLines = fd.readlines()
fd.close()
dbAdded = False
cmdLines = []
for line in dbLines:
if line.lower().find( ( 'use %s;' % dbName ).lower() ) > -1:
result = execMySQL( 'CREATE DATABASE `%s`' % dbName )
if not result['OK']:
gLogger.error( result['Message'] )
if exitOnError:
DIRAC.exit( -1 )
return result
result = execMySQL( 'SHOW STATUS' )
if not result['OK']:
error = 'Could not connect to MySQL server'
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
perms = "SELECT,INSERT,LOCK TABLES,UPDATE,DELETE,CREATE,DROP,ALTER,CREATE VIEW, SHOW VIEW"
for cmd in ["GRANT %s ON `%s`.* TO '%s'@'localhost' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
mysqlPassword ),
"GRANT %s ON `%s`.* TO '%s'@'%s' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
mysqlHost, mysqlPassword ),
"GRANT %s ON `%s`.* TO '%s'@'%%' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
mysqlPassword ),
]:
result = execMySQL( cmd )
if not result['OK']:
error = 'Error setting MySQL permissions'
gLogger.error( error, result['Message'] )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
dbAdded = True
result = execMySQL( 'FLUSH PRIVILEGES' )
if not result['OK']:
gLogger.error( result['Message'] )
if exitOnError:
exit( -1 )
return result
elif dbAdded:
if line.strip():
cmdLines.append( line.strip() )
if line.strip() and line.strip()[-1] == ';':
result = execMySQL( '\n'.join( cmdLines ), dbName )
if not result['OK']:
error = 'Failed to initialize Database'
gLogger.notice( '\n'.join( cmdLines ) )
gLogger.error( error, result['Message'] )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
cmdLines = []
# last line might not have the last ";"
if cmdLines:
cmd = '\n'.join( cmdLines )
if cmd.lower().find( 'source' ) == 0:
try:
dbFile = cmd.split()[1]
dbFile = os.path.join( rootPath, dbFile )
fd = open( dbFile )
dbLines = fd.readlines()
fd.close()
cmdLines = []
for line in dbLines:
if line.strip():
cmdLines.append( line.strip() )
if line.strip() and line.strip()[-1] == ';':
result = execMySQL( '\n'.join( cmdLines ), dbName )
if not result['OK']:
error = 'Failed to initialize Database'
gLogger.notice( '\n'.join( cmdLines ) )
gLogger.error( error, result['Message'] )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
cmdLines = []
except Exception:
error = 'Failed to %s' % cmd
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
if not dbAdded:
error = 'Missing "use %s;"' % dbName
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
except Exception:
error = 'Failed to create Database'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
return S_OK( dbFile.split( '/' )[-4:-2] )
def execMySQL( cmd, dbName = 'mysql', localhost=False ):
"""
Execute MySQL Command
"""
global db
from DIRAC.Core.Utilities.MySQL import MySQL
if not mysqlRootPwd:
return S_ERROR( 'MySQL root password is not defined' )
if dbName not in db:
dbHost = mysqlHost
if localhost:
dbHost = 'localhost'
db[dbName] = MySQL( dbHost, mysqlRootUser, mysqlRootPwd, dbName, mysqlPort )
if not db[dbName]._connected:
error = 'Could not connect to MySQL server'
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
return db[dbName]._query( cmd )
def _addMySQLToDiracCfg():
"""
Add the database access info to the local configuration
"""
if not mysqlPassword:
return S_ERROR( 'Missing %s in %s' % ( cfgInstallPath( 'Database', 'Password' ), cfgFile ) )
sectionPath = cfgPath( 'Systems', 'Databases' )
cfg = __getCfg( sectionPath, 'User', mysqlUser )
cfg.setOption( cfgPath( sectionPath, 'Password' ), mysqlPassword )
return _addCfgToDiracCfg( cfg )
def configureCE( ceName = '', ceType = '', cfg = None, currentSectionPath = '' ):
"""
Produce new dirac.cfg including configuration for new CE
"""
from DIRAC.Resources.Computing.ComputingElementFactory import ComputingElementFactory
from DIRAC import gConfig
cesCfg = ResourcesDefaults.getComputingElementDefaults( ceName, ceType, cfg, currentSectionPath )
ceNameList = cesCfg.listSections()
if not ceNameList:
error = 'No CE Name provided'
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
for ceName in ceNameList:
if 'CEType' not in cesCfg[ceName]:
error = 'Missing Type for CE "%s"' % ceName
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
localsiteCfg = localCfg['LocalSite']
# Replace Configuration under LocalSite with new Configuration
for ceName in ceNameList:
if localsiteCfg.existsKey( ceName ):
gLogger.notice( ' Removing existing CE:', ceName )
localsiteCfg.deleteKey( ceName )
gLogger.notice( 'Configuring CE:', ceName )
localsiteCfg.createNewSection( ceName, contents = cesCfg[ceName] )
# Apply configuration and try to instantiate the CEs
gConfig.loadCFG( localCfg )
for ceName in ceNameList:
ceFactory = ComputingElementFactory()
try:
ceInstance = ceFactory.getCE( ceType, ceName )
except Exception:
      error = 'Failed to instantiate CE'
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
if not ceInstance['OK']:
      error = 'Failed to instantiate CE: %s' % ceInstance['Message']
gLogger.error( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
# Everything is OK, we can save the new cfg
localCfg.writeToFile( cfgFile )
  gLogger.always( 'LocalSite section in %s has been updated with the new configuration:' % os.path.basename( cfgFile ) )
gLogger.always( str( localCfg['LocalSite'] ) )
return S_OK( ceNameList )
def configureLocalDirector( ceNameList = '' ):
"""
Install a Local DIRAC TaskQueueDirector, basically write the proper configuration file
"""
if ceNameList:
result = setupComponent( 'agent', 'WorkloadManagement', 'TaskQueueDirector', [] )
if not result['OK']:
return result
# Now write a local Configuration for the Director
directorCfg = CFG()
directorCfg.addKey( 'SubmitPools', 'DIRAC', 'Added by InstallTools' )
directorCfg.addKey( 'DefaultSubmitPools', 'DIRAC', 'Added by InstallTools' )
directorCfg.addKey( 'ComputingElements', ', '.join( ceNameList ), 'Added by InstallTools' )
result = addCfgToComponentCfg( 'agent', 'WorkloadManagement', 'TaskQueueDirector', directorCfg )
if not result['OK']:
return result
return runsvctrlComponent( 'WorkloadManagement', 'TaskQueueDirector', 't' )
def execCommand( timeout, cmd ):
"""
Execute command tuple and handle Error cases
"""
result = systemCall( timeout, cmd )
if not result['OK']:
if timeout and result['Message'].find( 'Timeout' ) == 0:
return result
gLogger.error( 'Failed to execute', cmd[0] )
gLogger.error( result['Message'] )
if exitOnError:
DIRAC.exit( -1 )
return result
if result['Value'][0]:
error = 'Failed to execute'
gLogger.error( error, cmd[0] )
gLogger.error( 'Exit code:' , ( '%s\n' % result['Value'][0] ) + '\n'.join( result['Value'][1:] ) )
if exitOnError:
DIRAC.exit( -1 )
error = S_ERROR( error )
error['Value'] = result['Value']
return error
gLogger.verbose( result['Value'][1] )
return result
| gpl-3.0 | -4,741,534,441,320,484,000 | 35.177253 | 153 | 0.636055 | false |
bdang2012/taiga-back-casting | taiga/timeline/apps.py | 1 | 1772 | # Copyright (C) 2014-2015 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2015 Jesús Espino <[email protected]>
# Copyright (C) 2014-2015 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from django.apps import apps
from django.db.models import signals
from . import signals as handlers
from taiga.projects.history.models import HistoryEntry
class TimelineAppConfig(AppConfig):
name = "taiga.timeline"
verbose_name = "Timeline"
def ready(self):
signals.post_save.connect(handlers.on_new_history_entry, sender=HistoryEntry, dispatch_uid="timeline")
signals.pre_save.connect(handlers.create_membership_push_to_timeline,
sender=apps.get_model("projects", "Membership"))
signals.post_delete.connect(handlers.delete_membership_push_to_timeline,
sender=apps.get_model("projects", "Membership"))
signals.post_save.connect(handlers.create_user_push_to_timeline,
sender=apps.get_model("users", "User"))
| agpl-3.0 | -1,320,559,556,682,931,500 | 48.166667 | 110 | 0.697175 | false |
andrewthetechie/slack_rtmbot_core_plugins | status/status.py | 1 | 1633 | #! env/bin/python
from datetime import timedelta
import psutil
import re
import yaml
outputs = []
# load default configs
config = yaml.load(file('conf/rtmbot.conf', 'r'))
def status_main():
"""
Does the work of checking the server's status
Returns the message to output
:return: message
"""
message = "Unable to check server status"
cpu_usage = psutil.cpu_percent()
disk_io = psutil.disk_io_counters(perdisk=False)
with open('/proc/uptime', 'r') as f:
uptime_seconds = float(f.readline().split()[0])
uptime_string = str(timedelta(seconds=uptime_seconds))
if cpu_usage and disk_io and uptime_string:
message = "Load: {}\nDisk IO: {}\nUptime: {}".format(cpu_usage, disk_io, uptime_string)
return message
def process_directmessage(data):
match = re.match(r"status", data['text'], flags=re.IGNORECASE)
if match:
message = status_main()
outputs.append([data['channel'], "{}".format(message)])
return
def process_message(data):
match = re.match(r"{} status".format(config['BOT_NAME']), data['text'], flags=re.IGNORECASE)
if match:
message = status_main()
outputs.append([data['channel'], "{}".format(message)])
return
def process_help():
dm_help = []
channel_help = []
plugin_help = []
# setup help
dm_help.append("status - Responds with some basic status on the server running the bot")
channel_help.append("status - Responds with some basic status on the server running the bot")
plugin_help.append(dm_help)
plugin_help.append(channel_help)
return plugin_help
| lgpl-3.0 | -2,207,216,231,104,388,900 | 25.33871 | 97 | 0.6485 | false |
openprocurement/openprocurement.tender.esco | openprocurement/tender/esco/tests/bid.py | 1 | 8048 | # -*- coding: utf-8 -*-
import unittest
from esculator import npv, escp
from openprocurement.api.utils import get_now
from openprocurement.tender.esco.tests.base import (
test_bids, test_features_tender_data,
BaseESCOContentWebTest, NBU_DISCOUNT_RATE
)
from openprocurement.tender.belowthreshold.tests.base import (
test_organization,
)
from openprocurement.api.tests.base import snitch
from openprocurement.tender.belowthreshold.tests.bid_blanks import (
# TenderBidBatchDocumentWithDSResourceTest
create_tender_bid_with_documents,
create_tender_bid_with_document_invalid,
create_tender_bid_with_document,
)
from openprocurement.tender.openeu.tests.bid import TenderBidDocumentResourceTestMixin
from openprocurement.tender.openeu.tests.bid_blanks import (
# TenderBidDocumentWithDSResourceTest
patch_tender_bidder_document_private_json,
put_tender_bidder_document_private_json,
get_tender_bidder_document_ds,
# TenderBidDocumentResourceTest
create_tender_bidder_document_nopending,
# TenderBidBatchDocumentWithDSResourceTest
create_tender_bid_with_all_documents,
create_tender_bid_with_eligibility_document_invalid,
create_tender_bid_with_financial_document_invalid,
create_tender_bid_with_qualification_document_invalid,
create_tender_bid_with_eligibility_document,
create_tender_bid_with_qualification_document,
create_tender_bid_with_financial_document,
create_tender_bid_with_financial_documents,
create_tender_bid_with_eligibility_documents,
create_tender_bid_with_qualification_documents,
get_tender_bidder,
get_tender_tenderers,
)
from openprocurement.tender.esco.tests.bid_blanks import (
create_tender_bid_invalid,
create_tender_bid,
patch_tender_bid,
deleted_bid_is_not_restorable,
bid_Administrator_change,
bids_activation_on_tender_documents,
features_bid_invalid,
features_bid,
patch_and_put_document_into_invalid_bid,
# TenderBidResourceTest
delete_tender_bidder,
bids_invalidation_on_tender_change,
deleted_bid_do_not_locks_tender_in_state,
create_tender_bid_invalid_funding_kind_budget,
create_tender_bid_31_12,
)
from openprocurement.tender.esco.utils import to_decimal
bid_amountPerformance = round(to_decimal(npv(
test_bids[0]['value']['contractDuration']['years'],
test_bids[0]['value']['contractDuration']['days'],
test_bids[0]['value']['yearlyPaymentsPercentage'],
test_bids[0]['value']['annualCostsReduction'],
get_now(),
NBU_DISCOUNT_RATE)), 2)
bid_amount = round(to_decimal(escp(
test_bids[0]['value']['contractDuration']['years'],
test_bids[0]['value']['contractDuration']['days'],
test_bids[0]['value']['yearlyPaymentsPercentage'],
test_bids[0]['value']['annualCostsReduction'],
get_now())), 2)
class TenderBidResourceTest(BaseESCOContentWebTest):
initial_status = 'active.tendering'
test_bids_data = test_bids
author_data = test_bids_data[0]['tenderers'][0]
expected_bid_amountPerformance = bid_amountPerformance
expected_bid_amount = bid_amount
test_create_tender_bid_invalid = snitch(create_tender_bid_invalid)
test_create_tender_bid = snitch(create_tender_bid)
test_patch_tender_bid = snitch(patch_tender_bid)
test_get_tender_bidder = snitch(get_tender_bidder)
test_deleted_bid_do_not_locks_tender_in_state = snitch(deleted_bid_do_not_locks_tender_in_state)
test_get_tender_tenderers = snitch(get_tender_tenderers)
test_deleted_bid_is_not_restorable = snitch(deleted_bid_is_not_restorable)
test_bid_Administrator_change = snitch(bid_Administrator_change)
test_bids_activation_on_tender_documents = snitch(bids_activation_on_tender_documents)
test_delete_tender_bidder = snitch(delete_tender_bidder)
test_bids_invalidation_on_tender_change = snitch(bids_invalidation_on_tender_change)
test_create_tender_bid_invalid_funding_kind_budget = snitch(create_tender_bid_invalid_funding_kind_budget)
test_create_tender_bid_31_12 = snitch(create_tender_bid_31_12)
class TenderBidFeaturesResourceTest(BaseESCOContentWebTest):
initial_status = 'active.tendering'
initial_data = test_features_tender_data
test_bids_data = test_bids
test_features_bid = snitch(features_bid)
test_features_bid_invalid = snitch(features_bid_invalid)
class TenderBidDocumentResourceTest(BaseESCOContentWebTest, TenderBidDocumentResourceTestMixin):
initial_auth = ('Basic', ('broker', ''))
initial_status = 'active.tendering'
test_bids_data = test_bids
def setUp(self):
super(TenderBidDocumentResourceTest, self).setUp()
# Create bid
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), {'data': test_bids[0]})
bid = response.json['data']
self.bid_id = bid['id']
self.bid_token = response.json['access']['token']
# create second bid
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), {'data': test_bids[1]})
bid2 = response.json['data']
self.bid2_id = bid2['id']
self.bid2_token = response.json['access']['token']
test_patch_and_put_document_into_invalid_bid = snitch(patch_and_put_document_into_invalid_bid)
test_create_tender_bidder_document_nopending = snitch(create_tender_bidder_document_nopending)
class TenderBidDocumentWithDSResourceTest(TenderBidDocumentResourceTest):
docservice = True
test_patch_tender_bidder_document_private_json = snitch(patch_tender_bidder_document_private_json)
test_put_tender_bidder_document_private_json = snitch(put_tender_bidder_document_private_json)
test_get_tender_bidder_document_ds = snitch(get_tender_bidder_document_ds)
class TenderBidDocumentWithoutDSResourceTest(TenderBidDocumentResourceTest):
docservice = False
class TenderBidBatchDocumentsWithDSResourceTest(BaseESCOContentWebTest):
docservice = True
initial_status = 'active.tendering'
bid_data_wo_docs = {
'tenderers': [test_organization],
'value': test_bids[0]['value'],
'selfEligible': True,
'selfQualified': True,
'documents': []
}
test_create_tender_bid_with_document_invalid = snitch(create_tender_bid_with_document_invalid)
test_create_tender_bid_with_document = snitch(create_tender_bid_with_document)
test_create_tender_bid_with_documents = snitch(create_tender_bid_with_documents)
test_create_tender_bid_with_eligibility_document_invalid = snitch(create_tender_bid_with_eligibility_document_invalid)
test_create_tender_bid_with_eligibility_document = snitch(create_tender_bid_with_eligibility_document)
test_create_tender_bid_with_eligibility_documents = snitch(create_tender_bid_with_eligibility_documents)
test_create_tender_bid_with_qualification_document_invalid = snitch(create_tender_bid_with_qualification_document_invalid)
test_create_tender_bid_with_qualification_document = snitch(create_tender_bid_with_qualification_document)
test_create_tender_bid_with_qualification_documents = snitch(create_tender_bid_with_qualification_documents)
test_create_tender_bid_with_financial_document_invalid = snitch(create_tender_bid_with_financial_document_invalid)
test_create_tender_bid_with_financial_document = snitch(create_tender_bid_with_financial_document)
test_create_tender_bid_with_financial_documents = snitch(create_tender_bid_with_financial_documents)
test_create_tender_bid_with_all_documents = snitch(create_tender_bid_with_all_documents)
def suite():
suite = unittest.TestSuite()
suite.addTest(TenderBidResourceTest)
suite.addTest(TenderBidFeaturesResourceTest)
suite.addTest(TenderBidDocumentResourceTest)
suite.addTest(TenderBidDocumentWithDSResourceTest)
suite.addTest(TenderBidDocumentWithoutDSResourceTest)
suite.addTest(TenderBidBatchDocumentsWithDSResourceTest)
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| apache-2.0 | -7,882,602,119,402,993 | 41.136126 | 126 | 0.742917 | false |
disulfiram/AntFarm | Game.py | 1 | 3954 | import pygame
import sys
from Menu.MenuItem import *
white = (255, 255, 255)
black = (0, 0, 0)
grey = (195, 195, 195)
pink = (242, 217, 229)
green = (210, 255, 191)
# TODO: More robust menu system. Different menus with different menu items. Maybe have button action here.
main_menu = [MenuItem("New Game", True), MenuItem("Settings"), MenuItem("About"), MenuItem("Exit")]
settings_menu = [MenuItem("Resolution", True), MenuItem("Controls"), MenuItem("Full screen")]
_window_border = 100
# TODO: These need to be parsed from ini file and set in the menu.
window_width = 800
window_height = 600
fps = 30
def main():
menu_loop()
pygame.quit()
def draw_menu(game_display, menu):
menu_items_count = len(main_menu)
button_index = 0
for menu_item in menu:
# Calculating button size
b_width = window_width * 0.3
b_height = window_height * 0.1
b_top = _window_border + (button_index * (window_height - b_height - _window_border * 2) / (menu_items_count - 1))
b_left = (window_width / 2) - (b_width / 2)
# Checking button state
button_state = MenuItemStates.inactive
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if b_left + b_width > mouse[0] > b_left and b_top + b_height > mouse[1] > b_top:
if click[0] == 1: # and action != None:
button_state = MenuItemStates.active
else:
change_active_item(menu, menu_item)
button_state = MenuItemStates.highlight
elif menu_item.highlight:
button_state = MenuItemStates.highlight
# Drawing the button
button(game_display, menu_item, button_state, b_left, b_top, b_width, b_height, pink, white)
button_index += 1
def change_active_item(menu, menu_item):
if menu_item.highlight:
return
for item in menu:
if item == menu_item:
item.highlight = True
else:
item.highlight = False
def text_objects(text, font, color=black):
text_surface = font.render(text, True, color)
return text_surface, text_surface.get_rect()
def button(game_display, m_item, state, left, top, width, height, inactive_colour, active_colour, action=None):
if state == MenuItemStates.active:
pygame.draw.rect(game_display, active_colour, (left, top, width, height))
# TODO: Still not sure how this will work, but it needs to.
# action()
elif state == MenuItemStates.highlight:
        # TODO: This will crash if one, or more, of the values of inactive_colour is greater than 245. Fix later! Experiment with increasing only the values that are below 245, or increasing these values to a maximum of 255
highlight_colour = (inactive_colour[0] + 10, inactive_colour[1] + 10, inactive_colour[2] + 10)
pygame.draw.rect(game_display, highlight_colour, (left, top, width, height))
elif state == MenuItemStates.inactive:
pygame.draw.rect(game_display, inactive_colour, (left, top, width, height))
# TODO: change font size based on button size and/or window size
small_text = pygame.font.SysFont("Berlin Sans FB", 20)
text_surf, text_rect = text_objects(m_item.text, small_text, white)
text_rect.center = ((left + (width / 2)), (top + (height / 2)))
game_display.blit(text_surf, text_rect)
def menu_loop(current_menu=main_menu):
pygame.init()
game_exit = False
pygame.display.set_caption('Ant Farm')
game_display = pygame.display.set_mode((window_width, window_height))
game_display.fill(grey)
clock = pygame.time.Clock()
while not game_exit:
for event in pygame.event.get():
if event.type == pygame.QUIT:
game_exit = True
draw_menu(game_display, current_menu)
pygame.display.update()
clock.tick(fps)
def game_loop():
return 0
if __name__ == "__main__":
sys.exit(main())
| gpl-3.0 | -1,382,443,975,517,226,800 | 33.382609 | 219 | 0.634294 | false |
KimTaehee/eucalyptus | admin-tools/eucalyptus_admin/commands/bootstrap/__init__.py | 1 | 2345 | # Copyright 2015 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
import requestbuilder.auth.aws
import requestbuilder.request
import requestbuilder.service
from eucalyptus_admin.commands import EucalyptusAdmin
from eucalyptus_admin.exceptions import AWSError
from eucalyptus_admin.util import add_fake_region_name
class Bootstrap(requestbuilder.service.BaseService):
NAME = 'bootstrap'
DESCRIPTION = 'Bootstrap service'
REGION_ENVVAR = 'AWS_DEFAULT_REGION'
URL_ENVVAR = 'EUCA_BOOTSTRAP_URL'
ARGS = [Arg('-U', '--url', metavar='URL',
help='bootstrap service endpoint URL')]
def configure(self):
requestbuilder.service.BaseService.configure(self)
add_fake_region_name(self)
def handle_http_error(self, response):
raise AWSError(response)
class BootstrapRequest(requestbuilder.request.AWSQueryRequest):
SUITE = EucalyptusAdmin
SERVICE_CLASS = Bootstrap
AUTH_CLASS = requestbuilder.auth.aws.HmacV4Auth
API_VERSION = 'eucalyptus'
METHOD = 'POST'
| gpl-3.0 | -4,270,303,309,189,916,700 | 39.431034 | 74 | 0.764606 | false |
dionyziz/llama | tests/test_ast.py | 1 | 2859 | import itertools
import unittest
from compiler import ast, parse
# pylint: disable=no-member
class TestAST(unittest.TestCase):
def test_node_init(self):
self.assertRaises(
NotImplementedError,
ast.Node
)
def test_eq(self):
foocon = ast.Constructor("foo", [])
ast.Constructor("foo", []).should.equal(foocon)
ast.Constructor("bar", []).shouldnt.equal(foocon)
def test_regression_constructor_attr_equality(self):
tdef1 = parse.quiet_parse("type color = Red", "typedef")
tdef2 = [ast.TDef(ast.User("color"), [ast.Constructor("Red")])]
node_eq = lambda a, b: a == b
node_eq.when.called_with(tdef1, tdef2).shouldnt.throw(AttributeError)
def test_builtin_type_equality(self):
for typecon in ast.builtin_types_map.values():
typecon().should.equal(typecon())
for typecon1, typecon2 in itertools.combinations(
ast.builtin_types_map.values(), 2
):
typecon1().shouldnt.equal(typecon2())
def test_builtin_type_set(self):
typeset = {typecon() for typecon in ast.builtin_types_map.values()}
typeset.add(ast.User("foo"))
for typecon in ast.builtin_types_map.values():
typeset.should.contain(typecon())
typeset.should.contain(ast.User("foo"))
typeset.shouldnt.contain(ast.User("bar"))
def test_user_defined_types(self):
ast.User("foo").should.equal(ast.User("foo"))
ast.User("foo").shouldnt.equal(ast.User("bar"))
ast.User("foo").shouldnt.equal(ast.Int())
def test_ref_types(self):
footype = ast.User("foo")
bartype = ast.User("bar")
reffootype = ast.Ref(footype)
reffootype.should.equal(ast.Ref(footype))
reffootype.shouldnt.equal(footype)
reffootype.shouldnt.equal(ast.Ref(bartype))
def test_array_types(self):
inttype = ast.Int()
ast.Array(inttype).should.equal(ast.Array(inttype))
ast.Array(inttype, 2).should.equal(ast.Array(inttype, 2))
ast.Array(ast.Int()).shouldnt.equal(ast.Array(ast.Float()))
ast.Array(inttype, 1).shouldnt.equal(ast.Array(inttype, 2))
arr_int_type = ast.Array(inttype)
arr_int_type.shouldnt.equal(inttype)
arr_int_type.shouldnt.equal(ast.User("foo"))
arr_int_type.shouldnt.equal(ast.Ref(inttype))
def test_function_types(self):
intt = ast.Int()
ast.Function(intt, intt).should.equal(ast.Function(intt, intt))
i2float = ast.Function(ast.Int(), ast.Float())
i2float.shouldnt.equal(ast.Function(ast.Float(), ast.Int()))
i2float.shouldnt.equal(intt)
i2float.shouldnt.equal(ast.User("foo"))
i2float.shouldnt.equal(ast.Ref(ast.Int()))
i2float.shouldnt.equal(ast.Array(ast.Int()))
| mit | -1,466,804,297,911,444,500 | 32.635294 | 77 | 0.622945 | false |
weiyuanke/mykeystone | keystone/openstack/common/cfg.py | 1 | 52287 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
r"""
Configuration options which may be set on the command line or in config files.
The schema for each option is defined using the Opt sub-classes, e.g.:
::
common_opts = [
cfg.StrOpt('bind_host',
default='0.0.0.0',
help='IP address to listen on'),
cfg.IntOpt('bind_port',
default=9292,
help='Port number to listen on')
]
Options can be strings, integers, floats, booleans, lists or 'multi strings'::
enabled_apis_opt = cfg.ListOpt('enabled_apis',
default=['ec2', 'osapi_compute'],
help='List of APIs to enable by default')
DEFAULT_EXTENSIONS = [
'nova.api.openstack.compute.contrib.standard_extensions'
]
osapi_compute_extension_opt = cfg.MultiStrOpt('osapi_compute_extension',
default=DEFAULT_EXTENSIONS)
Option schemas are registered with the config manager at runtime, but
before the option is referenced::
class ExtensionManager(object):
enabled_apis_opt = cfg.ListOpt(...)
def __init__(self, conf):
self.conf = conf
self.conf.register_opt(enabled_apis_opt)
...
def _load_extensions(self):
for ext_factory in self.conf.osapi_compute_extension:
....
A common usage pattern is for each option schema to be defined in the module or
class which uses the option::
opts = ...
def add_common_opts(conf):
conf.register_opts(opts)
def get_bind_host(conf):
return conf.bind_host
def get_bind_port(conf):
return conf.bind_port
An option may optionally be made available via the command line. Such options
must be registered with the config manager before the command line is parsed (for
the purposes of --help and CLI arg validation)::
cli_opts = [
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output'),
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output'),
]
def add_common_opts(conf):
conf.register_cli_opts(cli_opts)
The config manager has two CLI options defined by default, --config-file
and --config-dir::
class ConfigOpts(object):
def __call__(self, ...):
opts = [
MultiStrOpt('config-file',
...),
StrOpt('config-dir',
...),
]
self.register_cli_opts(opts)
Option values are parsed from any supplied config files using
openstack.common.iniparser. If none are specified, a default set is used
e.g. glance-api.conf and glance-common.conf::
glance-api.conf:
[DEFAULT]
bind_port = 9292
glance-common.conf:
[DEFAULT]
bind_host = 0.0.0.0
Option values in config files override those on the command line. Config files
are parsed in order, with values in later files overriding those in earlier
files.
The parsing of CLI args and config files is initiated by invoking the config
manager e.g.::
conf = ConfigOpts()
conf.register_opt(BoolOpt('verbose', ...))
conf(sys.argv[1:])
if conf.verbose:
...
Options can be registered as belonging to a group::
rabbit_group = cfg.OptGroup(name='rabbit',
title='RabbitMQ options')
rabbit_host_opt = cfg.StrOpt('host',
default='localhost',
help='IP/hostname to listen on'),
rabbit_port_opt = cfg.IntOpt('port',
default=5672,
help='Port number to listen on')
def register_rabbit_opts(conf):
conf.register_group(rabbit_group)
# options can be registered under a group in either of these ways:
conf.register_opt(rabbit_host_opt, group=rabbit_group)
conf.register_opt(rabbit_port_opt, group='rabbit')
If no group attributes are required other than the group name, the group
need not be explicitly registered e.g.
def register_rabbit_opts(conf):
        # The group will automatically be created, equivalent to calling::
# conf.register_group(OptGroup(name='rabbit'))
conf.register_opt(rabbit_port_opt, group='rabbit')
If no group is specified, options belong to the 'DEFAULT' section of config
files::
glance-api.conf:
[DEFAULT]
bind_port = 9292
...
[rabbit]
host = localhost
port = 5672
use_ssl = False
userid = guest
password = guest
virtual_host = /
Command-line options in a group are automatically prefixed with the
group name::
--rabbit-host localhost --rabbit-port 9999
Option values in the default group are referenced as attributes/properties on
the config manager; groups are also attributes on the config manager, with
attributes for each of the options associated with the group::
server.start(app, conf.bind_port, conf.bind_host, conf)
self.connection = kombu.connection.BrokerConnection(
hostname=conf.rabbit.host,
port=conf.rabbit.port,
...)
Option values may reference other values using PEP 292 string substitution::
opts = [
cfg.StrOpt('state_path',
default=os.path.join(os.path.dirname(__file__), '../'),
help='Top-level directory for maintaining nova state'),
cfg.StrOpt('sqlite_db',
default='nova.sqlite',
help='file name for sqlite'),
cfg.StrOpt('sql_connection',
default='sqlite:///$state_path/$sqlite_db',
help='connection string for sql database'),
]
Note that interpolation can be avoided by using '$$'.
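For example, the following illustrative option (a placeholder, not one used by
any real service) keeps a literal "$" in its value because "$$" escapes the
substitution::
    opts = [
        cfg.StrOpt('price_suffix',
                   default='usd$$',
                   help='Read back as "usd$" once the value is substituted'),
    ]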
For command line utilities that dispatch to other command line utilities, the
disable_interspersed_args() method is available. If this method is called,
then parsing e.g.::
script --verbose cmd --debug /tmp/mything
will no longer return::
['cmd', '/tmp/mything']
as the leftover arguments, but will instead return::
['cmd', '--debug', '/tmp/mything']
i.e. argument parsing is stopped at the first non-option argument.
Options may be declared as required so that an error is raised if the user
does not supply a value for the option.
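For example, both of these illustrative options (placeholder names, not from
any real project) must be given a value on the command line or in a config
file, otherwise RequiredOptError is raised::
    opts = [
        cfg.StrOpt('service_name', required=True),
        cfg.StrOpt('image_id', required=True),
    ]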
Options may be declared as secret so that their values are not leaked into
log files:
opts = [
cfg.StrOpt('s3_store_access_key', secret=True),
cfg.StrOpt('s3_store_secret_key', secret=True),
...
]
This module also contains a global instance of the CommonConfigOpts class
in order to support a common usage pattern in OpenStack:
from openstack.common import cfg
opts = [
        cfg.StrOpt('bind_host', default='0.0.0.0'),
cfg.IntOpt('bind_port', default=9292),
]
CONF = cfg.CONF
CONF.register_opts(opts)
def start(server, app):
server.start(app, CONF.bind_port, CONF.bind_host)
"""
import collections
import copy
import functools
import glob
import optparse
import os
import string
import sys
from keystone.openstack.common import iniparser
class Error(Exception):
"""Base class for cfg exceptions."""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
return self.msg
class ArgsAlreadyParsedError(Error):
"""Raised if a CLI opt is registered after parsing."""
def __str__(self):
ret = "arguments already parsed"
if self.msg:
ret += ": " + self.msg
return ret
class NoSuchOptError(Error, AttributeError):
"""Raised if an opt which doesn't exist is referenced."""
def __init__(self, opt_name, group=None):
self.opt_name = opt_name
self.group = group
def __str__(self):
if self.group is None:
return "no such option: %s" % self.opt_name
else:
return "no such option in group %s: %s" % (self.group.name,
self.opt_name)
class NoSuchGroupError(Error):
"""Raised if a group which doesn't exist is referenced."""
def __init__(self, group_name):
self.group_name = group_name
def __str__(self):
return "no such group: %s" % self.group_name
class DuplicateOptError(Error):
"""Raised if multiple opts with the same name are registered."""
def __init__(self, opt_name):
self.opt_name = opt_name
def __str__(self):
return "duplicate option: %s" % self.opt_name
class RequiredOptError(Error):
"""Raised if an option is required but no value is supplied by the user."""
def __init__(self, opt_name, group=None):
self.opt_name = opt_name
self.group = group
def __str__(self):
if self.group is None:
return "value required for option: %s" % self.opt_name
else:
return "value required for option: %s.%s" % (self.group.name,
self.opt_name)
class TemplateSubstitutionError(Error):
"""Raised if an error occurs substituting a variable in an opt value."""
def __str__(self):
return "template substitution error: %s" % self.msg
class ConfigFilesNotFoundError(Error):
"""Raised if one or more config files are not found."""
def __init__(self, config_files):
self.config_files = config_files
def __str__(self):
return ('Failed to read some config files: %s' %
string.join(self.config_files, ','))
class ConfigFileParseError(Error):
"""Raised if there is an error parsing a config file."""
def __init__(self, config_file, msg):
self.config_file = config_file
self.msg = msg
def __str__(self):
return 'Failed to parse %s: %s' % (self.config_file, self.msg)
class ConfigFileValueError(Error):
"""Raised if a config file value does not match its opt type."""
pass
def _get_config_dirs(project=None):
"""Return a list of directors where config files may be located.
:param project: an optional project name
    If a project is specified, the following directories are returned::
~/.${project}/
~/
/etc/${project}/
/etc/
Otherwise, these directories::
~/
/etc/
"""
fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
cfg_dirs = [
fix_path(os.path.join('~', '.' + project)) if project else None,
fix_path('~'),
os.path.join('/etc', project) if project else None,
'/etc'
]
return filter(bool, cfg_dirs)
def _search_dirs(dirs, basename, extension=""):
"""Search a list of directories for a given filename.
    Iterate over the supplied directories, returning the first file
found with the supplied name and extension.
:param dirs: a list of directories
:param basename: the filename, e.g. 'glance-api'
:param extension: the file extension, e.g. '.conf'
:returns: the path to a matching file, or None
"""
for d in dirs:
path = os.path.join(d, '%s%s' % (basename, extension))
if os.path.exists(path):
return path
def find_config_files(project=None, prog=None, extension='.conf'):
"""Return a list of default configuration files.
:param project: an optional project name
:param prog: the program name, defaulting to the basename of sys.argv[0]
:param extension: the type of the config file
We default to two config files: [${project}.conf, ${prog}.conf]
And we look for those config files in the following directories::
~/.${project}/
~/
/etc/${project}/
/etc/
We return an absolute path for (at most) one of each the default config
files, for the topmost directory it exists in.
For example, if project=foo, prog=bar and /etc/foo/foo.conf, /etc/bar.conf
and ~/.foo/bar.conf all exist, then we return ['/etc/foo/foo.conf',
'~/.foo/bar.conf']
    If no project name is supplied, we only look for ${prog}.conf.
"""
if prog is None:
prog = os.path.basename(sys.argv[0])
cfg_dirs = _get_config_dirs(project)
config_files = []
if project:
config_files.append(_search_dirs(cfg_dirs, project, extension))
config_files.append(_search_dirs(cfg_dirs, prog, extension))
return filter(bool, config_files)
def _is_opt_registered(opts, opt):
"""Check whether an opt with the same name is already registered.
The same opt may be registered multiple times, with only the first
registration having any effect. However, it is an error to attempt
to register a different opt with the same name.
:param opts: the set of opts already registered
:param opt: the opt to be registered
:returns: True if the opt was previously registered, False otherwise
:raises: DuplicateOptError if a naming conflict is detected
"""
if opt.dest in opts:
if opts[opt.dest]['opt'] is not opt:
raise DuplicateOptError(opt.name)
return True
else:
return False
class Opt(object):
"""Base class for all configuration options.
An Opt object has no public methods, but has a number of public string
properties:
name:
the name of the option, which may include hyphens
dest:
the (hyphen-less) ConfigOpts property which contains the option value
short:
a single character CLI option name
default:
the default value of the option
metavar:
the name shown as the argument to a CLI option in --help output
help:
        a string explaining how the option's value is used
"""
multi = False
def __init__(self, name, dest=None, short=None, default=None,
metavar=None, help=None, secret=False, required=False):
"""Construct an Opt object.
The only required parameter is the option's name. However, it is
common to also supply a default and help string for all options.
:param name: the option's name
:param dest: the name of the corresponding ConfigOpts property
:param short: a single character CLI option name
:param default: the default value of the option
:param metavar: the option argument to show in --help
:param help: an explanation of how the option is used
:param secret: true iff the value should be obfuscated in log output
:param required: true iff a value must be supplied for this option
"""
self.name = name
if dest is None:
self.dest = self.name.replace('-', '_')
else:
self.dest = dest
self.short = short
self.default = default
self.metavar = metavar
self.help = help
self.secret = secret
self.required = required
def _get_from_config_parser(self, cparser, section):
"""Retrieves the option value from a MultiConfigParser object.
This is the method ConfigOpts uses to look up the option value from
config files. Most opt types override this method in order to perform
type appropriate conversion of the returned value.
:param cparser: a ConfigParser object
:param section: a section name
"""
return cparser.get(section, self.dest)
def _add_to_cli(self, parser, group=None):
"""Makes the option available in the command line interface.
This is the method ConfigOpts uses to add the opt to the CLI interface
as appropriate for the opt type. Some opt types may extend this method,
others may just extend the helper methods it uses.
:param parser: the CLI option parser
:param group: an optional OptGroup object
"""
container = self._get_optparse_container(parser, group)
kwargs = self._get_optparse_kwargs(group)
prefix = self._get_optparse_prefix('', group)
self._add_to_optparse(container, self.name, self.short, kwargs, prefix)
def _add_to_optparse(self, container, name, short, kwargs, prefix=''):
"""Add an option to an optparse parser or group.
:param container: an optparse.OptionContainer object
:param name: the opt name
:param short: the short opt name
:param kwargs: the keyword arguments for add_option()
:param prefix: an optional prefix to prepend to the opt name
        :raises: DuplicateOptError if a naming conflict is detected
"""
args = ['--' + prefix + name]
if short:
args += ['-' + short]
for a in args:
if container.has_option(a):
raise DuplicateOptError(a)
container.add_option(*args, **kwargs)
def _get_optparse_container(self, parser, group):
"""Returns an optparse.OptionContainer.
:param parser: an optparse.OptionParser
:param group: an (optional) OptGroup object
:returns: an optparse.OptionGroup if a group is given, else the parser
"""
if group is not None:
return group._get_optparse_group(parser)
else:
return parser
def _get_optparse_kwargs(self, group, **kwargs):
"""Build a dict of keyword arguments for optparse's add_option().
Most opt types extend this method to customize the behaviour of the
options added to optparse.
:param group: an optional group
:param kwargs: optional keyword arguments to add to
:returns: a dict of keyword arguments
"""
dest = self.dest
if group is not None:
dest = group.name + '_' + dest
kwargs.update({
'dest': dest,
'metavar': self.metavar,
'help': self.help,
})
return kwargs
def _get_optparse_prefix(self, prefix, group):
"""Build a prefix for the CLI option name, if required.
CLI options in a group are prefixed with the group's name in order
to avoid conflicts between similarly named options in different
groups.
:param prefix: an existing prefix to append to (e.g. 'no' or '')
:param group: an optional OptGroup object
:returns: a CLI option prefix including the group name, if appropriate
"""
if group is not None:
return group.name + '-' + prefix
else:
return prefix
class StrOpt(Opt):
"""
String opts do not have their values transformed and are returned as
str objects.
"""
pass
class BoolOpt(Opt):
"""
Bool opts are set to True or False on the command line using --optname or
    --nooptname respectively.
In config files, boolean values are case insensitive and can be set using
1/0, yes/no, true/false or on/off.
"""
_boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a boolean from ConfigParser."""
def convert_bool(v):
value = self._boolean_states.get(v.lower())
if value is None:
raise ValueError('Unexpected boolean value %r' % v)
return value
return [convert_bool(v) for v in cparser.get(section, self.dest)]
def _add_to_cli(self, parser, group=None):
"""Extends the base class method to add the --nooptname option."""
super(BoolOpt, self)._add_to_cli(parser, group)
self._add_inverse_to_optparse(parser, group)
def _add_inverse_to_optparse(self, parser, group):
"""Add the --nooptname option to the option parser."""
container = self._get_optparse_container(parser, group)
kwargs = self._get_optparse_kwargs(group, action='store_false')
prefix = self._get_optparse_prefix('no', group)
kwargs["help"] = "The inverse of --" + self.name
self._add_to_optparse(container, self.name, None, kwargs, prefix)
def _get_optparse_kwargs(self, group, action='store_true', **kwargs):
"""Extends the base optparse keyword dict for boolean options."""
return super(BoolOpt,
self)._get_optparse_kwargs(group, action=action, **kwargs)
class IntOpt(Opt):
"""Int opt values are converted to integers using the int() builtin."""
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a integer from ConfigParser."""
return [int(v) for v in cparser.get(section, self.dest)]
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for integer options."""
return super(IntOpt,
self)._get_optparse_kwargs(group, type='int', **kwargs)
class FloatOpt(Opt):
"""Float opt values are converted to floats using the float() builtin."""
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a float from ConfigParser."""
return [float(v) for v in cparser.get(section, self.dest)]
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for float options."""
return super(FloatOpt,
self)._get_optparse_kwargs(group, type='float', **kwargs)
class ListOpt(Opt):
"""
List opt values are simple string values separated by commas. The opt value
is a list containing these strings.
"""
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a list from ConfigParser."""
return [v.split(',') for v in cparser.get(section, self.dest)]
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for list options."""
return super(ListOpt,
self)._get_optparse_kwargs(group,
type='string',
action='callback',
callback=self._parse_list,
**kwargs)
def _parse_list(self, option, opt, value, parser):
"""An optparse callback for parsing an option value into a list."""
setattr(parser.values, self.dest, value.split(','))
class MultiStrOpt(Opt):
"""
Multistr opt values are string opts which may be specified multiple times.
The opt value is a list containing all the string values specified.
"""
multi = True
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for multi str options."""
return super(MultiStrOpt,
self)._get_optparse_kwargs(group, action='append')
class OptGroup(object):
"""
Represents a group of opts.
CLI opts in the group are automatically prefixed with the group name.
Each group corresponds to a section in config files.
An OptGroup object has no public methods, but has a number of public string
properties:
name:
the name of the group
title:
the group title as displayed in --help
help:
the group description as displayed in --help
"""
def __init__(self, name, title=None, help=None):
"""Constructs an OptGroup object.
:param name: the group name
:param title: the group title for --help
:param help: the group description for --help
"""
self.name = name
if title is None:
self.title = "%s options" % title
else:
self.title = title
self.help = help
self._opts = {} # dict of dicts of (opt:, override:, default:)
self._optparse_group = None
def _register_opt(self, opt):
"""Add an opt to this group.
:param opt: an Opt object
:returns: False if previously registered, True otherwise
:raises: DuplicateOptError if a naming conflict is detected
"""
if _is_opt_registered(self._opts, opt):
return False
self._opts[opt.dest] = {'opt': opt, 'override': None, 'default': None}
return True
def _unregister_opt(self, opt):
"""Remove an opt from this group.
:param opt: an Opt object
"""
if opt.dest in self._opts:
del self._opts[opt.dest]
def _get_optparse_group(self, parser):
"""Build an optparse.OptionGroup for this group."""
if self._optparse_group is None:
self._optparse_group = optparse.OptionGroup(parser, self.title,
self.help)
return self._optparse_group
def _clear(self):
"""Clear this group's option parsing state."""
self._optparse_group = None
class ParseError(iniparser.ParseError):
def __init__(self, msg, lineno, line, filename):
super(ParseError, self).__init__(msg, lineno, line)
self.filename = filename
def __str__(self):
return 'at %s:%d, %s: %r' % (self.filename, self.lineno,
self.msg, self.line)
class ConfigParser(iniparser.BaseParser):
def __init__(self, filename, sections):
super(ConfigParser, self).__init__()
self.filename = filename
self.sections = sections
self.section = None
def parse(self):
with open(self.filename) as f:
return super(ConfigParser, self).parse(f)
def new_section(self, section):
self.section = section
self.sections.setdefault(self.section, {})
def assignment(self, key, value):
if not self.section:
raise self.error_no_section()
self.sections[self.section].setdefault(key, [])
self.sections[self.section][key].append('\n'.join(value))
def parse_exc(self, msg, lineno, line=None):
return ParseError(msg, lineno, line, self.filename)
def error_no_section(self):
return self.parse_exc('Section must be started before assignment',
self.lineno)
class MultiConfigParser(object):
def __init__(self):
self.sections = {}
def read(self, config_files):
read_ok = []
for filename in config_files:
parser = ConfigParser(filename, self.sections)
try:
parser.parse()
except IOError:
continue
read_ok.append(filename)
return read_ok
def get(self, section, name):
return self.sections[section][name]
class ConfigOpts(collections.Mapping):
"""
Config options which may be set on the command line or in config files.
ConfigOpts is a configuration option manager with APIs for registering
option schemas, grouping options, parsing option values and retrieving
the values of options.
"""
def __init__(self):
"""Construct a ConfigOpts object."""
self._opts = {} # dict of dicts of (opt:, override:, default:)
self._groups = {}
self._args = None
self._oparser = None
self._cparser = None
self._cli_values = {}
self.__cache = {}
self._config_opts = []
self._disable_interspersed_args = False
def _setup(self, project, prog, version, usage, default_config_files):
"""Initialize a ConfigOpts object for option parsing."""
if prog is None:
prog = os.path.basename(sys.argv[0])
if default_config_files is None:
default_config_files = find_config_files(project, prog)
self._oparser = optparse.OptionParser(prog=prog,
version=version,
usage=usage)
if self._disable_interspersed_args:
self._oparser.disable_interspersed_args()
self._config_opts = [
MultiStrOpt('config-file',
default=default_config_files,
metavar='PATH',
help='Path to a config file to use. Multiple config '
'files can be specified, with values in later '
'files taking precedence. The default files '
' used are: %s' % (default_config_files, )),
StrOpt('config-dir',
metavar='DIR',
help='Path to a config directory to pull *.conf '
'files from. This file set is sorted, so as to '
'provide a predictable parse order if individual '
'options are over-ridden. The set is parsed after '
'the file(s), if any, specified via --config-file, '
'hence over-ridden options in the directory take '
'precedence.'),
]
self.register_cli_opts(self._config_opts)
self.project = project
self.prog = prog
self.version = version
self.usage = usage
self.default_config_files = default_config_files
def __clear_cache(f):
@functools.wraps(f)
def __inner(self, *args, **kwargs):
if kwargs.pop('clear_cache', True):
self.__cache.clear()
return f(self, *args, **kwargs)
return __inner
def __call__(self,
args=None,
project=None,
prog=None,
version=None,
usage=None,
default_config_files=None):
"""Parse command line arguments and config files.
Calling a ConfigOpts object causes the supplied command line arguments
and config files to be parsed, causing opt values to be made available
as attributes of the object.
The object may be called multiple times, each time causing the previous
set of values to be overwritten.
Automatically registers the --config-file option with either a supplied
list of default config files, or a list from find_config_files().
If the --config-dir option is set, any *.conf files from this
directory are pulled in, after all the file(s) specified by the
--config-file option.
:param args: command line arguments (defaults to sys.argv[1:])
:param project: the toplevel project name, used to locate config files
:param prog: the name of the program (defaults to sys.argv[0] basename)
:param version: the program version (for --version)
:param usage: a usage string (%prog will be expanded)
:param default_config_files: config files to use by default
:returns: the list of arguments left over after parsing options
:raises: SystemExit, ConfigFilesNotFoundError, ConfigFileParseError,
RequiredOptError, DuplicateOptError
"""
self.clear()
self._setup(project, prog, version, usage, default_config_files)
self._cli_values, leftovers = self._parse_cli_opts(args)
self._parse_config_files()
self._check_required_opts()
return leftovers
def __getattr__(self, name):
"""Look up an option value and perform string substitution.
:param name: the opt name (or 'dest', more precisely)
:returns: the option value (after string subsititution) or a GroupAttr
:raises: NoSuchOptError,ConfigFileValueError,TemplateSubstitutionError
"""
return self._get(name)
def __getitem__(self, key):
"""Look up an option value and perform string substitution."""
return self.__getattr__(key)
def __contains__(self, key):
"""Return True if key is the name of a registered opt or group."""
return key in self._opts or key in self._groups
def __iter__(self):
"""Iterate over all registered opt and group names."""
for key in self._opts.keys() + self._groups.keys():
yield key
def __len__(self):
"""Return the number of options and option groups."""
return len(self._opts) + len(self._groups)
def reset(self):
"""Clear the object state and unset overrides and defaults."""
self._unset_defaults_and_overrides()
self.clear()
@__clear_cache
def clear(self):
"""Clear the state of the object to before it was called."""
self._args = None
self._cli_values.clear()
self._oparser = None
self._cparser = None
self.unregister_opts(self._config_opts)
for group in self._groups.values():
group._clear()
@__clear_cache
def register_opt(self, opt, group=None):
"""Register an option schema.
Registering an option schema makes any option value which is previously
or subsequently parsed from the command line or config files available
as an attribute of this object.
:param opt: an instance of an Opt sub-class
:param group: an optional OptGroup object or group name
        :return: False if the opt was already registered, True otherwise
:raises: DuplicateOptError
"""
if group is not None:
return self._get_group(group, autocreate=True)._register_opt(opt)
if _is_opt_registered(self._opts, opt):
return False
self._opts[opt.dest] = {'opt': opt, 'override': None, 'default': None}
return True
@__clear_cache
def register_opts(self, opts, group=None):
"""Register multiple option schemas at once."""
for opt in opts:
self.register_opt(opt, group, clear_cache=False)
@__clear_cache
def register_cli_opt(self, opt, group=None):
"""Register a CLI option schema.
CLI option schemas must be registered before the command line and
config files are parsed. This is to ensure that all CLI options are
        shown in --help and option validation works as expected.
:param opt: an instance of an Opt sub-class
:param group: an optional OptGroup object or group name
        :return: False if the opt was already registered, True otherwise
:raises: DuplicateOptError, ArgsAlreadyParsedError
"""
if self._args is not None:
raise ArgsAlreadyParsedError("cannot register CLI option")
return self.register_opt(opt, group, clear_cache=False)
@__clear_cache
def register_cli_opts(self, opts, group=None):
"""Register multiple CLI option schemas at once."""
for opt in opts:
self.register_cli_opt(opt, group, clear_cache=False)
def register_group(self, group):
"""Register an option group.
An option group must be registered before options can be registered
with the group.
:param group: an OptGroup object
"""
if group.name in self._groups:
return
self._groups[group.name] = copy.copy(group)
@__clear_cache
def unregister_opt(self, opt, group=None):
"""Unregister an option.
:param opt: an Opt object
:param group: an optional OptGroup object or group name
:raises: ArgsAlreadyParsedError, NoSuchGroupError
"""
if self._args is not None:
raise ArgsAlreadyParsedError("reset before unregistering options")
if group is not None:
self._get_group(group)._unregister_opt(opt)
elif opt.dest in self._opts:
del self._opts[opt.dest]
@__clear_cache
def unregister_opts(self, opts, group=None):
"""Unregister multiple CLI option schemas at once."""
for opt in opts:
self.unregister_opt(opt, group, clear_cache=False)
@__clear_cache
def set_override(self, name, override, group=None):
"""Override an opt value.
Override the command line, config file and default values of a
given option.
:param name: the name/dest of the opt
:param override: the override value
:param group: an option OptGroup object or group name
:raises: NoSuchOptError, NoSuchGroupError
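        An illustrative call (the option name is just a placeholder)::
            conf.set_override('bind_host', '127.0.0.1')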
"""
opt_info = self._get_opt_info(name, group)
opt_info['override'] = override
@__clear_cache
def set_default(self, name, default, group=None):
"""Override an opt's default value.
        Override the default value of a given option. A command line or
config file value will still take precedence over this default.
:param name: the name/dest of the opt
:param default: the default value
:param group: an option OptGroup object or group name
:raises: NoSuchOptError, NoSuchGroupError
"""
opt_info = self._get_opt_info(name, group)
opt_info['default'] = default
def _all_opt_infos(self):
"""A generator function for iteration opt infos."""
for info in self._opts.values():
yield info, None
for group in self._groups.values():
for info in group._opts.values():
yield info, group
def _all_opts(self):
"""A generator function for iteration opts."""
for info, group in self._all_opt_infos():
yield info['opt'], group
def _unset_defaults_and_overrides(self):
"""Unset any default or override on all options."""
for info, group in self._all_opt_infos():
info['default'] = None
info['override'] = None
def disable_interspersed_args(self):
"""Set parsing to stop on the first non-option.
        If this method is called, then parsing e.g.
script --verbose cmd --debug /tmp/mything
will no longer return:
['cmd', '/tmp/mything']
as the leftover arguments, but will instead return:
['cmd', '--debug', '/tmp/mything']
i.e. argument parsing is stopped at the first non-option argument.
"""
self._disable_interspersed_args = True
def enable_interspersed_args(self):
"""Set parsing to not stop on the first non-option.
        This is the default behaviour."""
self._disable_interspersed_args = False
def find_file(self, name):
"""Locate a file located alongside the config files.
Search for a file with the supplied basename in the directories
which we have already loaded config files from and other known
configuration directories.
The directory, if any, supplied by the config_dir option is
searched first. Then the config_file option is iterated over
and each of the base directories of the config_files values
are searched. Failing both of these, the standard directories
searched by the module level find_config_files() function is
used. The first matching file is returned.
        :param name: the filename, e.g. 'policy.json'
:returns: the path to a matching file, or None
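        An illustrative call (the result depends on which files exist on the
        local system)::
            policy_path = CONF.find_file('policy.json')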
"""
dirs = []
if self.config_dir:
dirs.append(self.config_dir)
for cf in reversed(self.config_file):
dirs.append(os.path.dirname(cf))
dirs.extend(_get_config_dirs(self.project))
return _search_dirs(dirs, name)
def log_opt_values(self, logger, lvl):
"""Log the value of all registered opts.
It's often useful for an app to log its configuration to a log file at
        startup for debugging. This method dumps the entire config state to
the supplied logger at a given log level.
:param logger: a logging.Logger object
:param lvl: the log level (e.g. logging.DEBUG) arg to logger.log()
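        An illustrative call at service startup (the logger name is a
        placeholder)::
            conf.log_opt_values(logging.getLogger('myservice'), logging.DEBUG)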
"""
logger.log(lvl, "*" * 80)
logger.log(lvl, "Configuration options gathered from:")
logger.log(lvl, "command line args: %s", self._args)
logger.log(lvl, "config files: %s", self.config_file)
logger.log(lvl, "=" * 80)
def _sanitize(opt, value):
"""Obfuscate values of options declared secret"""
return value if not opt.secret else '*' * len(str(value))
for opt_name in sorted(self._opts):
opt = self._get_opt_info(opt_name)['opt']
logger.log(lvl, "%-30s = %s", opt_name,
_sanitize(opt, getattr(self, opt_name)))
for group_name in self._groups:
group_attr = self.GroupAttr(self, self._get_group(group_name))
for opt_name in sorted(self._groups[group_name]._opts):
opt = self._get_opt_info(opt_name, group_name)['opt']
logger.log(lvl, "%-30s = %s",
"%s.%s" % (group_name, opt_name),
_sanitize(opt, getattr(group_attr, opt_name)))
logger.log(lvl, "*" * 80)
def print_usage(self, file=None):
"""Print the usage message for the current program."""
self._oparser.print_usage(file)
def print_help(self, file=None):
"""Print the help message for the current program."""
self._oparser.print_help(file)
def _get(self, name, group=None):
if isinstance(group, OptGroup):
key = (group.name, name)
else:
key = (group, name)
try:
return self.__cache[key]
except KeyError:
value = self._substitute(self._do_get(name, group))
self.__cache[key] = value
return value
def _do_get(self, name, group=None):
"""Look up an option value.
:param name: the opt name (or 'dest', more precisely)
:param group: an OptGroup
:returns: the option value, or a GroupAttr object
:raises: NoSuchOptError, NoSuchGroupError, ConfigFileValueError,
TemplateSubstitutionError
"""
if group is None and name in self._groups:
return self.GroupAttr(self, self._get_group(name))
info = self._get_opt_info(name, group)
default, opt, override = [info[k] for k in sorted(info.keys())]
if override is not None:
return override
values = []
if self._cparser is not None:
section = group.name if group is not None else 'DEFAULT'
try:
value = opt._get_from_config_parser(self._cparser, section)
except KeyError:
pass
except ValueError as ve:
raise ConfigFileValueError(str(ve))
else:
if not opt.multi:
# No need to continue since the last value wins
return value[-1]
values.extend(value)
name = name if group is None else group.name + '_' + name
value = self._cli_values.get(name)
if value is not None:
if not opt.multi:
return value
return value + values
if values:
return values
if default is not None:
return default
return opt.default
def _substitute(self, value):
"""Perform string template substitution.
        Substitute any template variables (e.g. $foo, ${bar}) in the supplied
string value(s) with opt values.
:param value: the string value, or list of string values
:returns: the substituted string(s)
"""
if isinstance(value, list):
return [self._substitute(i) for i in value]
elif isinstance(value, str):
tmpl = string.Template(value)
return tmpl.safe_substitute(self.StrSubWrapper(self))
else:
return value
def _get_group(self, group_or_name, autocreate=False):
"""Looks up a OptGroup object.
Helper function to return an OptGroup given a parameter which can
either be the group's name or an OptGroup object.
The OptGroup object returned is from the internal dict of OptGroup
objects, which will be a copy of any OptGroup object that users of
the API have access to.
:param group_or_name: the group's name or the OptGroup object itself
:param autocreate: whether to auto-create the group if it's not found
:raises: NoSuchGroupError
"""
group = group_or_name if isinstance(group_or_name, OptGroup) else None
group_name = group.name if group else group_or_name
if not group_name in self._groups:
if not group is None or not autocreate:
raise NoSuchGroupError(group_name)
self.register_group(OptGroup(name=group_name))
return self._groups[group_name]
def _get_opt_info(self, opt_name, group=None):
"""Return the (opt, override, default) dict for an opt.
:param opt_name: an opt name/dest
:param group: an optional group name or OptGroup object
:raises: NoSuchOptError, NoSuchGroupError
"""
if group is None:
opts = self._opts
else:
group = self._get_group(group)
opts = group._opts
if not opt_name in opts:
raise NoSuchOptError(opt_name, group)
return opts[opt_name]
def _parse_config_files(self):
"""Parse the config files from --config-file and --config-dir.
:raises: ConfigFilesNotFoundError, ConfigFileParseError
"""
config_files = list(self.config_file)
if self.config_dir:
config_dir_glob = os.path.join(self.config_dir, '*.conf')
config_files += sorted(glob.glob(config_dir_glob))
self._cparser = MultiConfigParser()
try:
read_ok = self._cparser.read(config_files)
except iniparser.ParseError as pe:
raise ConfigFileParseError(pe.filename, str(pe))
if read_ok != config_files:
not_read_ok = filter(lambda f: f not in read_ok, config_files)
raise ConfigFilesNotFoundError(not_read_ok)
def _check_required_opts(self):
"""Check that all opts marked as required have values specified.
:raises: RequiredOptError
"""
for info, group in self._all_opt_infos():
default, opt, override = [info[k] for k in sorted(info.keys())]
if opt.required:
if (default is not None or override is not None):
continue
if self._get(opt.name, group) is None:
raise RequiredOptError(opt.name, group)
def _parse_cli_opts(self, args):
"""Parse command line options.
Initializes the command line option parser and parses the supplied
command line arguments.
:param args: the command line arguments
:returns: a dict of parsed option values
:raises: SystemExit, DuplicateOptError
"""
self._args = args
for opt, group in self._all_opts():
opt._add_to_cli(self._oparser, group)
values, leftovers = self._oparser.parse_args(args)
return vars(values), leftovers
class GroupAttr(collections.Mapping):
"""
A helper class representing the option values of a group as a mapping
and attributes.
"""
def __init__(self, conf, group):
"""Construct a GroupAttr object.
:param conf: a ConfigOpts object
:param group: an OptGroup object
"""
self.conf = conf
self.group = group
def __getattr__(self, name):
"""Look up an option value and perform template substitution."""
return self.conf._get(name, self.group)
def __getitem__(self, key):
"""Look up an option value and perform string substitution."""
return self.__getattr__(key)
def __contains__(self, key):
"""Return True if key is the name of a registered opt or group."""
return key in self.group._opts
def __iter__(self):
"""Iterate over all registered opt and group names."""
for key in self.group._opts.keys():
yield key
def __len__(self):
"""Return the number of options and option groups."""
return len(self.group._opts)
class StrSubWrapper(object):
"""
A helper class exposing opt values as a dict for string substitution.
"""
def __init__(self, conf):
"""Construct a StrSubWrapper object.
:param conf: a ConfigOpts object
"""
self.conf = conf
def __getitem__(self, key):
"""Look up an opt value from the ConfigOpts object.
:param key: an opt name
:returns: an opt value
:raises: TemplateSubstitutionError if attribute is a group
"""
value = getattr(self.conf, key)
if isinstance(value, self.conf.GroupAttr):
raise TemplateSubstitutionError(
'substituting group %s not supported' % key)
return value
class CommonConfigOpts(ConfigOpts):
DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
BoolOpt('debug',
short='d',
default=False,
help='Print debugging output'),
BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output'),
]
logging_cli_opts = [
StrOpt('log-config',
metavar='PATH',
help='If this option is specified, the logging configuration '
'file specified is used and overrides any other logging '
'options specified. Please see the Python logging module '
'documentation for details on logging configuration '
'files.'),
StrOpt('log-format',
default=DEFAULT_LOG_FORMAT,
metavar='FORMAT',
help='A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'Default: %default'),
StrOpt('log-date-format',
default=DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %(asctime)s in log records. '
'Default: %default'),
StrOpt('log-file',
metavar='PATH',
help='(Optional) Name of log file to output to. '
'If not set, logging will go to stdout.'),
StrOpt('log-dir',
help='(Optional) The directory to keep log files in '
'(will be prepended to --logfile)'),
BoolOpt('use-syslog',
default=False,
help='Use syslog for logging.'),
StrOpt('syslog-log-facility',
default='LOG_USER',
help='syslog facility to receive log lines')
]
def __init__(self):
super(CommonConfigOpts, self).__init__()
self.register_cli_opts(self.common_cli_opts)
self.register_cli_opts(self.logging_cli_opts)
CONF = CommonConfigOpts()
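# A minimal usage sketch (illustrative only): registering an extra CLI option
# on the module-level CONF object. The option name, default and help text are
# assumptions made up for this example, not part of the module.
if __name__ == '__main__':
    example_opts = [
        StrOpt('bind-host',
               default='0.0.0.0',
               help='Illustrative option: IP address to listen on'),
    ]
    CONF.register_cli_opts(example_opts)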
| apache-2.0 | 1,294,673,558,742,574,300 | 32.517308 | 79 | 0.597969 | false |
malmiron/incubator-airflow | airflow/operators/bash_operator.py | 1 | 5597 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import signal
from subprocess import Popen, STDOUT, PIPE
from tempfile import gettempdir, NamedTemporaryFile
from builtins import bytes
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.file import TemporaryDirectory
from airflow.utils.operator_helpers import context_to_airflow_vars
class BashOperator(BaseOperator):
"""
Execute a Bash script, command or set of commands.
:param bash_command: The command, set of commands or reference to a
bash script (must be '.sh') to be executed. (templated)
:type bash_command: str
:param xcom_push: If xcom_push is True, the last line written to stdout
will also be pushed to an XCom when the bash command completes.
:type xcom_push: bool
:param env: If env is not None, it must be a mapping that defines the
environment variables for the new process; these are used instead
of inheriting the current process environment, which is the default
behavior. (templated)
:type env: dict
:param output_encoding: Output encoding of bash command
:type output_encoding: str
    On execution of this operator the task will be up for retry
    when an exception is raised. However, if a sub-command exits with a
    non-zero value, Airflow will not recognize it as a failure unless the
    whole shell exits with a failure. The easiest way of achieving this is to
    prefix the command with ``set -e;``
Example:
.. code-block:: python
bash_command = "set -e; python3 script.py '{{ next_execution_date }}'"
"""
template_fields = ('bash_command', 'env')
template_ext = ('.sh', '.bash',)
ui_color = '#f0ede4'
@apply_defaults
def __init__(
self,
bash_command,
xcom_push=False,
env=None,
output_encoding='utf-8',
*args, **kwargs):
super(BashOperator, self).__init__(*args, **kwargs)
self.bash_command = bash_command
self.env = env
self.xcom_push_flag = xcom_push
self.output_encoding = output_encoding
def execute(self, context):
"""
Execute the bash command in a temporary directory
which will be cleaned afterwards
"""
self.log.info("Tmp dir root location: \n %s", gettempdir())
# Prepare env for child process.
if self.env is None:
self.env = os.environ.copy()
airflow_context_vars = context_to_airflow_vars(context,
in_env_var_format=True)
self.log.info("Exporting the following env vars:\n" +
'\n'.join(["{}={}".format(k, v)
for k, v in
airflow_context_vars.items()]))
self.env.update(airflow_context_vars)
self.lineage_data = self.bash_command
with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:
f.write(bytes(self.bash_command, 'utf_8'))
f.flush()
fname = f.name
script_location = os.path.abspath(fname)
self.log.info(
"Temporary script location: %s",
script_location
)
def pre_exec():
# Restore default signal disposition and invoke setsid
for sig in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
if hasattr(signal, sig):
signal.signal(getattr(signal, sig), signal.SIG_DFL)
os.setsid()
self.log.info("Running command: %s", self.bash_command)
sp = Popen(
['bash', fname],
stdout=PIPE, stderr=STDOUT,
cwd=tmp_dir, env=self.env,
preexec_fn=pre_exec)
self.sp = sp
self.log.info("Output:")
line = ''
for line in iter(sp.stdout.readline, b''):
line = line.decode(self.output_encoding).rstrip()
self.log.info(line)
sp.wait()
self.log.info(
"Command exited with return code %s",
sp.returncode
)
if sp.returncode:
raise AirflowException("Bash command failed")
if self.xcom_push_flag:
return line
def on_kill(self):
self.log.info('Sending SIGTERM signal to bash process group')
os.killpg(os.getpgid(self.sp.pid), signal.SIGTERM)
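# A minimal usage sketch (illustrative only; assumes a working Airflow
# installation, and the dag_id, start date and command below are made up
# for the example):
if __name__ == '__main__':
    from datetime import datetime
    from airflow import DAG
    example_dag = DAG('bash_operator_example',
                      start_date=datetime(2018, 1, 1),
                      schedule_interval=None)
    echo_task = BashOperator(
        task_id='echo_run_date',
        bash_command='echo "run date: {{ ds }}"',
        xcom_push=True,  # push the last line written to stdout to XCom
        dag=example_dag)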
| apache-2.0 | 6,163,552,212,366,461,000 | 36.313333 | 79 | 0.595676 | false |
sjpet/epysteme | epysteme/sets/sql.py | 1 | 19387 | # -*- coding: utf-8 -*-
import os
import sqlite3
import weakref
from collections import OrderedDict
from functools import reduce
from ..helpers import (repeat_to_match,
key_depth,
expand_key,
extend_functions,
tuple_,
maybe_list,
sql_friendly)
class _SqliteLocIndexer(object):
"""Label-location based indexer for selection by label in Sqlite data
frames.
"""
def __init__(self, parent):
self._parent = weakref.ref(parent)
def __getitem__(self, item):
for observation in self._parent().query(index=item):
yield observation
class _SqliteIlocIndexer(object):
"""Indexer for selection by integer index in Sqlite data
frames.
"""
def __init__(self, parent):
self._parent = weakref.ref(parent)
def __getitem__(self, item):
for observation in self._parent().query(int_index=item):
yield observation
class SqliteSeries(object):
"""One-dimensional array analogous to a pandas series. Depends on a parent
SqliteDataFrame which holds the database connection and index.
"""
def __init__(self,
parent,
table_name,
column_name,
sql_index,
func=lambda x: x):
"""SqliteSeries init method.
Parameters
----------
parent : SqliteDataFrame
Parent data frame
table_name : str
Database table name
column_name : str
Column name in the given table holding data values
sql_index : str, optional
Column name in the given table holding index values
func : function, optional
Function to map over the values
"""
self.parent = weakref.ref(parent)
self.table_name = table_name
self.column_name = column_name
self.sql_index = sql_index
self.func = func
self._loc = _SqliteLocIndexer(self)
self._iloc = _SqliteIlocIndexer(self)
@property
def loc(self):
return self._loc
@property
def iloc(self):
return self._iloc
@property
def n_columns(self):
if isinstance(self.column_name, str):
return 1
else:
return len(self.column_name)
@property
def index(self):
# return [k for k in range(len(self))]
return self.parent().index
def __iter__(self):
if isinstance(self.column_name, list):
observations = zip(*[self._iter_single(table_name,
column_name,
index)
for table_name, column_name, index in
zip(self.table_name,
self.column_name,
self.sql_index)])
for observation in observations:
yield self.func(*observation)
else:
for observation in self._iter_single(self.table_name,
self.column_name,
self.sql_index):
yield self.func(observation)
# def __iter__(self):
# for observation in self.query():
# yield observation
def __len__(self):
n_observations = 0
for _ in iter(self):
n_observations += 1
return n_observations
def _iter_single(self, table_name, column_name, index):
if table_name is None:
for x in self.parent()[column_name]:
yield x
elif self.parent().connection is not None:
crs = self.parent().connection.cursor()
crs.execute("SELECT {column} FROM {table} ORDER BY {index}".format(
column=column_name,
table=table_name,
index=index))
for x in crs:
yield x[0]
else:
while False:
yield None
def query(self, index=None, int_index=None):
"""Query the database for values.
Parameters
----------
index : list or slice of index labels or single index label, optional
int_index : list or slice of integer indices or single index, optional
            index takes precedence if not None
Yields
------
list
The next observation
"""
this_column = self.parent().column_name(self)
for observation in self.parent().query(index=index,
int_index=int_index,
columns=this_column):
yield observation
class SqliteDataFrame(object):
"""Two-dimensional data structure providing a read-only connection to an
SQLite database and an interface similar to that of a pandas data frame.
"""
def __init__(self, path=None, columns=None, index_col=None):
"""SqliteDataFrame init method.
Parameters
----------
path : str, optional
Path to an SQLite database
columns : dict, optional
Dictionary of columns to add, given as
{key: (table_name, column_name, index_name)}
index_col : dict key, optional
Key of the column to use as index
"""
self._columns = OrderedDict()
self._index = None
self._connection = None
self._loc = _SqliteLocIndexer(self)
self._iloc = _SqliteIlocIndexer(self)
if path is not None:
self.connect(path)
if columns is not None:
for column_name, column_details in columns.items():
self[column_name] = column_details
if index_col is not None:
self.set_index(index_col)
@property
def database(self):
"""Path to the connected database, if any"""
if self._connection is not None:
crs = self._connection.cursor()
crs.execute("PRAGMA database_list")
_, _, db_path = crs.fetchone()
return db_path
else:
return None
@property
def connection(self):
"""Database connection"""
return self._connection
@property
def columns(self):
return [column for column in self._columns.keys()]
@columns.setter
def columns(self, value):
if not len(value) == len(self._columns):
error_message = ("Length mismatch, data frame has {n_data} "
"columns but {n_names} names were given")
raise ValueError(error_message.format(n_data=len(self._columns),
n_names=len(value)))
if not len(value) == len(set(value)):
raise ValueError("Column names must be unique")
max_depth = max(map(key_depth, value))
expanded_names = [expand_key(name, max_depth) for name in value]
self._columns = OrderedDict(
[(key, val) for key, val in zip(expanded_names,
list(self._columns.values()))])
@property
def index(self):
if self._index is None:
try:
return list(range(len(next(iter(self._columns.values())))))
except StopIteration:
return []
else:
return list(self._index)
@property
def loc(self):
return self._loc
@property
def iloc(self):
return self._iloc
    def _expand_item(self, items):
        """Expand a list of items to full multi-indexing keys."""
depth = key_depth(next(iter(self._columns.keys())))
if depth == 0:
return items
expanded_items = []
for item in items:
if key_depth(item) == depth:
expanded_items.append(item)
else:
expanded_items.extend(
[key for key in self._columns
if all(a == b for a, b in zip(tuple_(item), key))])
return expanded_items
def __setitem__(self, item, value):
if isinstance(value, SqliteSeries):
self._columns[item] = value
else:
table_name, column_name, index = repeat_to_match(*value[:3])
if len(value) > 3:
func = value[3]
else:
def func(x): return x
series = SqliteSeries(self, table_name, column_name, index, func)
self._columns[item] = series
def __getitem__(self, item):
if isinstance(item, list):
items = self._expand_item(item)
return SqliteDataFrame(self.database,
columns={column: self._columns[column]
for column in items})
else:
items = self._expand_item([item])
if len(items) == 1:
return self._columns[items[0]]
else:
return SqliteDataFrame(
self.database,
columns={column[1:]: self._columns[column]
for column in items})
def __iter__(self):
for observation in self.query():
yield observation
def connect(self, path):
"""Connect to a database.
Parameters
----------
path : str
Path to the database
"""
current_connection = self._connection
if os.path.isfile(path):
connection = sqlite3.connect("file:{}?mode=ro".format(path),
uri=True)
try:
connection.execute("PRAGMA schema_version")
self._connection = connection
if current_connection is not None:
current_connection.close()
except sqlite3.DatabaseError:
raise ValueError(
"{} is not a valid SQLite database".format(path))
else:
raise ValueError("{} is not a file".format(path))
def drop(self, label, axis=0):
"""Remove a label from the requested axis.
Parameters
----------
label : str
Label to be removed
axis : int
Axis from which to remove the label.
"""
if axis == 1:
if label in self._columns:
self._columns.pop(label)
else:
raise KeyError("No column labeled '{}'".format(label))
else:
raise ValueError("Dropping of indices is not yet implemented")
def rename(self, columns=None):
"""Rename a label.
Parameters
----------
columns : dict, optional
Dictionary of column label substitutions
"""
if columns is not None:
self.columns = [key if key not in columns else columns[key]
for key in self.columns]
def set_index(self, index):
"""Set a column as index.
Parameters
----------
index : column label
"""
if index in self._columns:
self._index = self._columns.pop(index)
else:
raise ValueError("No such column: {}".format(index))
def column_name(self, target):
"""Find the column label of a series if it is part of the data frame.
Parameters
----------
target : SqliteSeries
Returns
-------
column label or None
"""
for column_name, column in self._columns.items():
if column == target:
return column_name
def query(self, index=None, int_index=None, columns=None):
"""Query the database for values.
Parameters
----------
index : list or slice of index labels or single index label, optional
int_index : list or slice of integer indices or single index, optional
            index takes precedence if not None
columns : list of column labels or a single column label
Yields
------
list
The next observation
"""
if columns is None:
columns_ = [column for column in self._columns.values()]
elif isinstance(columns, list):
columns_ = [self._columns[column] for column in columns]
else:
columns_ = [self._columns[columns]]
if any(column.func is not None for column in columns_):
def f_0():
return []
f, n = reduce(extend_functions,
[(column.func, column.n_columns)
for column in columns_],
(f_0, 0))
else:
f = None
crs = self._connection.cursor()
query_ = self._build_query(index=index,
int_index=int_index,
columns=columns)
crs.execute(query_)
if f is None:
for observation in crs:
yield maybe_list(observation)
else:
for observation in crs:
yield(maybe_list(f(*observation)))
def _build_query(self, index=None, int_index=None, columns=None):
"""Build a suitable SQL query.
Parameters
----------
index : list or slice of index labels or single index label, optional
int_index : list or slice of integer indices or single index, optional
            index takes precedence if not None
Returns
-------
str
An SQL query
"""
if columns is None:
columns_ = [column for column in self._columns.values()]
elif isinstance(columns, list):
columns_ = [self._columns[column] for column in columns]
else:
columns_ = [self._columns[columns]]
join_string = "INNER JOIN {table} ON {table}.{index} " \
"== {master}.{master_index}"
column_list = []
table_list = []
for column in columns_:
if isinstance(column.column_name, str):
column_list.append(".".join((column.table_name,
column.column_name)))
table_list.append((column.table_name, column.sql_index))
else:
column_list.extend(
".".join((table_, column_))
for table_, column_ in zip(column.table_name,
column.column_name))
table_list.extend((table_, index_)
for table_, index_ in zip(column.table_name,
column.sql_index))
columns = ", ".join(column_list)
first_column = columns_[0]
if isinstance(first_column.table_name, list):
table = first_column.table_name[0]
master_index = first_column.sql_index[0]
else:
table = first_column.table_name
master_index = first_column.sql_index
joins_set = set(join_string.format(table=table_,
index=index_,
master=table,
master_index=master_index)
for table_, index_ in table_list
if not table_ == table)
if len(joins_set) > 0:
joins = " " + "".join(joins_set)
else:
joins = ""
indices = ""
limit_and_offset = ""
if index is not None and self._index is not None:
inner_query = \
"SELECT {index_index} FROM {index_table}{where_clause}"
if isinstance(index, slice):
slice_parts = []
if index.start is not None:
slice_parts.append(
"{index_column}>={slice_start}".format(
index_column=self._index.column_name,
slice_start=sql_friendly(index.start)))
if index.stop is not None:
slice_parts.append("{index_column}<={slice_stop}".format(
index_column=self._index.column_name,
slice_stop=sql_friendly(index.stop)))
if index.step is not None:
raise NotImplementedError("Slices with steps are not yet "
"supported")
if len(slice_parts) > 0:
where_clause = " WHERE " + " AND ".join(slice_parts)
else:
where_clause = ""
elif isinstance(index, list):
where_clause = \
" WHERE {index_column} IN ({index_values})".format(
index_column=self._index.column_name,
index_values=", ".join(sql_friendly(value)
for value in index))
else:
where_clause = " WHERE {index_column}={index_value}".format(
index_column=self._index.column_name,
index_value=sql_friendly(index))
indices = " WHERE {index} IN ({inner_query})".format(
index=".".join([first_column.table_name,
first_column.sql_index]),
inner_query=inner_query.format(
index_index=self._index.sql_index,
index_table=self._index.table_name,
where_clause=where_clause))
elif index is not None or int_index is not None:
if index is None:
index = int_index
elif isinstance(index, slice):
if index.stop is not None:
# mimic pandas by including the stop
index = slice(index.start, index.stop + 1, index.step)
if isinstance(index, slice):
if index.start is None and index.stop is None:
pass
elif index.stop is None:
limit_and_offset = \
" LIMIT -1 OFFSET {}".format(index.start)
elif index.start is None:
limit_and_offset = " LIMIT {}".format(index.stop)
else:
limit_and_offset = " LIMIT {limit} OFFSET {offset}".format(
limit=index.stop - index.start, offset=index.start)
elif isinstance(index, list):
indices = " WHERE {table}.ROWID IN ({index_values})".format(
table=first_column.table_name,
index_values=", ".join(str(value + 1) for value in index))
else:
limit_and_offset = " LIMIT 1 OFFSET {}".format(index)
else:
pass
query_template = \
"SELECT {columns} FROM {table}{joins}{indices}{limit_and_offset}"
return query_template.format(**locals())
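# A minimal usage sketch (illustrative only). The database file and the
# samples(id, temperature) table are assumptions made up for this example and
# are not part of the module.
if __name__ == '__main__':
    df = SqliteDataFrame('measurements.db')
    df['celsius'] = ('samples', 'temperature', 'id')
    # a derived column: the same source column mapped through a function
    df['fahrenheit'] = ('samples', 'temperature', 'id',
                        lambda c: c * 9.0 / 5.0 + 32.0)
    for celsius, fahrenheit in df:
        print(celsius, fahrenheit)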
| mit | 4,250,631,575,813,475,000 | 32.834206 | 79 | 0.497911 | false |
blackpan2/HealthNet | src/healthnet/patient/forms.py | 1 | 5185 | """
Application: HealthNet
File: /patient/forms.py
Authors:
- Nathan Stevens
- Philip Bedward
- Daniel Herzig
- George Herde
- Samuel Launt
Description:
- This file contains all view controller information
"""
from base.models import ExtendedStay
from django.apps import apps
from django import forms
from django.contrib.auth.models import User
from django.forms.extras.widgets import SelectDateWidget
from django.forms.widgets import NumberInput
address = apps.get_model('base', 'Address')
person = apps.get_model('base', 'Person')
insurance = apps.get_model('base', 'Insurance')
doctor = apps.get_model('base', 'Doctor')
nurse = apps.get_model('base', 'Nurse')
admin = apps.get_model('base', 'Admin')
# Custom forms for the PatientRegistration
class UserForm(forms.ModelForm):
"""
@class: UserForm
@description: This form is where the User information is updated
"""
first_name = forms.CharField(required=True, label='First Name:')
last_name = forms.CharField(required=True, label='Last Name:')
email = forms.EmailField(required=True, label='Email:')
username = forms.CharField(required=True, label='Username:',
help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.')
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class PersonRegistrationForm(forms.ModelForm):
"""
@class: PersonRegistrationForm
@description: This form is where the Person specific information is entered
"""
birthday = forms.DateField(widget=SelectDateWidget(years={1950, 1951, 1952, 1953, 1954, 1955, 1956,
1957, 1958, 1959, 1960, 1961, 1962, 1963,
1964, 1965, 1966, 1967, 1968, 1969, 1970,
1971, 1972, 1973, 1974, 1975, 1976, 1977,
1978, 1979, 1980, 1981, 1982, 1983, 1984,
1985, 1986, 1987, 1988, 1989, 1990, 1991,
1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005,
2006, 2007, 2008, 2009, 2010, 2011, 2012,
2013, 2014, 2015}),
label='Birthday:')
# ssn = forms.IntegerField(widget=NumberInput, label='SSN:')
# phoneNumber = USPhoneNumberField()
class Meta:
model = apps.get_model('base', 'Person')
fields = ('birthday', 'phoneNumber')
exclude = ('ssn',)
class InsuranceForm(forms.ModelForm):
"""
@class: InsuranceForm
@description: This form is where the Insurance information is supplied
"""
name = forms.CharField(label='Name:')
policyNumber = forms.IntegerField(label='Policy Number:')
class Meta:
model = apps.get_model('base', 'Insurance')
fields = ('name', 'policyNumber')
exclude = ('addressID',)
class AddressForm(forms.ModelForm):
"""
@class: AddressForm
@description: This form is where the Address information is provided
"""
# zip = USZipCodeField()
# state = USStateField()
#
class Meta:
model = apps.get_model('base', 'Address')
fields = ('street', 'zip', 'city', 'state')
class EmergencyContactForm(forms.ModelForm):
"""
@class: EmergencyContactForm
@description: This form is where the Emergency Contact information is entered
"""
firstName = forms.CharField(required=True, label='First Name:')
lastName = forms.CharField(required=True, label='Last Name:')
# emergencyNumber = USPhoneNumberField()
class Meta:
model = apps.get_model('base', 'EmergencyContact')
fields = ('firstName', 'lastName', 'emergencyNumber')
exclude = ('personID',)
class AdminForm(forms.ModelForm):
hospital = forms.ModelChoiceField(queryset=admin.objects.all(), empty_label='Choose A Hospital')
class Meta:
model = admin
fields = ('hospital',)
class DeleteDoctor(forms.ModelForm):
class Meta:
model = doctor
fields = []
class DeleteNurse(forms.ModelForm):
class Meta:
model = nurse
fields = []
class DeleteAdmin(forms.ModelForm):
class Meta:
model = admin
fields = []
class AdmitPatient(forms.ModelForm):
endDate = forms.DateField(label='Choose A date to discharge this patient')
endTime = forms.TimeField(label='Choose A time to discharge this patient')
class Meta:
model = ExtendedStay
fields = ('endDate','endTime')
class DischargePatient(forms.ModelForm):
class Meta:
model = ExtendedStay
fields = []
class TransferPatientForm(forms.ModelForm):
class Meta:
model = ExtendedStay
fields = []
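# A minimal usage sketch (illustrative only), e.g. inside a view or test of
# this application; the field values below are made-up examples:
#
#     form = AddressForm(data={'street': '1 Lomb Memorial Dr', 'zip': '14623',
#                              'city': 'Rochester', 'state': 'NY'})
#     if form.is_valid():
#         address = form.save()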
| mit | -8,182,136,124,556,290,000 | 32.668831 | 113 | 0.580714 | false |
toirl/ringo | ringo/lib/helpers/appinfo.py | 1 | 4117 | """Module to get information about the application"""
import os
import pkg_resources
from pyramid.threadlocal import get_current_registry
from ringo.lib.sitetree import build_breadcrumbs, site_tree_branches
def get_ringo_version():
return pkg_resources.get_distribution('ringo').version
def get_app_inheritance_path():
"""Returns a list of application names. The names describe the path
    to the root of the application inheritance. E.g. if the current
    application is 'foo', which is based on 'bar', which is based on
    'ringo', the function will return the following result: ['foo', 'bar',
'ringo'].
The default path is [<nameofcurrentapp>, "ringo"]. The path can be
extended by setting the app.base config variable.
    :returns: List of application names which build the inheritance path.
"""
path = ['ringo']
registry = get_current_registry()
settings = registry.settings
base = settings.get("app.base")
if base:
path.append(base)
path.append(get_app_name())
return reversed(path)
def get_app_name():
registry = get_current_registry()
return registry.__name__
def get_app_version():
return pkg_resources.get_distribution(get_app_name()).version
def get_app_location(name=None):
if not name:
name = get_app_name()
return pkg_resources.get_distribution(name).location
def get_app_url(request):
"""Returns the path of the application under which the application
is hosted on the server.
.. note::
This function is a helper function. It is only used to build
        correct URLs for client-side AJAX requests in case the
application is hosted in a subpath.
Example:
If the application is hosted on "http://localhost:6543/foo" the
function will return "foo". If it is hosted under the root
directory '' is returned."""
return request.environ.get("SCRIPT_NAME", "")
def get_app_mode(request):
"""Will return a tuple of the mode configuration (if configured)
    Tuple: (mode, desc, color_primary, color_secondary)
If no mode is configured return None.
:request: Current request
    :return: Tuple of mode configuration
"""
settings = request.registry.settings
mode = settings.get("app.mode")
desc = settings.get("app.mode_desc", "").decode('utf-8')
color_primary = settings.get("app.mode_color_primary", "#F2DEDE")
color_secondary = settings.get("app.mode_color_secondary", "red")
if mode:
return (mode, desc, color_primary, color_secondary)
return None
def get_app_title():
"""Will return the title of the application
:return: The title of the application"""
registry = get_current_registry()
settings = registry.settings
return settings['app.title']
def get_path_to(location, app=None):
    """Will return the full pathname of the given file name within the
    path. The path is relative to the application package (pkg_resource
    location + resource name). You can define an alternative
    application."""
if app:
app_name = app
else:
app_name = get_app_name()
base_path = os.path.join(get_app_location(app_name), app_name)
return os.path.join(base_path, location)
def get_breadcrumbs(request, strategy=None):
"""Will return a list of elements which are used to build the
breadcrumbs in the UI.
    The function takes a strategy argument which is called to build this
    list instead of the default mechanism of ringo. The strategy
    function takes the current request as its only argument.
    The returned list currently must have the following format::
[(label of element, url of element), (), ...]
    The last element in the list should be the current element and has no
link. (URL is None)
:request: Current request
:strategy: Optional function which is called to build the site tree.
    :returns: List of elements used for building the breadcrumbs.
"""
if strategy is None:
strategy = build_breadcrumbs
tree = {}
for branch in site_tree_branches:
tree.update(branch)
return strategy(request, tree)
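# A minimal usage sketch (illustrative only; these helpers expect a configured
# Pyramid registry, which is normally the case inside a running ringo
# application):
if __name__ == '__main__':
    print(get_ringo_version())
    print(list(get_app_inheritance_path()))
    print(get_path_to(os.path.join('templates', 'index.html')))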
| gpl-2.0 | 7,238,456,900,135,183,000 | 29.954887 | 72 | 0.686422 | false |
DaveA50/lbry | lbrynet/core/Error.py | 1 | 1428 | class PriceDisagreementError(Exception):
pass
class DuplicateStreamHashError(Exception):
pass
class DownloadCanceledError(Exception):
pass
class RequestCanceledError(Exception):
pass
class InsufficientFundsError(Exception):
pass
class ConnectionClosedBeforeResponseError(Exception):
pass
class KeyFeeAboveMaxAllowed(Exception):
pass
class UnknownNameError(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return repr(self.name)
class InvalidNameError(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return repr(self.name)
class UnknownStreamTypeError(Exception):
def __init__(self, stream_type):
self.stream_type = stream_type
def __str__(self):
return repr(self.stream_type)
class InvalidStreamDescriptorError(Exception):
pass
class InvalidStreamInfoError(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return repr(self.name)
class MisbehavingPeerError(Exception):
pass
class InvalidDataError(MisbehavingPeerError):
pass
class NoResponseError(MisbehavingPeerError):
pass
class InvalidResponseError(MisbehavingPeerError):
pass
class NoSuchBlobError(Exception):
pass
class NoSuchStreamHashError(Exception):
pass
class InvalidBlobHashError(Exception):
pass | mit | -8,200,112,935,699,734,000 | 14.877778 | 53 | 0.69958 | false |
disqus/django-old | django/contrib/formtools/wizard/views.py | 1 | 26572 | import re
from django import forms
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.forms import formsets, ValidationError
from django.views.generic import TemplateView
from django.utils.datastructures import SortedDict
from django.utils.decorators import classonlymethod
from django.contrib.formtools.wizard.storage import get_storage
from django.contrib.formtools.wizard.storage.exceptions import NoFileStorageConfigured
from django.contrib.formtools.wizard.forms import ManagementForm
def normalize_name(name):
new = re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', '_\\1', name)
return new.lower().strip('_')
class StepsHelper(object):
def __init__(self, wizard):
self._wizard = wizard
def __dir__(self):
return self.all
def __len__(self):
return self.count
def __repr__(self):
return '<StepsHelper for %s (steps: %s)>' % (self._wizard, self.all)
@property
def all(self):
"Returns the names of all steps/forms."
return self._wizard.get_form_list().keys()
@property
    def count(self):
        "Returns the total number of steps/forms in the wizard."
return len(self.all)
@property
def current(self):
"""
Returns the current step. If no current step is stored in the
storage backend, the first step will be returned.
"""
return self._wizard.storage.current_step or self.first
@property
def first(self):
"Returns the name of the first step."
return self.all[0]
@property
def last(self):
"Returns the name of the last step."
return self.all[-1]
@property
def next(self):
"Returns the next step."
return self._wizard.get_next_step()
@property
def prev(self):
"Returns the previous step."
return self._wizard.get_prev_step()
@property
def index(self):
"Returns the index for the current step."
return self._wizard.get_step_index()
@property
def step0(self):
return int(self.index)
@property
def step1(self):
return int(self.index) + 1
class WizardView(TemplateView):
"""
The WizardView is used to create multi-page forms and handles all the
storage and validation stuff. The wizard is based on Django's generic
class based views.
"""
storage_name = None
form_list = None
initial_dict = None
instance_dict = None
condition_dict = None
template_name = 'formtools/wizard/wizard_form.html'
def __repr__(self):
return '<%s: forms: %s>' % (self.__class__.__name__, self.form_list)
@classonlymethod
def as_view(cls, *args, **kwargs):
"""
This method is used within urls.py to create unique formwizard
instances for every request. We need to override this method because
we add some kwargs which are needed to make the formwizard usable.
"""
initkwargs = cls.get_initkwargs(*args, **kwargs)
return super(WizardView, cls).as_view(**initkwargs)
@classmethod
def get_initkwargs(cls, form_list, initial_dict=None,
instance_dict=None, condition_dict=None, *args, **kwargs):
"""
Creates a dict with all needed parameters for the form wizard instances.
* `form_list` - is a list of forms. The list entries can be single form
classes or tuples of (`step_name`, `form_class`). If you pass a list
of forms, the formwizard will convert the class list to
(`zero_based_counter`, `form_class`). This is needed to access the
form for a specific step.
* `initial_dict` - contains a dictionary of initial data dictionaries.
The key should be equal to the `step_name` in the `form_list` (or
the str of the zero based counter - if no step_names added in the
`form_list`)
        * `instance_dict` - contains a dictionary of instance objects. This dict
is only used when `ModelForm`s are used. The key should be equal to
the `step_name` in the `form_list`. Same rules as for `initial_dict`
apply.
* `condition_dict` - contains a dictionary of boolean values or
          callables. If the value for a specific `step_name` is callable it
will be called with the formwizard instance as the only argument.
If the return value is true, the step's form will be used.
"""
kwargs.update({
'initial_dict': initial_dict or {},
'instance_dict': instance_dict or {},
'condition_dict': condition_dict or {},
})
init_form_list = SortedDict()
assert len(form_list) > 0, 'at least one form is needed'
# walk through the passed form list
for i, form in enumerate(form_list):
if isinstance(form, (list, tuple)):
# if the element is a tuple, add the tuple to the new created
# sorted dictionary.
init_form_list[unicode(form[0])] = form[1]
else:
# if not, add the form with a zero based counter as unicode
init_form_list[unicode(i)] = form
# walk through the new created list of forms
for form in init_form_list.itervalues():
if issubclass(form, formsets.BaseFormSet):
# if the element is based on BaseFormSet (FormSet/ModelFormSet)
# we need to override the form variable.
form = form.form
# check if any form contains a FileField, if yes, we need a
# file_storage added to the formwizard (by subclassing).
for field in form.base_fields.itervalues():
if (isinstance(field, forms.FileField) and
not hasattr(cls, 'file_storage')):
raise NoFileStorageConfigured
# build the kwargs for the formwizard instances
kwargs['form_list'] = init_form_list
return kwargs
def get_wizard_name(self):
return normalize_name(self.__class__.__name__)
def get_prefix(self):
# TODO: Add some kind of unique id to prefix
return self.wizard_name
def get_form_list(self):
"""
This method returns a form_list based on the initial form list but
checks if there is a condition method/value in the condition_list.
If an entry exists in the condition list, it will call/read the value
and respect the result. (True means add the form, False means ignore
the form)
The form_list is always generated on the fly because condition methods
        could use data from other (maybe previous) forms.
"""
form_list = SortedDict()
for form_key, form_class in self.form_list.iteritems():
# try to fetch the value from condition list, by default, the form
# gets passed to the new list.
condition = self.condition_dict.get(form_key, True)
if callable(condition):
# call the value if needed, passes the current instance.
condition = condition(self)
if condition:
form_list[form_key] = form_class
return form_list
def dispatch(self, request, *args, **kwargs):
"""
This method gets called by the routing engine. The first argument is
`request` which contains a `HttpRequest` instance.
The request is stored in `self.request` for later use. The storage
instance is stored in `self.storage`.
After processing the request using the `dispatch` method, the
response gets updated by the storage engine (for example add cookies).
"""
# add the storage engine to the current formwizard instance
self.wizard_name = self.get_wizard_name()
self.prefix = self.get_prefix()
self.storage = get_storage(self.storage_name, self.prefix, request,
getattr(self, 'file_storage', None))
self.steps = StepsHelper(self)
response = super(WizardView, self).dispatch(request, *args, **kwargs)
# update the response (e.g. adding cookies)
self.storage.update_response(response)
return response
def get(self, request, *args, **kwargs):
"""
This method handles GET requests.
If a GET request reaches this point, the wizard assumes that the user
just starts at the first step or wants to restart the process.
        The data of the wizard will be reset before rendering the first step.
"""
self.storage.reset()
# reset the current step to the first step.
self.storage.current_step = self.steps.first
return self.render(self.get_form())
def post(self, *args, **kwargs):
"""
This method handles POST requests.
The wizard will render either the current step (if form validation
wasn't successful), the next step (if the current step was stored
        successfully) or the done view (if no more steps are available)
"""
# Look for a wizard_prev_step element in the posted data which
# contains a valid step name. If one was found, render the requested
# form. (This makes stepping back a lot easier).
wizard_prev_step = self.request.POST.get('wizard_prev_step', None)
if wizard_prev_step and wizard_prev_step in self.get_form_list():
self.storage.current_step = wizard_prev_step
form = self.get_form(
data=self.storage.get_step_data(self.steps.current),
files=self.storage.get_step_files(self.steps.current))
return self.render(form)
# Check if form was refreshed
management_form = ManagementForm(self.request.POST, prefix=self.prefix)
if not management_form.is_valid():
raise ValidationError(
'ManagementForm data is missing or has been tampered.')
form_current_step = management_form.cleaned_data['current_step']
if (form_current_step != self.steps.current and
self.storage.current_step is not None):
# form refreshed, change current step
self.storage.current_step = form_current_step
# get the form for the current step
form = self.get_form(data=self.request.POST, files=self.request.FILES)
# and try to validate
if form.is_valid():
# if the form is valid, store the cleaned data and files.
self.storage.set_step_data(self.steps.current, self.process_step(form))
self.storage.set_step_files(self.steps.current, self.process_step_files(form))
# check if the current step is the last step
if self.steps.current == self.steps.last:
# no more steps, render done view
return self.render_done(form, **kwargs)
else:
# proceed to the next step
return self.render_next_step(form)
return self.render(form)
def render_next_step(self, form, **kwargs):
"""
        This method gets called when the next step/form should be rendered.
`form` contains the last/current form.
"""
# get the form instance based on the data from the storage backend
# (if available).
next_step = self.steps.next
new_form = self.get_form(next_step,
data=self.storage.get_step_data(next_step),
files=self.storage.get_step_files(next_step))
# change the stored current step
self.storage.current_step = next_step
return self.render(new_form, **kwargs)
def render_done(self, form, **kwargs):
"""
This method gets called when all forms passed. The method should also
        re-validate all steps to prevent manipulation. If any form doesn't
validate, `render_revalidation_failure` should get called.
If everything is fine call `done`.
"""
final_form_list = []
# walk through the form list and try to validate the data again.
for form_key in self.get_form_list():
form_obj = self.get_form(step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key))
if not form_obj.is_valid():
return self.render_revalidation_failure(form_key, form_obj, **kwargs)
final_form_list.append(form_obj)
# render the done view and reset the wizard before returning the
        # response. This is needed to prevent rendering done with the
# same data twice.
done_response = self.done(final_form_list, **kwargs)
self.storage.reset()
return done_response
def get_form_prefix(self, step=None, form=None):
"""
Returns the prefix which will be used when calling the actual form for
the given step. `step` contains the step-name, `form` the form which
will be called with the returned prefix.
If no step is given, the form_prefix will determine the current step
automatically.
"""
if step is None:
step = self.steps.current
return str(step)
def get_form_initial(self, step):
"""
Returns a dictionary which will be passed to the form for `step`
        as `initial`. If no initial data was provided while initializing the
        form wizard, an empty dictionary will be returned.
"""
return self.initial_dict.get(step, {})
def get_form_instance(self, step):
"""
        Returns an object which will be passed to the form for `step`
        as `instance`. If no instance object was provided while initializing
        the form wizard, None will be returned.
"""
return self.instance_dict.get(step, None)
def get_form_kwargs(self, step=None):
"""
Returns the keyword arguments for instantiating the form
(or formset) on given step.
"""
return {}
def get_form(self, step=None, data=None, files=None):
"""
Constructs the form for a given `step`. If no `step` is defined, the
current step will be determined automatically.
The form will be initialized using the `data` argument to prefill the
new form. If needed, instance or queryset (for `ModelForm` or
`ModelFormSet`) will be added too.
"""
if step is None:
step = self.steps.current
# prepare the kwargs for the form instance.
kwargs = self.get_form_kwargs(step)
kwargs.update({
'data': data,
'files': files,
'prefix': self.get_form_prefix(step, self.form_list[step]),
'initial': self.get_form_initial(step),
})
if issubclass(self.form_list[step], forms.ModelForm):
# If the form is based on ModelForm, add instance if available.
kwargs.update({'instance': self.get_form_instance(step)})
elif issubclass(self.form_list[step], forms.models.BaseModelFormSet):
# If the form is based on ModelFormSet, add queryset if available.
kwargs.update({'queryset': self.get_form_instance(step)})
return self.form_list[step](**kwargs)
def process_step(self, form):
"""
This method is used to postprocess the form data. By default, it
returns the raw `form.data` dictionary.
"""
return self.get_form_step_data(form)
def process_step_files(self, form):
"""
This method is used to postprocess the form files. By default, it
returns the raw `form.files` dictionary.
"""
return self.get_form_step_files(form)
def render_revalidation_failure(self, step, form, **kwargs):
"""
Gets called when a form doesn't validate when rendering the done
        view. By default, it changes the current step to the failing form's step
and renders the form.
"""
self.storage.current_step = step
return self.render(form, **kwargs)
def get_form_step_data(self, form):
"""
Is used to return the raw form data. You may use this method to
manipulate the data.
"""
return form.data
def get_form_step_files(self, form):
"""
Is used to return the raw form files. You may use this method to
manipulate the data.
"""
return form.files
def get_all_cleaned_data(self):
"""
Returns a merged dictionary of all step cleaned_data dictionaries.
If a step contains a `FormSet`, the key will be prefixed with formset
and contain a list of the formset' cleaned_data dictionaries.
"""
cleaned_data = {}
for form_key in self.get_form_list():
form_obj = self.get_form(
step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key)
)
if form_obj.is_valid():
if isinstance(form_obj.cleaned_data, (tuple, list)):
cleaned_data.update({
'formset-%s' % form_key: form_obj.cleaned_data
})
else:
cleaned_data.update(form_obj.cleaned_data)
return cleaned_data
def get_cleaned_data_for_step(self, step):
"""
Returns the cleaned data for a given `step`. Before returning the
cleaned data, the stored values are being revalidated through the
form. If the data doesn't validate, None will be returned.
"""
if step in self.form_list:
form_obj = self.get_form(step=step,
data=self.storage.get_step_data(step),
files=self.storage.get_step_files(step))
if form_obj.is_valid():
return form_obj.cleaned_data
return None
def get_next_step(self, step=None):
"""
Returns the next step after the given `step`. If no more steps are
available, None will be returned. If the `step` argument is None, the
current step will be determined automatically.
"""
if step is None:
step = self.steps.current
form_list = self.get_form_list()
key = form_list.keyOrder.index(step) + 1
if len(form_list.keyOrder) > key:
return form_list.keyOrder[key]
return None
def get_prev_step(self, step=None):
"""
Returns the previous step before the given `step`. If there are no
steps available, None will be returned. If the `step` argument is
None, the current step will be determined automatically.
"""
if step is None:
step = self.steps.current
form_list = self.get_form_list()
key = form_list.keyOrder.index(step) - 1
if key >= 0:
return form_list.keyOrder[key]
return None
def get_step_index(self, step=None):
"""
Returns the index for the given `step` name. If no step is given,
the current step will be used to get the index.
"""
if step is None:
step = self.steps.current
return self.get_form_list().keyOrder.index(step)
def get_context_data(self, form, *args, **kwargs):
"""
Returns the template context for a step. You can overwrite this method
to add more data for all or some steps. This method returns a
dictionary containing the rendered form step. Available template
context variables are:
* all extra data stored in the storage backend
* `form` - form instance of the current step
* `wizard` - the wizard instance itself
Example:
.. code-block:: python
class MyWizard(FormWizard):
def get_context_data(self, form, **kwargs):
context = super(MyWizard, self).get_context_data(form, **kwargs)
if self.steps.current == 'my_step_name':
context.update({'another_var': True})
return context
"""
context = super(WizardView, self).get_context_data(*args, **kwargs)
context.update(self.storage.extra_data)
context['wizard'] = {
'form': form,
'steps': self.steps,
'management_form': ManagementForm(prefix=self.prefix, initial={
'current_step': self.steps.current,
}),
}
return context
def render(self, form=None, **kwargs):
"""
        Returns a ``HttpResponse`` containing all needed context data.
"""
form = form or self.get_form()
context = self.get_context_data(form, **kwargs)
return self.render_to_response(context)
def done(self, form_list, **kwargs):
"""
        This method must be overridden by a subclass to process the form data
after processing all steps.
"""
raise NotImplementedError("Your %s class has not defined a done() "
"method, which is required." % self.__class__.__name__)
class SessionWizardView(WizardView):
"""
A WizardView with pre-configured SessionStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieWizardView(WizardView):
"""
A WizardView with pre-configured CookieStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
class NamedUrlWizardView(WizardView):
"""
A WizardView with URL named steps support.
"""
url_name = None
done_step_name = None
@classmethod
def get_initkwargs(cls, *args, **kwargs):
"""
We require a url_name to reverse URLs later. Additionally users can
pass a done_step_name to change the URL name of the "done" view.
"""
assert 'url_name' in kwargs, 'URL name is needed to resolve correct wizard URLs'
extra_kwargs = {
'done_step_name': kwargs.pop('done_step_name', 'done'),
'url_name': kwargs.pop('url_name'),
}
initkwargs = super(NamedUrlWizardView, cls).get_initkwargs(*args, **kwargs)
initkwargs.update(extra_kwargs)
assert initkwargs['done_step_name'] not in initkwargs['form_list'], \
'step name "%s" is reserved for "done" view' % initkwargs['done_step_name']
return initkwargs
def get(self, *args, **kwargs):
"""
This renders the form or, if needed, does the http redirects.
"""
step_url = kwargs.get('step', None)
if step_url is None:
if 'reset' in self.request.GET:
self.storage.reset()
self.storage.current_step = self.steps.first
if self.request.GET:
query_string = "?%s" % self.request.GET.urlencode()
else:
query_string = ""
next_step_url = reverse(self.url_name, kwargs={
'step': self.steps.current,
}) + query_string
return redirect(next_step_url)
# is the current step the "done" name/view?
elif step_url == self.done_step_name:
last_step = self.steps.last
return self.render_done(self.get_form(step=last_step,
data=self.storage.get_step_data(last_step),
files=self.storage.get_step_files(last_step)
), **kwargs)
# is the url step name not equal to the step in the storage?
# if yes, change the step in the storage (if name exists)
elif step_url == self.steps.current:
# URL step name and storage step name are equal, render!
return self.render(self.get_form(
data=self.storage.current_step_data,
files=self.storage.current_step_data,
), **kwargs)
elif step_url in self.get_form_list():
self.storage.current_step = step_url
return self.render(self.get_form(
data=self.storage.current_step_data,
files=self.storage.current_step_data,
), **kwargs)
# invalid step name, reset to first and redirect.
else:
self.storage.current_step = self.steps.first
return redirect(self.url_name, step=self.steps.first)
def post(self, *args, **kwargs):
"""
        Do a redirect if the user presses the prev. step button. The rest of this
is super'd from FormWizard.
"""
prev_step = self.request.POST.get('wizard_prev_step', None)
if prev_step and prev_step in self.get_form_list():
self.storage.current_step = prev_step
return redirect(self.url_name, step=prev_step)
return super(NamedUrlWizardView, self).post(*args, **kwargs)
def render_next_step(self, form, **kwargs):
"""
When using the NamedUrlFormWizard, we have to redirect to update the
browser's URL to match the shown step.
"""
next_step = self.get_next_step()
self.storage.current_step = next_step
return redirect(self.url_name, step=next_step)
def render_revalidation_failure(self, failed_step, form, **kwargs):
"""
When a step fails, we have to redirect the user to the first failing
step.
"""
self.storage.current_step = failed_step
return redirect(self.url_name, step=failed_step)
def render_done(self, form, **kwargs):
"""
When rendering the done view, we have to redirect first (if the URL
name doesn't fit).
"""
if kwargs.get('step', None) != self.done_step_name:
return redirect(self.url_name, step=self.done_step_name)
return super(NamedUrlWizardView, self).render_done(form, **kwargs)
class NamedUrlSessionWizardView(NamedUrlWizardView):
"""
A NamedUrlWizardView with pre-configured SessionStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class NamedUrlCookieWizardView(NamedUrlWizardView):
"""
A NamedUrlFormWizard with pre-configured CookieStorageBackend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
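# A minimal usage sketch (illustrative only; the form classes, URL pattern and
# redirect target are assumptions made up for this example):
#
#     from django import forms
#
#     class ContactForm1(forms.Form):
#         subject = forms.CharField(max_length=100)
#
#     class ContactForm2(forms.Form):
#         message = forms.CharField(widget=forms.Textarea)
#
#     class ContactWizard(SessionWizardView):
#         def done(self, form_list, **kwargs):
#             # process self.get_all_cleaned_data() here
#             return redirect('/contact/thanks/')
#
#     # urls.py:
#     # url(r'^contact/$', ContactWizard.as_view([ContactForm1, ContactForm2]))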
| bsd-3-clause | -6,143,436,178,708,382,000 | 37.734694 | 90 | 0.607594 | false |
Tommekster/kotelnik | pokus.py | 1 | 5511 | #!/usr/bin/python3
#import time # time.sleep(5.5)
import http.client
from socket import error as socket_error
import time
logCtrlFile = '/tmp/kotelnik.log'
logTempFile = '/tmp/kotelnikTemps.log'
class connectionError(RuntimeError):
def __init__(self, arg):
self.args = arg
class sensorError(RuntimeError):
def __init__(self, arg):
self.args = arg
def logCtrl(str):
file = open(logCtrlFile,'a')
file.write(str)
file.write("\n")
file.close()
def logTemp(str):
file = open(logTempFile,'a')
file.write(str)
file.write("\n")
file.close()
def switchKotelOnOff(on=False):
	conn = http.client.HTTPConnection('192.168.11.99') # set up a connection to the little boiler controller
if on:
cmd = '/on'
else:
cmd = '/off'
try:
		conn.request('GET',cmd) # have the boiler switched on/off
except (sensorError,connectionError,socket_error) as e:
logCtrl(time.strftime('%d.%m.%Y %H:%M')+'switchKotel('+str(on)+') Exception: '+str(e))
return
else:
logCtrl(time.strftime('%d.%m.%Y %H:%M')+' '+cmd)
def kotelOn():
switchKotelOnOff(True)
def kotelOff():
switchKotelOnOff(False)
def readSens(loc=0):
if loc:
data1 = b'<html><head><title>Kotelnik Senzory</title></head><body><h2>Senzory</h2><pre>\n609\n665\n674\n653\n697\n666\n174\n747\n</pre><hr></body></html>'
else:
		conn = http.client.HTTPConnection('192.168.11.99') # set up a connection to the little boiler controller
		conn.request('GET','/sens') # ask for GET /sens
		r1 = conn.getresponse() # get the response
		if r1.status != 200: # check the response status
raise connectionError('/sens is not 200 OK')
		data1 = r1.read() # read the data
	sens_str = data1.decode('utf8') # convert to a string
	sens = sens_str.split('\n') # split by line breaks
	if len(sens) < 10: # if there are fewer lines, the Atmel is probably just resetting
raise sensorError('Dostal jsem malo dat.',sens)
	del(sens[-1]) # remove the HTML footer
	del(sens[0]) # remove the HTML header
return [int(s) for s in sens]
class mTime:
def __init__(self,_h,_m):
self.h=_h
self.m=_m
	def isLess(self,h,m): # this time has already passed, compared to the given one
return self.h < h or (self.h == h and self.m < m)
	def isGreater(self,h,m): # this time is still to come, compared to the given one
return self.h > h or (self.h == h and self.m > m)
class mDay:
def __init__(self):
self.filledStart = False
pass
def setStartTime(self,h,m):
setattr(self,'start',mTime(h,m))
self.filledStart = True
def setStopTime(self,h,m):
setattr(self,'stop',mTime(h,m))
self.filledStop = True
def setStartStop(self,h,m,hh,mm):
setattr(self,'start',mTime(h,m))
setattr(self,'stop',mTime(hh,mm))
self.filledStart = True
self.filledStart = True
def isTimeForHeating(self):
if not (self.filledStart and self.filledStart):
return False
h = time.localtime().tm_hour
m = time.localtime().tm_min
return self.start.isLess(h,m) and self.stop.isGreater(h,m)
class mWeek:
def __init__(self):
self.days=[mDay() for i in range(0,7)]
#def getDay(self,index):
# return self.days[index]
def isTimeForHeating(self):
day = self.days[time.localtime().tm_wday]
return day.isTimeForHeating()
class Kotelnik:
def __init__(self):
		self.out_temperature = 15.0 # if the outside temperature is higher, do not heat
		self.pipes_temperature = 30.0 # if a pipe is warmer than this, the boiler is heating
		self.week = mWeek()
		self.week.days[0].setStartStop(5,0,22,30) # heating times during the week
self.week.days[1].setStartStop(5,0,22,30)
self.week.days[2].setStartStop(5,0,22,30)
self.week.days[3].setStartStop(5,0,22,30)
self.week.days[4].setStartStop(5,0,23,59)
self.week.days[5].setStartStop(8,0,23,59)
self.week.days[6].setStartStop(8,0,23,0)
		self.timeout_interval = 3600 # when the pipes show it is heated up, how long the boiler should rest
		self.filterWeight = 1/32 # low-pass filter parameter
		self.referenceVoltage=1.1 # reference voltage for measuring the reference "5V"
		self.temperatures = [15.0 for i in range(0,6)] # default temperatures to avoid failures
def refreshTemperature(self):
try:
			sens = readSens() # get the values from the sensors
except (sensorError,connectionError,socket_error) as e:
logCtrl(time.strftime('%d.%m.%Y %H:%M')+' refreshTemperature() Exception: '+str(e))
return
		pom = sens[-2] # VCC meter ratio
		vcc = sens[-1] # value on the VCC meter at VREF
		rawTemps = [s/10.24*vcc/pom*1.1-273 for s in sens[:-2]] # convert the sensor readings to degrees Celsius
newTemps = [self.temperatures[i] + (rawTemps[i] - self.temperatures[i])*self.filterWeight for i in range(0,6)]
self.temperatures = newTemps
tempstr='%d' % int(time.time())
for t in self.temperatures:
tempstr+=" %.5f" % t
logTemp(tempstr)
def isTemperatureForHeating(self):
        return self.out_temperature > self.temperatures[0] # the outdoor temperature is low
def boilerHeats(self):
return max(self.temperatures[1:]) > self.pipes_temperature
def mayBoilerHeat(self):
return self.isTemperatureForHeating() and self.week.isTimeForHeating()
def controlBoiler(self):
if self.mayBoilerHeat():
#if not self.boilerHeats():
kotelOn()
elif self.boilerHeats():
kotelOff()
def doYourWork(self):
self.work = True
cycles = 0
while(self.work):
self.refreshTemperature()
if cycles % 10 == 0:
self.controlBoiler()
cycles += 1
time.sleep(60)
def cancelWork(self):
self.work = False
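# Illustrative sketch (added for clarity, not part of the original script): the
# ADC-to-Celsius conversion and the low-pass filter used in refreshTemperature(),
# spelled out for a single reading. The function name and the sample values are hypothetical.
def _demo_temperature_conversion(raw=609, pom=174, vcc=747, previous=15.0, weight=1.0/32):
    celsius = raw / 10.24 * vcc / pom * 1.1 - 273       # scale by the measured VCC ratio, then Kelvin to Celsius
    return previous + (celsius - previous) * weight     # exponential smoothing towards the new reading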
if __name__ == '__main__':
#print('Pokus: uvidime, co zmuzeme s kotelnikem.')
k=Kotelnik()
k.doYourWork()
print('Kotelnik skoncil. ')
| gpl-3.0 | 2,927,955,303,797,610,500 | 29.269231 | 156 | 0.685424 | false |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/PySide/examples/declarative/extending/chapter3-bindings/bindings.py | 1 | 3388 | #!/usr/bin/python
# Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved.
# Contact: PySide Team ([email protected])
#
# This file is part of the examples of PySide: Python for Qt.
#
# You may use this file under the terms of the BSD license as follows:
#
# "Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
import sys
from PySide.QtCore import *
from PySide.QtGui import *
from PySide.QtDeclarative import *
class PieChart (QDeclarativeItem):
def __init__(self, parent = None):
QDeclarativeItem.__init__(self, parent)
# need to disable this flag to draw inside a QDeclarativeItem
self.setFlag(QGraphicsItem.ItemHasNoContents, False)
self._name = u''
self._color = QColor()
def paint(self, painter, options, widget):
pen = QPen(self._color, 2)
painter.setPen(pen);
painter.setRenderHints(QPainter.Antialiasing, True);
painter.drawPie(self.boundingRect(), 90 * 16, 290 * 16);
def getColor(self):
return self._color
def setColor(self, value):
if value != self._color:
self._color = value
self.update()
self.colorChanged.emit()
def getName(self):
return self._name
def setName(self, value):
self._name = value
colorChanged = Signal()
color = Property(QColor, getColor, setColor, notify=colorChanged)
name = Property(unicode, getName, setName)
chartCleared = Signal()
@Slot() # This should be something like @Invokable
def clearChart(self):
self.setColor(Qt.transparent)
self.update()
self.chartCleared.emit()
if __name__ == '__main__':
app = QApplication(sys.argv)
qmlRegisterType(PieChart, 'Charts', 1, 0, 'PieChart');
view = QDeclarativeView()
view.setSource(QUrl.fromLocalFile('app.qml'))
view.show()
sys.exit(app.exec_())
| epl-1.0 | -8,127,580,666,819,666,000 | 35.826087 | 72 | 0.702184 | false |
skosukhin/spack | var/spack/repos/builtin/packages/fastphase/package.py | 1 | 1682 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Fastphase(Package):
"""Software for haplotype reconstruction, and estimating missing genotypes
from population data."""
homepage = "http://stephenslab.uchicago.edu/software.html"
url = "http://scheet.org/code/Linuxfp.tar.gz"
version('2016-03-30', 'b48731eed9b8d0a5a321f970c5c20d8c')
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('fastPHASE', prefix.bin)
| lgpl-2.1 | 6,089,562,215,838,260,000 | 42.128205 | 78 | 0.675386 | false |
dreadrel/UWF_2014_spring_COP3990C-2507 | notebooks/scripts/book_code/code/registry-deco.py | 1 | 1048 | # Registering decorated objects to an API
from __future__ import print_function # 2.X
registry = {}
def register(obj): # Both class and func decorator
registry[obj.__name__] = obj # Add to registry
return obj # Return obj itself, not a wrapper
@register
def spam(x):
return(x ** 2) # spam = register(spam)
@register
def ham(x):
return(x ** 3)
@register
class Eggs: # Eggs = register(Eggs)
def __init__(self, x):
self.data = x ** 4
def __str__(self):
return str(self.data)
print('Registry:')
for name in registry:
print(name, '=>', registry[name], type(registry[name]))
print('\nManual calls:')
print(spam(2)) # Invoke objects manually
print(ham(2)) # Later calls not intercepted
X = Eggs(2)
print(X)
print('\nRegistry calls:')
for name in registry:
print(name, '=>', registry[name](2)) # Invoke from registry
| apache-2.0 | 154,931,190,101,978,340 | 28.111111 | 78 | 0.533397 | false |
sbenthall/chantbot | parse.py | 1 | 1431 | import ConfigParser
import re
import math
config= ConfigParser.ConfigParser()
config.read('config.cfg')
def hash_word(match):
return '#' + match.group()
def hash_line(line,kw_re):
for kr in kw_re:
line = re.sub(kr, hash_word, line)
return line
def prepare_chants(source,num_bursts,keywords):
"""
    prepare_chants(source, num_bursts, keywords) -> list of Chants
    Read in the text from the source file and
    return a list of Chant objects, one for each blank-line-separated
    block, with the configured keywords turned into hashtags.
"""
chants = []
f = open(source)
text = ""
kw_re = [re.compile(r'\b%s\b' % kw,flags=re.I) for kw in keywords]
for line in f:
if re.match(r'^\s*$',line) is not None:
if text is not "":
chants.append(Chant(text,num_bursts))
text = ""
else:
# add hashtags where necessary
text += hash_line(line,kw_re)
f.close()
return chants
class Chant:
lines = []
bursts = []
# lines per burst
lpb = 0
def __init__(self,text,num_bursts):
self.lines = text.split("\n")
if self.lines[-1] is "":
self.lines = self.lines[0:-1]
# lines per burst
self.lpb = int(math.ceil(float(len(self.lines)) / num_bursts))
self.bursts = [self.lines[i:i+self.lpb] for i
in xrange(0,len(self.lines),self.lpb)]
if len(self.bursts) < num_bursts:
self.bursts.append([])
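# Illustrative sketch (added for clarity, not part of the original module): how a chant
# text is split into bursts. The values are hypothetical and the function is never called.
def _demo_chant_split():
    chant = Chant("line one\nline two\nline three", 2)
    # lpb = ceil(3 / 2) = 2, so chant.bursts == [["line one", "line two"], ["line three"]]
    return chant.bursts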
| mit | -8,786,346,433,938,706,000 | 20.358209 | 70 | 0.55276 | false |
acapet/GHER-POSTPROC | Examples/Conservation.py | 1 | 2831 | import numpy as np
import numpy.ma as ma
from netCDF4 import Dataset
#from mpl_toolkits.basemap import Basemap
#from multiprocessing import Pool
#import gsw
import matplotlib
matplotlib.use('pdf')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime as dt
import sys
import os
import G3D_class
ztab = -1*np.concatenate([np.arange(0,10,2), np.arange(10,40,5),np.arange(50,120,10),np.arange(120,320,50)])
firstyear=1
lastyear =4
for yy in range(firstyear,lastyear+1):
G1 = G3D_class.G3D('../Out_CARTseq/r'+str(yy)+'.nc')
maskDS = (G1.bat<120) | (G1.bat.mask)
G1.gload('T1age')
G1.gstore('T1age')
G1.testz()
NAgeClasses = 100
AgeClasses = np.linspace(0,1000,NAgeClasses )
AgeVolumes = np.zeros([len(G1.dates),NAgeClasses])
Vol = G1.dx*G1.dy*G1.dz*1e-9
daysincebeg=np.zeros(len(G1.dates))
if yy==firstyear:
datebeg=G1.dates[0]
for t in range(len(G1.dates)):
# make a vector with the volume of water For each age class
localagevector = G1.T1age[t]
for ageClassindex in range(len(AgeClasses)-1):
bi = ma.masked_where( (localagevector<AgeClasses[ageClassindex]) | (localagevector>=AgeClasses[ageClassindex+1]), Vol)
AgeVolumes[t,ageClassindex]=bi.sum()
daysincebeg[t]=(G1.dates[t]-datebeg).days
if yy==firstyear:
AVa=AgeVolumes
datesa=daysincebeg
else:
AVa=np.append(AVa,AgeVolumes,0)
datesa=np.append(datesa,daysincebeg,0)
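# Illustrative sketch (added for clarity, not part of the original script): the per-age-class
# bucketing performed in the loop above, reduced to a tiny array. Names and numbers are hypothetical.
def _demo_age_bucket():
    age = np.array([5.0, 15.0, 25.0])                            # water age of three cells, in days
    vol = np.array([1.0, 2.0, 4.0])                              # volume of the same cells
    inside = ma.masked_where((age < 10.0) | (age >= 20.0), vol)  # keep only cells whose age lies in [10, 20)
    return inside.sum()                                          # -> 2.0, only the middle cell qualifies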
locator = mdates.AutoDateLocator()
formator = mdates.AutoDateFormatter(locator)
AVa=AVa/Vol.sum()*100
####################
# 1st figure :
####################
fig=plt.figure(figsize=(15, 15))
ax=plt.subplot(1, 1, 1)
#ax.xaxis_date()
#ax.xaxis.set_major_locator(locator)
#ax.xaxis.set_major_formatter(formator)
#plt.contourf(datesa, AgeClasses, AVa.T,levels=np.linspace(0,10,100),cmap='GnBu')
plt.contourf(datesa, AgeClasses, AVa.T,levels=np.linspace(0,1.5,100),cmap='gist_ncar_r')
plt.colorbar()
plt.plot([0.0, datesa.max()], [0.0, datesa.max()], 'r-', lw=2)
plt.title('Volumes for age of Waters - [% of volume]')
plt.ylabel('Age - [d]')
plt.xlabel('Time- [d]')
plt.grid(True)
fig.savefig(G1.figoutputdir+'AgeVolumes.png')
| gpl-3.0 | 3,543,788,336,527,825,000 | 33.950617 | 212 | 0.530908 | false |
kaffeebrauer/Lean | Algorithm.Framework/Execution/StandardDeviationExecutionModel.py | 1 | 7152 | # QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Common")
AddReference("QuantConnect.Indicators")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Algorithm.Framework")
from System import *
from QuantConnect import *
from QuantConnect.Indicators import *
from QuantConnect.Data import *
from QuantConnect.Data.Market import *
from QuantConnect.Orders import *
from QuantConnect.Algorithm import *
from QuantConnect.Algorithm.Framework import *
from QuantConnect.Algorithm.Framework.Execution import *
from QuantConnect.Algorithm.Framework.Portfolio import *
import numpy as np
class StandardDeviationExecutionModel(ExecutionModel):
    '''Execution model that submits orders while the current market price is at least the configured number of standard
deviations away from the mean in the favorable direction (below/above for buy/sell respectively)'''
def __init__(self,
period = 60,
deviations = 2,
resolution = Resolution.Minute):
'''Initializes a new instance of the StandardDeviationExecutionModel class
Args:
period: Period of the standard deviation indicator
deviations: The number of deviations away from the mean before submitting an order
resolution: The resolution of the STD and SMA indicators'''
self.period = period
self.deviations = deviations
self.resolution = resolution
self.targetsCollection = PortfolioTargetCollection()
self.symbolData = {}
# Gets or sets the maximum order value in units of the account currency.
# This defaults to $20,000. For example, if purchasing a stock with a price
# of $100, then the maximum order size would be 200 shares.
self.MaximumOrderValue = 20000
def Execute(self, algorithm, targets):
'''Executes market orders if the standard deviation of price is more
than the configured number of deviations in the favorable direction.
Args:
algorithm: The algorithm instance
targets: The portfolio targets'''
self.targetsCollection.AddRange(targets)
for target in self.targetsCollection.OrderByMarginImpact(algorithm):
symbol = target.Symbol
# calculate remaining quantity to be ordered
unorderedQuantity = OrderSizing.GetUnorderedQuantity(algorithm, target)
# fetch our symbol data containing our STD/SMA indicators
data = self.symbolData.get(symbol, None)
if data is None: return
# check order entry conditions
if data.STD.IsReady and self.PriceIsFavorable(data, unorderedQuantity):
# get the maximum order size based on total order value
maxOrderSize = OrderSizing.Value(data.Security, self.MaximumOrderValue)
orderSize = np.min([maxOrderSize, np.abs(unorderedQuantity)])
# round down to even lot size
orderSize -= orderSize % data.Security.SymbolProperties.LotSize
if orderSize != 0:
algorithm.MarketOrder(symbol, np.sign(unorderedQuantity) * orderSize)
self.targetsCollection.ClearFulfilled(algorithm)
def OnSecuritiesChanged(self, algorithm, changes):
'''Event fired each time the we add/remove securities from the data feed
Args:
algorithm: The algorithm instance that experienced the change in securities
changes: The security additions and removals from the algorithm'''
for removed in changes.RemovedSecurities:
# clean up data from removed securities
if removed.Symbol in self.symbolData:
if self.IsSafeToRemove(algorithm, removed.Symbol):
data = self.symbolData.pop(removed.Symbol)
algorithm.SubscriptionManager.RemoveConsolidator(removed.Symbol, data.Consolidator)
addedSymbols = []
for added in changes.AddedSecurities:
if added.Symbol not in self.symbolData:
self.symbolData[added.Symbol] = SymbolData(algorithm, added, self.period, self.resolution)
addedSymbols.append(added.Symbol)
if len(addedSymbols) > 0:
# warmup our indicators by pushing history through the consolidators
history = algorithm.History(addedSymbols, self.period, self.resolution)
if history.empty: return
tickers = history.index.levels[0]
for ticker in tickers:
symbol = SymbolCache.GetSymbol(ticker)
symbolData = self.symbolData[symbol]
for tuple in history.loc[ticker].itertuples():
bar = TradeBar(tuple.Index, symbol, tuple.open, tuple.high, tuple.low, tuple.close, tuple.volume)
symbolData.Consolidator.Update(bar)
def PriceIsFavorable(self, data, unorderedQuantity):
'''Determines if the current price is more than the configured
number of standard deviations away from the mean in the favorable direction.'''
deviations = self.deviations * data.STD.Current.Value
if unorderedQuantity > 0:
if data.Security.BidPrice < data.SMA.Current.Value - deviations:
return True
else:
if data.Security.AskPrice > data.SMA.Current.Value + deviations:
return True
return False
def IsSafeToRemove(self, algorithm, symbol):
'''Determines if it's safe to remove the associated symbol data'''
# confirm the security isn't currently a member of any universe
return not any([kvp.Value.ContainsMember(symbol) for kvp in algorithm.UniverseManager])
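# Illustrative sketch (added for clarity, not part of the original model): the entry
# condition checked by PriceIsFavorable(), written out for plain numbers. The function
# name and its defaults are hypothetical.
def _demo_price_is_favorable(bid, ask, mean, std, deviations=2, quantity=1):
    band = deviations * std
    if quantity > 0:
        return bid < mean - band    # buying: only act once the bid dips below the band
    return ask > mean + band        # selling: only act once the ask rises above the band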
class SymbolData:
def __init__(self, algorithm, security, period, resolution):
self.Security = security
self.Consolidator = algorithm.ResolveConsolidator(security.Symbol, resolution)
smaName = algorithm.CreateIndicatorName(security.Symbol, "SMA{}".format(period), resolution)
self.SMA = SimpleMovingAverage(smaName, period)
algorithm.RegisterIndicator(security.Symbol, self.SMA, self.Consolidator)
stdName = algorithm.CreateIndicatorName(security.Symbol, "STD{}".format(period), resolution)
self.STD = StandardDeviation(stdName, period)
algorithm.RegisterIndicator(security.Symbol, self.STD, self.Consolidator) | apache-2.0 | -3,448,392,155,997,403,000 | 46.673333 | 120 | 0.68993 | false |
ShakedY/ai-project | py2.5/bin/smtpd.py | 1 | 18102 | #!/home/shaked/Desktop/prob-plan-recognition/seq-sat-lama/py2.5/bin/python
"""An RFC 2821 smtp proxy.
Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]]
Options:
--nosetuid
-n
This program generally tries to setuid `nobody', unless this flag is
set. The setuid call will fail if this program is not run as root (in
which case, use this flag).
--version
-V
Print the version number and exit.
--class classname
-c classname
Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by
default.
--debug
-d
Turn on debugging prints.
--help
-h
Print this message and exit.
Version: %(__version__)s
If localhost is not given then `localhost' is used, and if localport is not
given then 8025 is used. If remotehost is not given then `localhost' is used,
and if remoteport is not given, then 25 is used.
"""
# Overview:
#
# This file implements the minimal SMTP protocol as defined in RFC 821. It
# has a hierarchy of classes which implement the backend functionality for the
# smtpd. A number of classes are provided:
#
# SMTPServer - the base class for the backend. Raises NotImplementedError
# if you try to use it.
#
# DebuggingServer - simply prints each message it receives on stdout.
#
# PureProxy - Proxies all messages to a real smtpd which does final
# delivery. One known problem with this class is that it doesn't handle
# SMTP errors from the backend server at all. This should be fixed
# (contributions are welcome!).
#
# MailmanProxy - An experimental hack to work with GNU Mailman
# <www.list.org>. Using this server as your real incoming smtpd, your
# mailhost will automatically recognize and accept mail destined to Mailman
# lists when those lists are created. Every message not destined for a list
# gets forwarded to a real backend smtpd, as with PureProxy. Again, errors
# are not handled correctly yet.
#
# Please note that this script requires Python 2.0
#
# Author: Barry Warsaw <[email protected]>
#
# TODO:
#
# - support mailbox delivery
# - alias files
# - ESMTP
# - handle error codes from the backend smtpd
import sys
import os
import errno
import getopt
import time
import socket
import asyncore
import asynchat
__all__ = ["SMTPServer","DebuggingServer","PureProxy","MailmanProxy"]
program = sys.argv[0]
__version__ = 'Python SMTP proxy version 0.2'
class Devnull:
def write(self, msg): pass
def flush(self): pass
DEBUGSTREAM = Devnull()
NEWLINE = '\n'
EMPTYSTRING = ''
COMMASPACE = ', '
def usage(code, msg=''):
print >> sys.stderr, __doc__ % globals()
if msg:
print >> sys.stderr, msg
sys.exit(code)
class SMTPChannel(asynchat.async_chat):
COMMAND = 0
DATA = 1
def __init__(self, server, conn, addr):
asynchat.async_chat.__init__(self, conn)
self.__server = server
self.__conn = conn
self.__addr = addr
self.__line = []
self.__state = self.COMMAND
self.__greeting = 0
self.__mailfrom = None
self.__rcpttos = []
self.__data = ''
self.__fqdn = socket.getfqdn()
self.__peer = conn.getpeername()
print >> DEBUGSTREAM, 'Peer:', repr(self.__peer)
self.push('220 %s %s' % (self.__fqdn, __version__))
self.set_terminator('\r\n')
# Overrides base class for convenience
def push(self, msg):
asynchat.async_chat.push(self, msg + '\r\n')
# Implementation of base class abstract method
def collect_incoming_data(self, data):
self.__line.append(data)
# Implementation of base class abstract method
def found_terminator(self):
line = EMPTYSTRING.join(self.__line)
print >> DEBUGSTREAM, 'Data:', repr(line)
self.__line = []
if self.__state == self.COMMAND:
if not line:
self.push('500 Error: bad syntax')
return
method = None
i = line.find(' ')
if i < 0:
command = line.upper()
arg = None
else:
command = line[:i].upper()
arg = line[i+1:].strip()
method = getattr(self, 'smtp_' + command, None)
if not method:
self.push('502 Error: command "%s" not implemented' % command)
return
method(arg)
return
else:
if self.__state != self.DATA:
self.push('451 Internal confusion')
return
# Remove extraneous carriage returns and de-transparency according
# to RFC 821, Section 4.5.2.
data = []
for text in line.split('\r\n'):
if text and text[0] == '.':
data.append(text[1:])
else:
data.append(text)
self.__data = NEWLINE.join(data)
status = self.__server.process_message(self.__peer,
self.__mailfrom,
self.__rcpttos,
self.__data)
self.__rcpttos = []
self.__mailfrom = None
self.__state = self.COMMAND
self.set_terminator('\r\n')
if not status:
self.push('250 Ok')
else:
self.push(status)
# SMTP and ESMTP commands
def smtp_HELO(self, arg):
if not arg:
self.push('501 Syntax: HELO hostname')
return
if self.__greeting:
self.push('503 Duplicate HELO/EHLO')
else:
self.__greeting = arg
self.push('250 %s' % self.__fqdn)
def smtp_NOOP(self, arg):
if arg:
self.push('501 Syntax: NOOP')
else:
self.push('250 Ok')
def smtp_QUIT(self, arg):
# args is ignored
self.push('221 Bye')
self.close_when_done()
# factored
def __getaddr(self, keyword, arg):
address = None
keylen = len(keyword)
if arg[:keylen].upper() == keyword:
address = arg[keylen:].strip()
if not address:
pass
elif address[0] == '<' and address[-1] == '>' and address != '<>':
# Addresses can be in the form <[email protected]> but watch out
# for null address, e.g. <>
address = address[1:-1]
return address
def smtp_MAIL(self, arg):
print >> DEBUGSTREAM, '===> MAIL', arg
address = self.__getaddr('FROM:', arg) if arg else None
if not address:
self.push('501 Syntax: MAIL FROM:<address>')
return
if self.__mailfrom:
self.push('503 Error: nested MAIL command')
return
self.__mailfrom = address
print >> DEBUGSTREAM, 'sender:', self.__mailfrom
self.push('250 Ok')
def smtp_RCPT(self, arg):
print >> DEBUGSTREAM, '===> RCPT', arg
if not self.__mailfrom:
self.push('503 Error: need MAIL command')
return
address = self.__getaddr('TO:', arg) if arg else None
if not address:
self.push('501 Syntax: RCPT TO: <address>')
return
self.__rcpttos.append(address)
print >> DEBUGSTREAM, 'recips:', self.__rcpttos
self.push('250 Ok')
def smtp_RSET(self, arg):
if arg:
self.push('501 Syntax: RSET')
return
# Resets the sender, recipients, and data, but not the greeting
self.__mailfrom = None
self.__rcpttos = []
self.__data = ''
self.__state = self.COMMAND
self.push('250 Ok')
def smtp_DATA(self, arg):
if not self.__rcpttos:
self.push('503 Error: need RCPT command')
return
if arg:
self.push('501 Syntax: DATA')
return
self.__state = self.DATA
self.set_terminator('\r\n.\r\n')
self.push('354 End data with <CR><LF>.<CR><LF>')
class SMTPServer(asyncore.dispatcher):
def __init__(self, localaddr, remoteaddr):
self._localaddr = localaddr
self._remoteaddr = remoteaddr
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
# try to re-use a server port if possible
self.set_reuse_addr()
self.bind(localaddr)
self.listen(5)
print >> DEBUGSTREAM, \
'%s started at %s\n\tLocal addr: %s\n\tRemote addr:%s' % (
self.__class__.__name__, time.ctime(time.time()),
localaddr, remoteaddr)
def handle_accept(self):
conn, addr = self.accept()
print >> DEBUGSTREAM, 'Incoming connection from %s' % repr(addr)
channel = SMTPChannel(self, conn, addr)
# API for "doing something useful with the message"
def process_message(self, peer, mailfrom, rcpttos, data):
"""Override this abstract method to handle messages from the client.
peer is a tuple containing (ipaddr, port) of the client that made the
socket connection to our smtp port.
mailfrom is the raw address the client claims the message is coming
from.
rcpttos is a list of raw addresses the client wishes to deliver the
message to.
data is a string containing the entire full text of the message,
headers (if supplied) and all. It has been `de-transparencied'
according to RFC 821, Section 4.5.2. In other words, a line
containing a `.' followed by other text has had the leading dot
removed.
This function should return None, for a normal `250 Ok' response;
otherwise it returns the desired response string in RFC 821 format.
"""
raise NotImplementedError
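# Illustrative sketch (added for clarity, not part of the original module): a minimal
# SMTPServer subclass that overrides process_message() as described above. The class
# name and its behaviour are hypothetical.
class _DiscardingServer(SMTPServer):
    """Accepts every message and silently discards it."""
    def process_message(self, peer, mailfrom, rcpttos, data):
        # Returning None makes the channel answer the client with '250 Ok'.
        return None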
class DebuggingServer(SMTPServer):
# Do something with the gathered message
def process_message(self, peer, mailfrom, rcpttos, data):
inheaders = 1
lines = data.split('\n')
print '---------- MESSAGE FOLLOWS ----------'
for line in lines:
# headers first
if inheaders and not line:
print 'X-Peer:', peer[0]
inheaders = 0
print line
print '------------ END MESSAGE ------------'
class PureProxy(SMTPServer):
def process_message(self, peer, mailfrom, rcpttos, data):
lines = data.split('\n')
# Look for the last header
i = 0
for line in lines:
if not line:
break
i += 1
lines.insert(i, 'X-Peer: %s' % peer[0])
data = NEWLINE.join(lines)
refused = self._deliver(mailfrom, rcpttos, data)
# TBD: what to do with refused addresses?
print >> DEBUGSTREAM, 'we got some refusals:', refused
def _deliver(self, mailfrom, rcpttos, data):
import smtplib
refused = {}
try:
s = smtplib.SMTP()
s.connect(self._remoteaddr[0], self._remoteaddr[1])
try:
refused = s.sendmail(mailfrom, rcpttos, data)
finally:
s.quit()
except smtplib.SMTPRecipientsRefused, e:
print >> DEBUGSTREAM, 'got SMTPRecipientsRefused'
refused = e.recipients
except (socket.error, smtplib.SMTPException), e:
print >> DEBUGSTREAM, 'got', e.__class__
# All recipients were refused. If the exception had an associated
            # error code, use it. Otherwise, fake it with a non-triggering
# exception code.
errcode = getattr(e, 'smtp_code', -1)
errmsg = getattr(e, 'smtp_error', 'ignore')
for r in rcpttos:
refused[r] = (errcode, errmsg)
return refused
class MailmanProxy(PureProxy):
def process_message(self, peer, mailfrom, rcpttos, data):
from cStringIO import StringIO
from Mailman import Utils
from Mailman import Message
from Mailman import MailList
# If the message is to a Mailman mailing list, then we'll invoke the
# Mailman script directly, without going through the real smtpd.
# Otherwise we'll forward it to the local proxy for disposition.
listnames = []
for rcpt in rcpttos:
local = rcpt.lower().split('@')[0]
# We allow the following variations on the theme
# listname
# listname-admin
# listname-owner
# listname-request
# listname-join
# listname-leave
parts = local.split('-')
if len(parts) > 2:
continue
listname = parts[0]
if len(parts) == 2:
command = parts[1]
else:
command = ''
if not Utils.list_exists(listname) or command not in (
'', 'admin', 'owner', 'request', 'join', 'leave'):
continue
listnames.append((rcpt, listname, command))
# Remove all list recipients from rcpttos and forward what we're not
# going to take care of ourselves. Linear removal should be fine
# since we don't expect a large number of recipients.
for rcpt, listname, command in listnames:
rcpttos.remove(rcpt)
# If there's any non-list destined recipients left,
print >> DEBUGSTREAM, 'forwarding recips:', ' '.join(rcpttos)
if rcpttos:
refused = self._deliver(mailfrom, rcpttos, data)
# TBD: what to do with refused addresses?
print >> DEBUGSTREAM, 'we got refusals:', refused
# Now deliver directly to the list commands
mlists = {}
s = StringIO(data)
msg = Message.Message(s)
# These headers are required for the proper execution of Mailman. All
# MTAs in existance seem to add these if the original message doesn't
# have them.
if not msg.getheader('from'):
msg['From'] = mailfrom
if not msg.getheader('date'):
msg['Date'] = time.ctime(time.time())
for rcpt, listname, command in listnames:
print >> DEBUGSTREAM, 'sending message to', rcpt
mlist = mlists.get(listname)
if not mlist:
mlist = MailList.MailList(listname, lock=0)
mlists[listname] = mlist
# dispatch on the type of command
if command == '':
# post
msg.Enqueue(mlist, tolist=1)
elif command == 'admin':
msg.Enqueue(mlist, toadmin=1)
elif command == 'owner':
msg.Enqueue(mlist, toowner=1)
elif command == 'request':
msg.Enqueue(mlist, torequest=1)
elif command in ('join', 'leave'):
# TBD: this is a hack!
if command == 'join':
msg['Subject'] = 'subscribe'
else:
msg['Subject'] = 'unsubscribe'
msg.Enqueue(mlist, torequest=1)
class Options:
setuid = 1
classname = 'PureProxy'
def parseargs():
global DEBUGSTREAM
try:
opts, args = getopt.getopt(
sys.argv[1:], 'nVhc:d',
['class=', 'nosetuid', 'version', 'help', 'debug'])
except getopt.error, e:
usage(1, e)
options = Options()
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-V', '--version'):
print >> sys.stderr, __version__
sys.exit(0)
elif opt in ('-n', '--nosetuid'):
options.setuid = 0
elif opt in ('-c', '--class'):
options.classname = arg
elif opt in ('-d', '--debug'):
DEBUGSTREAM = sys.stderr
# parse the rest of the arguments
if len(args) < 1:
localspec = 'localhost:8025'
remotespec = 'localhost:25'
elif len(args) < 2:
localspec = args[0]
remotespec = 'localhost:25'
elif len(args) < 3:
localspec = args[0]
remotespec = args[1]
else:
usage(1, 'Invalid arguments: %s' % COMMASPACE.join(args))
# split into host/port pairs
i = localspec.find(':')
if i < 0:
usage(1, 'Bad local spec: %s' % localspec)
options.localhost = localspec[:i]
try:
options.localport = int(localspec[i+1:])
except ValueError:
usage(1, 'Bad local port: %s' % localspec)
i = remotespec.find(':')
if i < 0:
usage(1, 'Bad remote spec: %s' % remotespec)
options.remotehost = remotespec[:i]
try:
options.remoteport = int(remotespec[i+1:])
except ValueError:
usage(1, 'Bad remote port: %s' % remotespec)
return options
if __name__ == '__main__':
options = parseargs()
# Become nobody
if options.setuid:
try:
import pwd
except ImportError:
print >> sys.stderr, \
'Cannot import module "pwd"; try running with -n option.'
sys.exit(1)
nobody = pwd.getpwnam('nobody')[2]
try:
os.setuid(nobody)
except OSError, e:
if e.errno != errno.EPERM: raise
print >> sys.stderr, \
'Cannot setuid "nobody"; try running with -n option.'
sys.exit(1)
classname = options.classname
if "." in classname:
lastdot = classname.rfind(".")
mod = __import__(classname[:lastdot], globals(), locals(), [""])
classname = classname[lastdot+1:]
else:
import __main__ as mod
class_ = getattr(mod, classname)
proxy = class_((options.localhost, options.localport),
(options.remotehost, options.remoteport))
try:
asyncore.loop()
except KeyboardInterrupt:
pass
| gpl-3.0 | -7,248,076,950,587,915,000 | 31.382826 | 78 | 0.553861 | false |
Grognak/Grognaks-Mod-Manager | lib/killable_threading.py | 1 | 4056 | import threading
class KillableThread(threading.Thread):
"""A base class for threads that die on command.
Subclasses' run() loops test if self.keep_alive is False.
Instead of sleeping, they should call nap().
And any subclass method, meant to be called by other
threads, that interrupts a nap() should include wake_up().
"""
def __init__(self):
threading.Thread.__init__(self)
self.snooze_cond = threading.Condition()
self.keep_alive = True
def nap(self, seconds):
"""Sleep but stay responsive.
This sleep is preempted by a call to wake_up().
According to this site, timeouts for Queues,
Conditions, etc., can waste CPU cycles polling
excessively often (20x/sec). But you'd need
hundreds of threads to have a problem.
http://blog.codedstructure.net/2011/02/concurrent-queueget-with-timeouts-eats.html
:param seconds: How long to wait. Or None for indefinite.
"""
with self.snooze_cond:
self.snooze_cond.wait(seconds)
def wake_up(self):
"""Interrupts a nap(). (thread-safe)"""
with self.snooze_cond:
self.snooze_cond.notify()
def stop_living(self):
"""Tells this thread to die. (thread-safe)
This method is preferred over setting keep_alive directly,
for the benefit of threads that need to sleep with interruption.
"""
self.keep_alive = False
self.wake_up()
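# Illustrative sketch (added for clarity, not part of the original module): a minimal
# subclass that follows the contract described in the KillableThread docstring. The
# class name and the polling behaviour are hypothetical.
class _ExamplePoller(KillableThread):
    def __init__(self, interval=5):
        KillableThread.__init__(self)
        self._interval = interval
    def run(self):
        while self.keep_alive:
            # ... do one unit of periodic work here ...
            self.nap(self._interval)  # interruptible sleep; stop_living() ends it early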
class WrapperThread(KillableThread):
"""A thread that runs a payload func and stays killable.
It manages this by letting the payload know how to
check keep_alive and how to sleep.
"""
def __init__(self):
KillableThread.__init__(self)
self._payload = None
self._payload_args = None
self._payload_kwargs = None
self._failure_func = None
self._success_func = None
def set_payload(self, payload, *args, **kwargs):
"""Sets the payload function.
All further args will be forwarded to the payload.
This thread will inject two extra keyword args:
"keep_alive_func": Callback to check keep_alive.
No args.
"sleep_func": Callback to sleep.
A number in seconds.
So the payload must be capable of accepting those.
"""
self._payload = payload
self._payload_args = args
self._payload_kwargs = kwargs
self._payload_kwargs["keep_alive_func"] = self.keeping_alive
self._payload_kwargs["sleep_func"] = self.nap
def set_failure_func(self, failure_func):
"""Sets a callback to run on failure.
It will be given 1 arg: an exception.
"""
self._failure_func = failure_func
def set_success_func(self, successs_func):
"""Sets a callback to run on success.
It will be given 1 arg: whatever the payload returned.
"""
self._success_func = successs_func
def run(self):
result = None
if (self._payload is not None):
try:
result = self._payload(*self._payload_args, **self._payload_kwargs)
except (Exception) as err:
if (self.keeping_alive()):
if (self._failure_func is not None):
try:
self._failure_func(err)
except (Exception) as err:
logging.exception(err)
self.keep_alive = False
else:
if (self.keeping_alive()):
if (self._success_func is not None):
try:
self._success_func(result)
except (Exception) as err:
logging.exception(err)
self.keep_alive = False
def keeping_alive(self):
"""Returns True if this thread should continue, False otherwise."""
return self.keep_alive
| gpl-3.0 | -4,480,165,473,395,033,000 | 32.520661 | 90 | 0.571006 | false |
TSDBBench/Overlord | TSDBBench.py | 1 | 34595 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
__author__ = 'Andreas Bader'
__version__ = "1.00"
import logging
import logging.config
import argparse
from fabric.api import *
import os
import time
import Util
import subprocess
import threading
import Vm
import ConfigParser
import datetime
import re
import platform
pyYcsbPdfGenPath="ProcessYcsbLog.py"
testDBs=['basicdb','basicjdbc','basickairosdb','basicopentsdb']
vagrantCredFiles=["vagrantconf.rb", "vagrantconf_gen.rb", "vagrantconf_db.rb", "aws_commands.txt"]
vagrantBasicFilesFolder="basic"
logFile="debug_log_%s.log" % (time.strftime("%Y%m%d%H%M%S", time.localtime()))
logConfigFile="logging.conf"
availProviders=['virtualbox', 'vsphere', 'openstack', 'digital_ocean', 'aws'] # First one is default
def run_workload(genDict, dbDict, dbName, workloadName, timeseries, granularity, bucket, test, onlyPrerun, debug, logger):
if test:
command = ""
else:
command = "nohup "
ipStr = ""
hnStr = ""
for dbKey in sorted(dbDict.keys()):
        if dbDict[dbKey].ip == None or dbDict[dbKey].ip == "":
ipStr += "%s " %(dbDict[dbKey].vm.hostname())
logger.warning("IP of vm %s is None or an empty string, using hostname instead. This does not work on some providers (e.g. OpenStack)!" %(dbKey))
else:
ipStr += "%s " %(dbDict[dbKey].ip)
hnStr += "%s " % (dbDict[dbKey].name) #.vm.hostname() does not work here!
ip0 = dbDict[dbDict.keys()[0]].ip
if ip0 == None or ip0 == "":
ip0 = dbDict[dbDict.keys()[0]].vm.hostname()
logger.info("BEGIN: Running workload '%s' on %s with ip string %s and hostname string %s." %(workloadName,ip0,ipStr,hnStr))
command+="python2 /home/vagrant/files/RunWorkload.py -d %s -w %s -i %s -s %s" %(dbName,workloadName,ipStr,hnStr)
if timeseries:
command+=" -t"
if granularity:
command+=" -g %s" % (granularity)
if onlyPrerun:
command+=" -p"
if not test:
command += " -n"
if bucket:
command+=" -b %s" % (bucket)
if debug:
command += " --debug"
if not test:
command += " </dev/null"
else:
command += " -n"
    # here we expect to get an error and return code 255, seems to be normal when starting a background process!
ret = genDict[genDict.keys()[0]].run_without_output(True, command, True,True,test)
logger.info("END: Running workload '%s' on %s." %(workloadName,ip0))
return ret
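# Example of the assembled call (added for illustration, all values are hypothetical):
# for a two-node database cluster with timeseries output enabled this roughly becomes
#   nohup python2 /home/vagrant/files/RunWorkload.py -d mydb -w workloada \
#       -i 10.0.0.5 10.0.0.6 -s mydb_0 mydb_1 -t -g 1000 -n -b 100000 </dev/null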
def wait_for_vm(vms, logger, timeout=3600, noshutdown=False):
timerBegin=time.clock()
if len(vms.keys()) < 1:
logger.error("DB VM Dict has zero keys.")
return False
keyOfFirst=sorted(vms.keys())[0]
try:
while vms[keyOfFirst].vm.status()[0].state == "running":
time.sleep(10)
            if time.clock()-timerBegin > timeout:
                logger.error("VM %s is still up, waiting for it to shut down timed out after %s seconds." %(vms[keyOfFirst].vm.hostname(),timeout))
return False
except IndexError:
        logger.error("Python-Vagrant could not parse the output of vagrant status --machine-readable, try checking it "
                     "yourself. The output should be parsable CSV. Sometimes the \"plugin outdated\" message causes "
                     "this error. Check that all vagrant plugins are up to date.", exc_info=True)
return False
if noshutdown:
logger.info("Noshutdown is activated, trying to boot it up again.")
for vmKey in sorted(vms.keys()):
vms[vmKey].vm.up()
return True
def get_remote_file(vm,remotePath,localPath,logger):
with hide('output','running', 'warnings', 'stdout', 'stderr'),\
settings(host_string= vm.user_hostname_port(),
key_filename = vm.keyfile(),
disable_known_hosts = True, warn_only=True):
ret = get(remote_path=remotePath, local_path=localPath)
if len(ret) > 1:
logger.warning("More than one file copied from %s to %s: %s." %(remotePath, localPath, ret))
if len(ret) < 1:
logger.error("No files copied from %s to %s." %(remotePath, localPath))
return ret
def rm_remote_file(vm,remotePath,logger):
with hide('output','running', 'stdout'),\
settings(host_string= vm.user_hostname_port(),
key_filename = vm.keyfile(),
disable_known_hosts = True,
warn_only = True):
run ("rm %s" %(remotePath))
def get_ycsb_file(vm,dbName,workloadName,logger):
ret = get_remote_file(vm,"/home/vagrant/ycsb_%s_%s_*.log" %(dbName,workloadName),".",logger)
if len(ret) > 1:
logger.warning("More than one file copied for %s %s: %s. Taking first one." %(dbName, workloadName, ret))
if len(ret) < 1:
return None
return ret[0]
# returns True when errors are found
def check_result_file(path, logger):
if Util.check_file_exists(path):
file = open(path,"r")
errorsFound = False
errors = []
warningsFound = False
warnings = []
exceptionsFound = False
exceptions = []
for line in file:
if "warn" in line.lower():
warningsFound = True
warnings.append(line)
if "error" in line.lower():
errorsFound = True
errors.append(line)
if "exception" in line.lower():
exceptionsFound = True
exceptions.append(line)
file.close();
if errorsFound:
logger.error("The following errors occurred: ")
for error in errors:
logger.error(error)
return True
if warningsFound:
logger.warning("The following warnings occurred: ")
for warning in warnings:
logger.warning(warning)
return True
if exceptionsFound:
logger.error("The following exceptions occurred: ")
for exception in exceptions:
logger.error(exception)
return True
else:
logger.error("%s not found, can't check for errors." %(path))
return True
# returns True when not all queries are executed
# only possible for testworkloada and testworkloadb
# matches two lines:
# [INSERT], Operations, 1000
# and
# [INSERT], Return=0, 1000
# both numbers at the end of the lines must be the same
def check_result_file_extended(path, workload, logger):
if workload not in ["testworkloada", "testworkloadb"]:
return False
if Util.check_file_exists(path):
file = open(path, "r")
resultDict={}
error = False
atLeastOneReturnedZeroDict = {}
for line in file:
if re.match("\[(INSERT|READ|SCAN|AVG|COUNT|SUM)\],\s*(Return=|Operations).+$", line) != None:
splitters = line.split(",")
queryType = splitters[0].replace("[","").replace("]","")
lineType = splitters[1]
amount = int(splitters[2].replace(" ",""))
if "Operations" in lineType:
if queryType in resultDict.keys():
error = True # nothing should be found twice
else:
resultDict[queryType] = [amount,0]
elif "Return=" in lineType:
# check if at least a few non-INSERT queries returned 0 (=succesful)
# INSERT queries must return 0, -1 is not allowed
if queryType not in atLeastOneReturnedZeroDict.keys():
atLeastOneReturnedZeroDict[queryType] = False
if "Return=0" in lineType and "INSERT" in queryType and amount == resultDict[queryType][0]:
atLeastOneReturnedZeroDict[queryType] = True
elif "Return=0" in lineType and amount > 0:
atLeastOneReturnedZeroDict[queryType] = True
if queryType not in resultDict.keys():
error = True # should already be found in operations line
else:
resultDict[queryType][1]+=amount
sum = 0
for key in resultDict:
if key != "INSERT":
sum += resultDict[key][1]
if resultDict[key][0] != resultDict[key][1]:
return True
for key in atLeastOneReturnedZeroDict:
if not atLeastOneReturnedZeroDict[key]:
return True
if (workload == "testworkloada" and len(resultDict.keys()) != 2 and sum != resultDict["INSERT"][1]) or \
(workload == "testworkloadb" and len(resultDict.keys()) != 5 and sum != resultDict["INSERT"][1]) :
return True
return error
else:
logger.error("%s not found, can't check for errors." % (path))
return True
def generate_html(paths, pdf, overwrite):
if Util.check_file_exists(pyYcsbPdfGenPath):
tsString = ""
if args.timeseries:
tsString=" -t"
overwriteString = ""
if overwrite:
overwriteString=" -o"
ycsbFileString = "-f"
if len(paths) < 1:
logger.error("Can't create html or pdf, paths is empty." )
return False
for path in paths:
ycsbFileString += " %s" %(path)
pdfString = ""
if args.pdf:
pdfString = " -p"
multiStr = ""
if len(paths) > 1:
multiStr = " -s"
try:
retcode = subprocess.call("python2 %s %s%s%s%s%s" %(pyYcsbPdfGenPath,ycsbFileString,tsString,pdfString,overwriteString,multiStr), shell=True)
if retcode != 0:
logger.error("Generation of pdf/html returned with %s." %(retcode))
else:
logger.info("Successfully generated pdf/html file.")
except OSError, e:
logger.error("Errors occured while running pdf/html creation process.", exc_info=True)
else:
logger.error("Can't create html or pdf, %s does not exist." %(pyYcsbPdfGenPath))
def cleanup_vm(name, vm, pathFolder, pathVagrantfile, logger, linear):
logger.info("Cleaning up %s." %(name))
if vm != None and linear:
vm.destroy()
if pathFolder != None and pathFolder != "":
if not Util.delete_folder(pathFolder,logger,True):
logger.warning("Error while cleaning up %s." %(name))
return False
if pathVagrantfile != None and pathVagrantfile != "":
if not Util.delete_file(pathVagrantfile,logger,True):
logger.warning("Error while cleaning up %s." %(name))
return False
return True
def cleanup_vms(vmDict,logger, linear):
logger.info("Begin Cleaning up.")
if not linear:
logger.info("Waiting to finish creation if not finished...")
for key in vmDict.keys():
# Wait for Creation to finish if unfinished
vmDict[key].join()
if vmDict[key].created:
# Start Destroying if created :)
vmDict[key].start()
# Wait for Destroying to finish if unfinished
vmDict[key].join()
for key in vmDict.keys():
cleanup_vm(key, vmDict[key].vm,vmDict[key].pathFolder,vmDict[key].pathVagrantfile, logger, linear)
vmDict.pop(key)
overallTime=datetime.datetime.now()
# Configure ArgumentParser
parser = argparse.ArgumentParser(prog="TSDBBench.py",version=__version__,description="A tool for automated bencharming of time series databases.", formatter_class=argparse.RawDescriptionHelpFormatter, epilog="")
parser.add_argument("-l", "--log", action='store_true', help="Be more verbose, log vagrant output.")
parser.add_argument("-t", "--tmpfolder", metavar="TMP", required=True, help="Path to Temp Space")
parser.add_argument("-f", "--vagrantfolders", metavar="VAGRANT", nargs='+', required=True, help="Path to folder(s) with Vagrantfiles. Files from additional folder(s) overwrite existing files from preceding folder(s).")
parser.add_argument("-w", "--workload", metavar="WORKLOAD", help="Only process workload WORKLOAD")
parser.add_argument("-d", "--databases", metavar="DATABASES", nargs='+', help="Only process workloads for all machines for DATABASE (Generator will always be created!). Set to 'all' for all DATABASES, set to 'test' for a special test DB set.")
parser.add_argument("-n", "--nodestroy", action='store_true', help="Do not destroy VMs")
parser.add_argument("-o", "--noshutdown", action='store_true', help="Do not shutdown db vms, leave them running. Remember: After finishing workload they are rebooted!")
parser.add_argument("-s", "--timeseries", action='store_true', help="Force workload to do timeseries output")
parser.add_argument("-g", "--granularity", metavar="GRANULARITY", type=int, default=1000, help="If forcing to do timeseries output, use granularity GRANULARITY. Default:1000")
parser.add_argument("-b", "--bucket", metavar="BUCKET", type=int, default=100000, help="Use BUCKET bucket size for measurement histograms. Default:100000")
parser.add_argument("-m", "--html", action='store_true', help="Generate html output (ProcessYcsbLog.py required!)")
parser.add_argument("-p", "--pdf", action='store_true', help="Generate pdf output (ProcessYcsbLog.py required!)")
parser.add_argument("-u", "--nohup", action='store_true', help="Also fetch nohup output (for debugging only)")
parser.add_argument("-c", "--linear", action='store_true', help="Create VMs linear, do not use parallelisation.")
parser.add_argument("-r", "--provider", metavar="PROVIDER", type=str, default=availProviders[0], choices=availProviders, help="Which provider to use. Available: %s" %(availProviders))
parser.add_argument("-z", "--test", action='store_true', help="Test mode. Goes through all or the given databases with the given workload and tests each database. When using testworkloada or testworkloadb it is also checked if the amount of queries matches.")
args = parser.parse_args()
# Configure Logging
logLevel = logging.WARN
if args.log and not args.test:
logLevel = logging.DEBUG
try:
logging.config.fileConfig(logConfigFile)
except ConfigParser.NoSectionError:
print("Error: Can't load logging config from '%s'." %(logConfigFile))
exit(-1)
logger = logging.getLogger("TSDBBench")
if not args.test:
for handler in logger.handlers:
handler.setLevel(logLevel)
else:
logger.handlers = []
if not Util.delete_file(logFile,logger,True):
exit(-1)
if args.log or args.test:
handler = logging.FileHandler(logFile)
if args.test:
handler.setLevel(logging.DEBUG)
else:
handler.setLevel(logLevel)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s: %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
if args.test:
print("Executing in test mode.")
print("Python: %s" %(platform.python_version()))
print("Platform: %s" %(platform.platform()))
print("Databases: %s" % (args.databases))
print("Workload: %s" %(args.workload))
if args.workload == "testworkloada" or args.workload == "testworkloadb":
print("Result checking is used for this workload.")
else:
print("Result checking is NOT used for this workload.")
print("Provider: %s" %(args.provider))
print("Parallel creation of VMs: %s" %(not args.linear))
print("Log is written to '%s'." %(logFile))
print("Logging to shell is disabled (except fabric warnings).")
if args.provider == "digital_ocean" and not args.linear:
logger.warning("Provider '%s' does not support parallel creation of VMs. Linear creation is automatically enabled. See https://github.com/devopsgroup-io/vagrant-digitalocean/pull/230 for further details." % args.provider)
args.linear = True
if len(args.databases) > 1 and (args.nodestroy or args.noshutdown):
    logger.warning("The arguments --noshutdown and --nodestroy do not work with multiple databases in one run. Both are automatically disabled.")
args.nodestroy = False
args.noshutdown = False
# File checks and deletions (if necessary)
for folder in args.vagrantfolders:
if not Util.check_folder(folder,logger):
exit(-1)
for vagrantCredFile in vagrantCredFiles:
found_cred_file = False
for folder in args.vagrantfolders:
if Util.check_file_exists(os.path.join(folder,vagrantCredFile)):
found_cred_file = True
if not found_cred_file:
logger.error("%s not found in any of the given vagrantfolders (%s)." %(vagrantCredFile, args.vagrantfolders))
exit(-1)
if not Util.check_folder(args.tmpfolder,logger):
exit(-1)
generators={} # list of generator vms
dbs={} # dictinary of db vms
# format "name" = {"path_folder" : "/bla/tmppath", "path_vagrantfile":"/bla/tmppath/file", "vm": vm}
creationTimesGenerators=datetime.datetime.now()
termSize = Util.get_terminal_size(logger)
# Generating Generator VMs
if args.test:
print(Util.multiply_string("-", termSize))
print("Stage 1: Creation of generator VMs.")
generatorFound=False
for path, dir in Util.unsorted_paths(args.vagrantfolders,logger,"",True):
if os.path.isdir(os.path.join(path, dir)) and dir == "generator":
generatorFound=True
found=0 # how many .vagrant files are found, At least 1 is needed!
# search in all generator folders
for path, file in Util.unsorted_paths(args.vagrantfolders,logger, "generator", True):
if os.path.isfile(os.path.join(path, file)):
split = file.rsplit(".vagrant", 1)
# if rsplit is used on bla.vagrant, the result should be ["bla",""]
if len(split)>1 and split[1] == "":
if split[0] in generators.keys():
continue
found+=1
                    # check if Generator, generator, Generator_1, etc. is used as the machine name, but always create if
                    # something other than Generator is given (Generator is always created!)
if not args.databases or args.databases == None or args.databases == [] \
or not Util.check_if_in_databases("generator", args.databases) \
or (args.databases and Util.check_if_eq_databases(split[0], args.databases)) \
or (args.databases and Util.check_if_eq_databases(split[0].rsplit("_",1)[0], args.databases)):
if args.linear:
if args.test:
Util.print_wo_nl(split[0] + Util.multiply_string(".", termSize-len(split[0])-len("[ERROR]")))
virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
virtMachine.create_vm()
generators[virtMachine.name] = virtMachine
if not virtMachine.created:
if args.test:
print("[ERROR]")
else:
logger.error("VM %s could not be created." %(split[0]))
if not args.nodestroy:
cleanup_vms(generators,logger, args.linear)
exit(-1)
if args.test:
print("[OK]")
else:
virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
virtMachine.start()
Util.sleep_random(2.5,5.0) # needed for openstack, otherwise two vms get the same floating ip
generators[virtMachine.name] = virtMachine
if found == 0:
logger.error("No .vagrant files found in %s." %(Util.unsorted_paths(args.vagrantfolders, logger, "generator")))
exit(-1)
break
if args.linear:
creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
if not generatorFound:
logger.error("No Generator found, %s does not exist." %(Util.unsorted_paths(args.vagrantfolders, logger, "generator")))
exit(-1)
if args.databases and (Util.check_if_eq_databases("generator",args.databases) or Util.check_if_eq_databases_rsplit("generator",args.databases)):
if not args.linear:
for generatorKey in generators.keys():
if args.test:
Util.print_wo_nl(generatorKey + Util.multiply_string(".", termSize - len(generatorKey) - len("[ERROR]")))
logger.info("Wait for creation of %s to finish." %(generators[generatorKey].name))
generators[generatorKey].join()
if not generators[generatorKey].created:
if args.test:
print("[ERROR]")
else:
logger.error("VM %s could not be created." %(generators[generatorKey].name))
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
exit(-1)
if args.test:
print("[OK]")
creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
exit(0)
if args.test and args.linear:
print(Util.multiply_string("-", termSize))
print("Stage 2: Creation of database VMs and execution of workloads.")
ycsbfiles=[]
processedDatabaseVMs=[] # for multi-vagrantfolder-function
processedDatabases=[]
failedDatabases=[]
workingDatabases=[]
notTestedDatabases=list(args.databases)
creationTimesDB={}
workloadTimes={}
# Doing Tests if basic or test is in given dbs
if args.databases and (Util.check_if_eq_databases("basic", args.databases) or Util.check_if_eq_databases("test", args.databases)):
if not args.linear:
for generatorKey in generators.keys():
if args.test:
Util.print_wo_nl(
generatorKey + Util.multiply_string(".", termSize - len(generatorKey) - len("[ERROR]")))
logger.info("Wait for creation of %s to finish." %(generators[generatorKey].name))
generators[generatorKey].join()
if not generators[generatorKey].created:
if args.test:
print("[ERROR]")
else:
logger.error("VM %s could not be created." %(generators[generatorKey].name))
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
exit(-1)
if args.test:
print("[OK]")
creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
logger.info("Processing Test Databases")
for database in testDBs:
if args.workload:
logger.info("Starting workload '%s' on Generator %s." %(database,generators[generators.keys()[0]].vm.hostname()))
run_workload(generators, generators, database, args.workload, args.timeseries, args.granularity, args.bucket, True, False, args.log, logger)
ycsbFile = get_ycsb_file(generators[generators.keys()[0]].vm,database.lower(),args.workload.lower(),logger)
ycsbfiles.append(ycsbFile)
check_result_file(ycsbFile, logger)
if (args.html or args.pdf) and len(ycsbfiles) == 1:
generate_html([ycsbFile],args.pdf,False)
else:
logger.info("No Workload given, doing nothing.")
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
else:
# Generating Database VMs
logger.info("Processing Database VMs" )
for path, dir in Util.unsorted_paths(args.vagrantfolders, logger, "", False):
if os.path.isdir(os.path.join(path, dir)):
if dir== "generator" or dir.find(".")==0 or dir in processedDatabases:
continue
found=0 # how many .vagrant files are found, At least 1 is needed!
if not args.databases or args.databases == "" \
or re.match("basic.*", dir) != None \
or (args.databases and not Util.check_if_eq_databases(dir, args.databases) and not Util.check_if_eq_databases("all", args.databases)):
continue
if Util.check_if_eq_databases("all", args.databases):
if "all" in notTestedDatabases:
notTestedDatabases.remove("all")
if dir not in notTestedDatabases and dir not in workingDatabases and dir not in failedDatabases:
notTestedDatabases.append(dir)
logger.info("Processing %s." % (dir))
creationTimesDB[dir]=datetime.datetime.now()
for path2, file in Util.unsorted_paths(args.vagrantfolders, logger, dir, True):
if os.path.isfile(os.path.join(path, dir, file)):
split = file.rsplit(".vagrant", 1)
# if rsplit is used on bla.vagrant, the result should be ["bla",""]
if len(split)>1 and split[1] == "":
found+=1
if args.databases and args.databases != None and args.databases != [] \
and split[0] not in processedDatabaseVMs \
and (Util.check_if_eq_databases(split[0], args.databases) \
or Util.check_if_eq_databases(split[0].rsplit("_",1)[0], args.databases) \
or Util.check_if_eq_databases("all", args.databases)):
processedDatabaseVMs.append(split[0])
if args.linear:
if args.test:
Util.print_wo_nl(dir + Util.multiply_string(".", termSize - len(dir) - len("[ERROR]")))
virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
virtMachine.create_vm()
dbs[virtMachine.name] = virtMachine
if not virtMachine.created:
if args.test:
print("[ERROR]")
else:
logger.error("VM %s could not be created." %(split[0]))
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
cleanup_vms(dbs, logger, args.linear)
exit(-1)
else:
virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
virtMachine.start()
Util.sleep_random(2.5,5.0) # needed for openstack, otherwise two vms get the same floating ip
dbs[virtMachine.name] = virtMachine
if args.linear:
creationTimesDB[dir] = datetime.datetime.now() - creationTimesDB[dir]
processedDatabases.append(dir)
if not args.linear:
for generatorKey in generators.keys():
if args.test and len(workingDatabases) == 0: # only before first database
Util.print_wo_nl(generatorKey + Util.multiply_string(".", termSize - len(generatorKey) - len("[ERROR]")))
logger.info("Wait for creation of %s to finish." %(generators[generatorKey].name))
generators[generatorKey].join()
if not generators[generatorKey].created:
if args.test and len(workingDatabases) == 0: # only before first database
print("[ERROR]")
else:
logger.error("VM %s could not be created." %(generators[generatorKey].name))
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
cleanup_vms(dbs, logger, args.linear)
exit(-1)
if args.test and len(workingDatabases) == 0: # only before first database
print("[OK]")
if args.test:
                if len(workingDatabases) == 0: # only before first database, after last generator VM in parallel mode
creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
print(Util.multiply_string("-", termSize))
print("Stage 2: Creation of database VMs and execution of workloads.")
Util.print_wo_nl(dir + Util.multiply_string(".", termSize - len(dir) - len("[ERROR]")))
for dbKey in dbs.keys():
logger.info("Wait for creation of %s to finish." %(dbs[dbKey].name))
dbs[dbKey].join()
if not dbs[dbKey].created:
if args.test:
print("[ERROR]")
else:
logger.error("VM %s could not be created." %(dbs[dbKey].name))
if not args.nodestroy:
cleanup_vms(generators, logger, args.linear)
cleanup_vms(dbs, logger, args.linear)
exit(-1)
creationTimesDB[dir] = datetime.datetime.now() - creationTimesDB[dir]
if found == 0:
logger.error("No .vagrant files found in %s." % (Util.unsorted_paths(args.vagrantfolders, logger, dir)))
if args.workload:
workloadTimes[dir] = datetime.datetime.now()
logger.info("Starting workload '%s' on %s on Generator %s." %(args.workload,dbs[dbs.keys()[0]].vm.hostname(),generators[generators.keys()[0]].vm.hostname()))
run_workload(generators, dbs, dir, args.workload, args.timeseries, args.granularity, args.bucket, False, False, args.log, logger)
logger.info("Waiting for workload to finish...")
wait_for_vm(dbs, logger, 3600, args.noshutdown)
ycsbFile = get_ycsb_file(generators[generators.keys()[0]].vm, dir.lower(), args.workload.lower(), logger)
ycsbfiles.append(ycsbFile)
if args.nohup:
logger.info("Trying to fetch nohup files from generators.")
nohupCounter=0
for generatorKey in generators.keys():
get_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out","./nohup_%s_%s_%s.out" % (dir.lower(), args.workload.lower(), nohupCounter), logger)
rm_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out",logger)
nohupCounter+=1;
workloadTimes[dir] = datetime.datetime.now() - workloadTimes[dir]
checkResult=check_result_file(ycsbFile, logger)
if args.test:
                    checkResult2 = check_result_file_extended(ycsbFile, args.workload, logger)
                    if checkResult or checkResult2:
print("[ERROR]")
failedDatabases.append(dir)
notTestedDatabases.remove(dir)
else:
print("[OK]")
workingDatabases.append(dir)
notTestedDatabases.remove(dir)
if (args.html or args.pdf) and len(args.databases) == 1 and len(ycsbfiles) == 1:
generate_html([ycsbFile],args.pdf,False)
else:
logger.info("No Workload given, just running Prerun commands.")
run_workload(generators, dbs, dir, args.workload, args.timeseries, args.granularity, args.bucket, False, True, args.log, logger)
if args.nohup:
logger.info("Trying to fetch nohup files from generators.")
nohupCounter=0
for generatorKey in generators.keys():
get_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out","./nohup_%s_%s_%s.out" % (dir.lower(), "none", nohupCounter), logger)
rm_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out",logger)
nohupCounter+=1;
if not args.nodestroy and not args.noshutdown:
cleanup_vms(dbs,logger, args.linear)
if not args.nodestroy and not args.noshutdown:
cleanup_vms(dbs, logger, args.linear)
cleanup_vms(generators, logger , args.linear)
if (args.html or args.pdf) and len(ycsbfiles) > 1:
if args.test:
print(Util.multiply_string("-", termSize))
print("Stage 3: Creation ofcombined PDF file.")
logger.info("More than one DB given, also generating combined html/pdf file.")
generate_html(ycsbfiles,args.pdf,True)
overallTime = datetime.datetime.now() - overallTime
if args.test:
print(Util.multiply_string("-", termSize))
print("Statistics:")
print("Failed databases: %s" %(failedDatabases))
print("Not tested databases: %s" % (notTestedDatabases))
print("Working databases: %s" % (workingDatabases))
print("Amount of time needed overall: %s" %(Util.timedelta_to_string(overallTime)))
print("Amount of time needed to create generator VMs: %s" %(Util.timedelta_to_string(creationTimesGenerators)))
print("Amount of time needed to create database VMs:")
for key in creationTimesDB.keys():
timedelta_str = Util.timedelta_to_string(creationTimesDB[key])
print(key + Util.multiply_string("-", termSize-len(key)-len(timedelta_str)) + timedelta_str)
print("Amount of time needed to complete %s:" %(args.workload))
for key in workloadTimes.keys():
timedelta_str = Util.timedelta_to_string(workloadTimes[key])
print(key + Util.multiply_string("-", termSize - len(key) - len(timedelta_str)) + timedelta_str)
print("Ending with return code 0.")
exit(0) | apache-2.0 | 3,100,984,776,506,554,400 | 50.10192 | 259 | 0.589623 | false |
flipjack/misrutas | project/app/views.py | 1 | 22105 | # -*- encoding: utf-8 -*-
from django.shortcuts import render, redirect
from django.contrib.auth import logout
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.views.decorators.csrf import csrf_exempt
import json
from forms import *
from models import *
from open_facebook import OpenFacebook
from allauth.socialaccount.models import SocialToken
import datetime
from django.db.models import Q
from django.contrib.sites.models import Site
from datetime import datetime
from gmaps import Geocoding
#from googleplaces import GooglePlaces, types, lang
from django.core.serializers.base import Serializer as BaseSerializer
from django.core.serializers.python import Serializer as PythonSerializer
from django.core.serializers.json import Serializer as JsonSerializer
from django.utils import six
class ExtBaseSerializer(BaseSerializer):
""" Abstract serializer class; everything is the same as Django's base except from the marked lines """
def serialize(self, queryset, **options):
self.options = options
self.stream = options.pop('stream', six.StringIO())
self.selected_fields = options.pop('fields', None)
self.selected_props = options.pop('props', None)
self.use_natural_keys = options.pop('use_natural_keys', False)
self.use_natural_foreign_keys = options.pop('use_natural_foreign_keys', False)
self.use_natural_primary_keys = options.pop('use_natural_primary_keys', False)
self.start_serialization()
self.first = True
for obj in queryset:
self.start_object(obj)
concrete_model = obj._meta.concrete_model
for field in concrete_model._meta.local_fields:
if field.serialize:
if field.rel is None:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_field(obj, field)
else:
if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
self.handle_fk_field(obj, field)
for field in concrete_model._meta.many_to_many:
if field.serialize:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_m2m_field(obj, field)
            for field in self.selected_props or []:  # 'props' is optional, guard against None
self.handle_prop(obj, field)
self.end_object(obj)
if self.first:
self.first = False
self.end_serialization()
return self.getvalue()
def handle_prop(self, obj, field):
self._current[field] = getattr(obj, field)()
class ExtPythonSerializer(ExtBaseSerializer, PythonSerializer):
pass
class ExtJsonSerializer(ExtPythonSerializer, JsonSerializer):
pass
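# Usage sketch (illustrative only; mirrors the real call in search_friends() further below,
# the field/prop names here are just examples):
#
#   data = ExtJsonSerializer().serialize(
#       User.objects.all(),
#       fields=['first_name', 'last_name', 'username'],  # regular model fields
#       props=['picture'],                               # model methods, serialized via handle_prop()
#   )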
def user_logout(request):
logout(request)
return HttpResponseRedirect(reverse('landing'))
def landing(request):
if not request.user.is_anonymous():
return HttpResponseRedirect(reverse('index'))
return render(request, 'app/login.html',locals())
def index(request):
return render(request, 'app/index.html',locals())
@csrf_exempt
def profile(request, user, ide):
user = User.objects.filter(username=user, ide=ide)[0]
form = UserForm(instance=request.user)
form_2 = FileForm()
form_3 = My_groupForm()
form_4 = My_vehiclesForm()
my_groups = My_groups.objects.filter(user=request.user)
if request.method == "POST":
if 'my_info' in request.POST:
form = UserForm(request.POST, instance=request.user)
if form.is_valid():
form.save()
messages.success(request, 'Se modificó la información de tu perfil')
else:
modal_my_info = 'open'
messages.error(request, 'Hay errores con tu formulario')
if 'my_files' in request.POST:
form_2 = FileForm(request.POST, request.FILES)
if form_2.is_valid():
archive = form_2.save(commit=False)
archive.user = request.user
archive.save()
form_2 = FileForm()
messages.success(request, 'Se guardo exitosamente tu archivo')
else:
modal_my_files = 'open'
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
if 'my_vehicles' in request.POST:
form_4 = My_vehiclesForm(request.POST, request.FILES)
if form_4.is_valid():
obj = form_4.save(commit=False)
obj.user = request.user
obj.save()
form_4 = My_vehiclesForm()
messages.success(request, 'Se guardo exitosamente tu vehículo')
else:
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
if 'transport' in request.POST:
transport = request.POST.get('transport')
interest = Interests.objects.get(pk=transport)
request.user.interest = interest
request.user.save()
messages.success(request, 'Se cambió tu transporte principal')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'delete_file' in request.POST:
ide = request.POST.get('delete_file')
Documents.objects.get(ide=ide).delete()
messages.success(request, 'Se eliminó correctamente tu archivo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'delete_vehicle' in request.POST:
ide = request.POST.get('delete_vehicle')
My_vehicles.objects.get(ide=ide).delete()
messages.success(request, 'Se eliminó correctamente tu vehículo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'button_action' in request.POST:
accion = request.POST.get('accion')
if accion == "follow_friend":
if user in request.user.friend_request.all():
request.user.friends.add(user)
user.friends.add(request.user)
messages.success(request, 'Se agregó amigo a tu lista')
request.user.friend_request.remove(user)
else:
user.friend_request.add(request.user)
messages.success(request, 'Se envió tu solicitud de amistad')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if accion == "delete_friend":
request.user.friends.remove(user)
user.friends.remove(request.user)
messages.success(request, 'Se eliminó este amigo de tu lista')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if accion == "cancel_request":
user.friend_request.remove(request.user)
messages.success(request, 'Se canceló tu solicitud')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if accion == "negate_request":
request.user.friend_request.remove(user)
messages.success(request, 'Se canceló la solicitud')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'new_group' in request.POST:
form3 = My_groupForm(request.POST)
if form3.is_valid():
model = form3.save(commit = False)
model.user = request.user
model.save()
messages.success(request, 'Se agregó un nuevo grupo')
else:
modal_my_groups = 'open'
messages.error(request, 'Hay errores con tu formulario')
interests = Interests.objects.all()
user = User.objects.filter(username=user, ide=ide)[0]
my_vehicles = My_vehicles.objects.filter(user=request.user)
return render(request, 'app/profile.html',locals())
def events(request):
return render(request, 'app/events.html',locals())
def my_routes(request):
events = Events.objects.all()
return render(request, 'app/my_routes.html',locals())
def interest(request, interest, id, ide):
waypoint = Waypoint_event.objects.filter(id=id, ide=ide)[0]
return render(request, 'app/interest.html',locals())
def my_events(request):
form = MassiveForm()
now = datetime.now()
if request.POST:
form = MassiveForm(request.POST)
if form.is_valid():
massive = form.save()
massive.administrators.add(request.user)
massive.guests.add(request.user)
massive.confirmeds.add(request.user)
massive_itinerary = Massive_itinerary()
massive_itinerary.name = massive.name
massive_itinerary.start_date = massive.start_date
massive_itinerary.start_time = massive.start_time
massive_itinerary.place = massive.place_point
massive_itinerary.place_lat = massive.place_point_lat
massive_itinerary.place_lng = massive.place_point_lng
massive_itinerary.event = massive
massive_itinerary.description = ''
massive_itinerary.user = request.user
massive_itinerary.principal = True
massive_itinerary.save()
return HttpResponseRedirect( reverse('event_details', args=(massive.slug, massive.ide)) )
guests = Massive.objects.filter(guests = request.user, start_date__gte = datetime.now())
confirmeds = Massive.objects.filter(confirmeds = request.user, start_date__gte = datetime.now())
all_events = Massive.objects.filter(confirmeds = request.user)
return render(request, 'app/my_events.html',locals())
@csrf_exempt
def event_details(request, slug, ide):
massive = Massive.objects.filter(slug=slug, ide=ide)[0]
form2 = Massive_itineraryForm()
form3 = Waypoint_eventForm()
if request.POST:
if 'assist' in request.POST:
event = Massive.objects.filter(id=request.POST.get('ide'))[0]
user = User.objects.filter(id=request.POST.get('user'))[0]
event.confirmeds.add(user)
event.save()
messages.success(request, 'Se actualizo la información de tu evento')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'update_description' in request.POST:
massive.description = request.POST.get('code')
massive.save()
messages.success(request, 'Se actualizo la información de tu evento')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'asign_route' in request.POST:
form3 = Waypoint_eventForm(request.POST)
if form3.is_valid():
point = form3.save(commit=False)
massive_itinerary = Massive_itinerary.objects.filter(event = massive).exclude(place='').order_by('start_date','start_time')
events = Events()
events.name = 'Ruta de evento, ' + massive.name
events.start_date = point.start_date
events.start_time = point.start_time
events.end_date = massive.end_date
events.event_type = "Secreto"
events.massive = massive
events.save()
events.administrators.add(request.user)
count = 1
waypoint_event = Waypoint_event()
waypoint_event.point = point.point
waypoint_event.point_lat = point.point_lat
waypoint_event.point_lng = point.point_lng
waypoint_event.user = request.user
waypoint_event.order = count
waypoint_event.number = count
waypoint_event.event = events
waypoint_event.save()
count += 1
for obj in massive_itinerary:
waypoint_event = Waypoint_event()
waypoint_event.point = obj.place
waypoint_event.point_lat = obj.place_lat
waypoint_event.point_lng = obj.place_lng
waypoint_event.user = request.user
waypoint_event.order = count
waypoint_event.number = count
waypoint_event.event = events
waypoint_event.save()
count += 1
messages.success(request, 'Se asocio una nueva ruta a tu evento')
return HttpResponseRedirect( reverse('route_details', args=(events.id, events.ide)) )
if 'update_event' in request.POST:
form = Massive2Form(request.POST, instance = massive)
if form.is_valid():
massive = form.save()
massive_itinerary = Massive_itinerary.objects.filter(principal=True, event=massive)[0]
massive_itinerary.name = massive.name
massive_itinerary.start_date = massive.start_date
massive_itinerary.start_time = massive.start_time
massive_itinerary.place = massive.place_point
massive_itinerary.place_lat = massive.place_point_lat
massive_itinerary.place_lng = massive.place_point_lng
massive_itinerary.event = massive
massive_itinerary.description = ''
massive_itinerary.user = request.user
massive_itinerary.principal = True
massive_itinerary.save()
messages.success(request, 'Se actualizo la información de tu evento')
if 'new_massive_itinerary' in request.POST:
form2 = Massive_itineraryForm(request.POST)
if form2.is_valid():
obj = form2.save(commit=False)
obj.event = massive
obj.user = request.user
obj.save()
messages.success(request, 'Se agregó un nuevo registro al itinerario')
now = datetime.now()
massive = Massive.objects.filter(slug=slug, ide=ide)[0]
form = Massive2Form(instance = massive)
massive_itinerary = Massive_itinerary.objects.filter(event = massive).order_by('start_date','start_time')
site = Site.objects.all()[0]
return render(request, 'app/event_details.html',locals())
def event_itinerary(request, slug, ide):
massive = Massive_itinerary.objects.filter( event__slug = slug, ide = ide)[0]
form2 = Massive_itineraryForm(instance = massive)
massive = Massive_itinerary.objects.filter( event__slug = slug, ide = ide)[0]
if request.POST:
form2 = Massive_itineraryForm(request.POST)
if form2.is_valid():
form2.save()
messages.success(request, 'Se modificó el itinerario')
site = Site.objects.all()[0]
return render(request, 'app/event_itinerary.html',locals())
def new_event(request):
form = EventsForm()
if request.POST:
form = EventsForm(request.POST)
if form.is_valid():
event = form.save(commit=False)
waypoint_event = Waypoint_event()
waypoint_event.name = event.meeting_point
waypoint_event.point = event.meeting_point
waypoint_event.point_lat = event.meeting_point_lat
waypoint_event.point_lng = event.meeting_point_lng
waypoint_event.user = request.user
waypoint_event.order = 1
waypoint_event.number = 1
waypoint_event_ = Waypoint_event()
waypoint_event_.name = event.destination_point
waypoint_event_.point = event.destination_point
waypoint_event_.point_lat = event.destination_point_lat
waypoint_event_.point_lng = event.destination_point_lng
waypoint_event_.user = request.user
waypoint_event_.order = 2
waypoint_event_.number = 2
event.meeting_point = None
event.meeting_point_lat = None
event.meeting_point_lng = None
event.destination_point = None
event.destination_point_lat = None
event.destination_point_lng = None
event.save()
event.administrators.add(request.user)
waypoint_event.event = event
waypoint_event.save()
waypoint_event_.event = event
waypoint_event_.save()
return HttpResponseRedirect( reverse('route_details', args=(event.id, event.ide)) )
else:
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
return render(request, 'app/new_event.html',locals())
@csrf_exempt
def route_planing(request, id, ide):
def new_last_point(event, point, lat, lng):
waypoint_event = Waypoint_event()
waypoint_event.point = point
waypoint_event.name = point
waypoint_event.point_lat = lat
waypoint_event.point_lng = lng
waypoint_event.user = request.user
event_end_point = event.end_point()
waypoint_event.order = event_end_point.order
waypoint_event.number = event_end_point.number
waypoint_event.event = event
waypoint_event.save()
event_end_point.order += 1
event_end_point.number += 1
event_end_point.save()
event = Events.objects.filter(id = id, ide = ide)[0]
form1a = Events3aForm(instance = event)
form2a = Events4aForm(instance = event)
form3a = Events5aForm(instance = event)
form1b = Events3bForm(instance = event)
form2b = Events4bForm(instance = event)
form3b = Events5bForm(instance = event)
if request.method == "POST":
if 'save_route' in request.POST:
form1a = Events3aForm(request.POST, instance = event)
form2a = Events4aForm(request.POST, instance = event)
form3a = Events5aForm(request.POST, instance = event)
form1b = Events3bForm(request.POST, instance = event)
form2b = Events4bForm(request.POST, instance = event)
form3b = Events5bForm(request.POST, instance = event)
if form1a.is_valid() and form2a.is_valid() and form3a.is_valid() and form1b.is_valid() and form2b.is_valid() and form3b.is_valid():
form1a.save()
form2a.save()
form3a.save()
form1b.save()
form2b.save()
form3b.save()
return HttpResponseRedirect( reverse('route_details', args=(event.id, event.ide)) )
if 'new_point_form' in request.POST:
new_last_point(Events.objects.filter(id = id, ide = ide)[0], request.POST.get('point'), request.POST.get('point_lat'), request.POST.get('point_lng'))
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'new_point_click' in request.POST:
event = Events.objects.filter(id = id, ide = ide)[0]
api = Geocoding()
            nombre = ''
            entra = False  # becomes True only if at least one address component is returned
for direccion in api.reverse( float(request.POST.get('lat')), float(request.POST.get('lng')))[0]['address_components']:
entra = True
nombre += direccion['long_name']
nombre += ', '
if entra:
nombre = nombre[:-2]
new_last_point(Events.objects.filter(id = id, ide = ide)[0], nombre, request.POST.get('lat'), request.POST.get('lng'))
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'del_waypoint' in request.POST:
event = Events.objects.filter(id = id, ide = ide)[0]
Waypoint_event.objects.filter(id=request.POST.get('del_waypoint'))[0].delete()
waypoints = Waypoint_event.objects.filter(event=event).order_by('order')
order = 1
number = 1
for obj in waypoints:
obj.order = order
obj.number = number
obj.save()
order += 1
number += 1
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'order' in request.POST:
data = json.loads(request.POST.get('order'))
order = 1
number = 1
for obj in data:
waypoint = Waypoint_event.objects.filter(id=obj)[0]
waypoint.order = order
waypoint.number = number
waypoint.save()
order += 1
number += 1
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
event = Events.objects.filter(id = id, ide = ide)[0]
waypoints = Waypoint_event.objects.filter(event=event).order_by('order')
return render(request, 'app/route_planing.html',locals())
def route_details(request, id, ide):
site = Site.objects.all()[0].domain
event = Events.objects.filter(id = id, ide = ide)[0]
waypoints = Waypoint_event.objects.filter(event=event).order_by('order')
form = Events2Form(instance=event)
if request.method == "POST":
form = Events2Form(request.POST, instance=event)
if form.is_valid():
form.save()
messages.success(request, 'Se actualizaron los datos de la orden')
else:
print form.errors
messages.error(request, 'Hay datos incorrectos en la orden')
return render(request, 'app/route_details.html',locals())
def interest(request, interest, id, ide):
waypoint = Waypoint_event.objects.filter(id=id, ide=ide)[0]
return render(request, 'app/interest.html',locals())
@csrf_exempt
def invite_friends(request, slug, ide):
if request.method == "POST":
if 'selected_friends' in request.POST:
massive = Massive.objects.filter(slug = slug, ide = ide)[0]
ide = request.POST.get('ide')
typeobj = request.POST.get('typeobj')
if typeobj == 'grupo':
group = My_groups.objects.filter(id=ide)[0]
for user in group.friends.all():
massive.guests.add(user)
if typeobj == 'friend':
user = User.objects.filter(id=ide)[0]
massive.guests.add(user)
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
massive = Massive.objects.filter(slug = slug, ide = ide)[0]
my_groups = My_groups.objects.filter(user=request.user)
return render(request, 'app/invite_friends.html',locals())
@csrf_exempt
def friends(request):
if request.method == "POST":
if 'email' in request.POST:
email = request.POST.get('email')
user = User.objects.filter(email = email)
if user:
if user[0] == request.user:
data = {"ok":"false"}
messages.error(request, 'No puedes seguirte a ti mismo')
else:
data = {"ok": "true"}
request.user.friends.add(user[0])
messages.success(request, 'Ahora estas siguiendo a: '+ user[0].first_name+', '+user[0].last_name )
else:
data = {"ok":"false"}
messages.error(request, 'No hay ningún usuario con este email asociado')
return HttpResponse(json.dumps(data),content_type="application/json")
return render(request, 'app/friends.html',locals())
def details(request, slug, id):
return render(request, 'app/details.html',locals())
@csrf_exempt
def group(request, slug, ide):
group = My_groups.objects.filter(user=request.user, slug=slug, ide=ide)[0]
form = My_group_editForm(instance = group)
if request.method == "POST":
if 'add_friend' in request.POST:
print request.POST.get('add_friend')
friend = User.objects.filter(id =request.POST.get('add_friend'))[0]
group.friends.add(friend)
messages.success(request, 'Se agregó amigo al grupo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'delete_friend' in request.POST:
friend = User.objects.filter(id =request.POST.get('delete_friend'))[0]
group.friends.remove(friend)
messages.success(request, 'Se eliminó amigo del grupo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'info_group' in request.POST:
form = My_group_editForm(request.POST, instance = group)
if form.is_valid():
form.save()
messages.success(request, 'Grupo editado con éxito')
else:
modal_info = 'open'
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
return render(request, 'app/group.html',locals())
@csrf_exempt
def search_friends(request):
data = ExtJsonSerializer().serialize(User.objects.filter( Q(complete_name__icontains=request.POST.get('friend_value')) | Q(email=request.POST.get('friend_value')) ), fields=['first_name', 'last_name', 'date_joined', 'username', 'ide'], props=['picture'])
return HttpResponse(data, content_type="application/json")
def policy(request):
return render(request, 'privacy.html',locals())
| mit | -6,006,489,336,733,788,000 | 37.272097 | 256 | 0.700765 | false |
DeppSRL/open-partecipate | docs/conf.py | 1 | 7775 | # -*- coding: utf-8 -*-
#
# open_partecipate documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'open_partecipate'
copyright = u'2015, Your name'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'open_partecipatedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'open_partecipate.tex', u'open_partecipate Documentation',
u'Your name', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'open_partecipate', u'open_partecipate Documentation',
[u'Your name'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'open_partecipate', u'open_partecipate Documentation',
u'Your name', 'open_partecipate',
'A short description of the project.','Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote' | bsd-3-clause | -7,254,335,542,409,455,000 | 31.132231 | 80 | 0.705338 | false |
flavour/ifrc_qa | modules/s3/s3translate.py | 1 | 66270 | # -*- coding: utf-8 -*-
""" Translation API
@copyright: 2012-2016 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
import parser
import token
from gluon import current
from gluon.languages import read_dict, write_dict
from gluon.storage import Storage
from s3fields import S3ReusableField
"""
List of classes with description :
TranslateAPI : API class to retrieve strings and files by module
TranslateGetFiles : Class to traverse the eden directory and
categorize files based on module
TranslateParseFiles : Class to extract strings to translate from code files
TranslateReadFiles : Class to open a file, read its contents and build
a parse tree (for .py files) or use regex
(for html/js files) to obtain a list of strings
by calling methods from TranslateParseFiles
Strings : Class to manipulate strings and their files
Pootle : Class to synchronise a Pootle server's translation
with the local one
TranslateReportStatus : Class to report the translated percentage of each
language file for each module. It also updates
these percentages as and when required
"""
# =============================================================================
class TranslateAPI:
"""
API class for the Translation module to get
files, modules and strings individually
"""
core_modules = ("auth", "default", "errors", "appadmin")
def __init__(self):
self.grp = TranslateGetFiles()
self.grp.group_files(current.request.folder)
# ---------------------------------------------------------------------
@staticmethod
def get_langcodes():
""" Return a list of language codes """
lang_list = []
langdir = os.path.join(current.request.folder, "languages")
files = os.listdir(langdir)
for f in files:
lang_list.append(f[:-3])
return lang_list
# ---------------------------------------------------------------------
def get_modules(self):
""" Return a list of modules """
return self.grp.modlist
# ---------------------------------------------------------------------
def get_strings_by_module(self, module):
""" Return a list of strings corresponding to a module """
grp = self.grp
d = grp.d
if module in d.keys():
fileList = d[module]
else:
current.log.warning("Module '%s' doesn't exist!" % module)
return []
modlist = grp.modlist
strings = []
sappend = strings.append
R = TranslateReadFiles()
findstr = R.findstr
for f in fileList:
if f.endswith(".py") == True:
tmpstr = findstr(f, "ALL", modlist)
elif f.endswith(".html") == True or \
f.endswith(".js") == True:
tmpstr = R.read_html_js(f)
else:
tmpstr = []
for s in tmpstr:
sappend(("%s:%s" % (f, str(s[0])), s[1]))
# Handle "special" files separately
fileList = d["special"]
for f in fileList:
if f.endswith(".py") == True:
tmpstr = findstr(f, module, modlist)
for s in tmpstr:
sappend(("%s:%s" % (f, str(s[0])), s[1]))
return strings
# ---------------------------------------------------------------------
def get_strings_by_file(self, filename):
""" Return a list of strings in a given file """
if os.path.isfile(filename):
filename = os.path.abspath(filename)
else:
print "'%s' is not a valid file path!" % filename
return []
R = TranslateReadFiles()
strings = []
sappend = strings.append
tmpstr = []
if filename.endswith(".py") == True:
tmpstr = R.findstr(filename, "ALL", self.grp.modlist)
elif filename.endswith(".html") == True or \
filename.endswith(".js") == True:
tmpstr = R.read_html_js(filename)
else:
print "Please enter a '.py', '.js' or '.html' file path"
return []
for s in tmpstr:
sappend(("%s:%s" % (filename, str(s[0])), s[1]))
return strings
# =============================================================================
class TranslateGetFiles:
""" Class to group files by modules """
def __init__(self):
"""
Set up a dictionary to hold files belonging to a particular
module with the module name as the key. Files which contain
strings belonging to more than one module are grouped under
the "special" key.
"""
# Initialize to an empty list for each module
d = {}
modlist = self.get_module_list(current.request.folder)
for m in modlist:
d[m] = []
# List of files belonging to 'core' module
d["core"] = []
# 'special' files which contain strings from more than one module
d["special"] = []
self.d = d
self.modlist = modlist
# ---------------------------------------------------------------------
@staticmethod
def get_module_list(dir):
"""
Returns a list of modules using files in /controllers/
as point of reference
"""
mod = []
mappend = mod.append
cont_dir = os.path.join(dir, "controllers")
mod_files = os.listdir(cont_dir)
for f in mod_files:
if f[0] != ".":
# Strip extension
mappend(f[:-3])
# Add Modules which aren't in controllers
mod += ["support",
"translate",
]
return mod
# ---------------------------------------------------------------------
def group_files(self, currentDir, curmod="", vflag=0):
"""
Recursive function to group Eden files into respective modules
"""
path = os.path
currentDir = path.abspath(currentDir)
base_dir = path.basename(currentDir)
if base_dir in (".git",
"docs",
"languages",
"private",
"templates", # Added separately
"tests",
"uploads",
):
# Skip
return
# If current directory is '/views', set vflag
if base_dir == "views":
vflag = 1
d = self.d
files = os.listdir(currentDir)
for f in files:
if f.startswith(".") or f.endswith(".pyc") or f in ("test.py", "tests.py"):
continue
curFile = path.join(currentDir, f)
if path.isdir(curFile):
# If the current directory is /views,
# categorize files based on the directory name
if vflag:
self.group_files(curFile, f, vflag)
else:
self.group_files(curFile, curmod, vflag)
else:
# If in /appname/views, categorize by parent directory name
if vflag:
base = curmod
# Categorize file as "special" as it contains strings
# belonging to various modules
elif f in ("s3menus.py",
"s3cfg.py",
"000_config.py",
"config.py",
"menus.py"):
base = "special"
else:
# Remove extension ('.py')
base = path.splitext(f)[0]
# If file has "s3" as prefix, remove "s3" to get module name
if "s3" in base:
base = base[2:]
# If file is inside /models and file name is
# of the form var_module.py, remove the "var_" prefix
#elif base_dir == "models" and "_" in base:
# base = base.split("_")[1]
# If base refers to a module, append to corresponding list
if base in d.keys():
d[base].append(curFile)
else:
# Append it to "core" files list
d["core"].append(curFile)
# =============================================================================
class TranslateParseFiles:
"""
Class to extract strings to translate from code files
"""
def __init__(self):
""" Initializes all object variables """
self.cflag = 0 # To indicate if next element is a class
self.fflag = 0 # To indicate if next element is a function
self.sflag = 0 # To indicate 'T' has just been found
self.tflag = 0 # To indicate we are currently inside T(...)
self.mflag = 0 # To indicate we are currently inside M(...)
self.bracket = 0 # Acts as a counter for parenthesis in T(...)
self.outstr = "" # Collects all the data inside T(...)
self.class_name = "" # Stores the current class name
self.func_name = "" # Stores the current function name
self.mod_name = "" # Stores module that the string may belong to
self.findent = -1 # Stores indentation level in menus.py
# ---------------------------------------------------------------------
def parseList(self, entry, tmpstr):
""" Recursive function to extract strings from a parse tree """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseList = self.parseList
for element in entry:
parseList(element, tmpstr)
else:
if token.tok_name[id] == "STRING":
tmpstr.append(value)
# ---------------------------------------------------------------------
def parseConfig(self, spmod, strings, entry, modlist):
""" Function to extract strings from config.py / 000_config.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
# If the element is not a root node,
# go deeper into the tree using dfs
if isinstance(value, list):
parseConfig = self.parseConfig
for element in entry:
parseConfig(spmod, strings, element, modlist)
else:
if self.fflag == 1 and token.tok_name[id] == "NAME":
# Here, func_name stores the module_name of the form
# deployment.settings.module_name.variable
self.func_name = value
self.fflag = 0
# Set flag to store the module name from
# deployment_settings.module_name
elif token.tok_name[id] == "NAME" and \
(value == "deployment_settings" or \
value == "settings"):
self.fflag = 1
# Get module name from deployment_setting.modules list
elif self.tflag == 0 and self.func_name == "modules" and \
token.tok_name[id] == "STRING":
if value[1:-1] in modlist:
self.mod_name = value[1:-1]
# If 'T' is encountered, set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
# If sflag is set and '(' is found, set tflag
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# Check if inside 'T()'
elif self.tflag == 1:
# If '(' is encountered, append it to outstr
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
# If it's not the last ')' of 'T()',
# append to outstr
if self.bracket > 0:
self.outstr += ")"
# If it's the last ')', add string to list
else:
if spmod == "core":
if self.func_name != "modules" and \
self.func_name not in modlist:
strings.append((entry[2], self.outstr))
elif (self.func_name == "modules" and \
self.mod_name == spmod) or \
(self.func_name == spmod):
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
# If we are inside 'T()', append value to outstr
elif self.bracket > 0:
self.outstr += value
# ---------------------------------------------------------------------
def parseS3cfg(self, spmod, strings, entry, modlist):
""" Function to extract the strings from s3cfg.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseS3cfg = self.parseS3cfg
for element in entry:
parseS3cfg(spmod, strings, element, modlist)
else:
# If value is a function name, store it in func_name
if self.fflag == 1:
self.func_name = value
self.fflag = 0
# If value is 'def', set fflag to store func_name next
elif token.tok_name[id] == "NAME" and value == "def":
self.fflag = 1
# If 'T' is encountered, set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
# If core module is requested
if spmod == "core":
# If extracted data doesn't belong
# to any other module, append to list
if "_" not in self.func_name or \
self.func_name.split("_")[1] not in modlist:
strings.append((entry[2], self.outstr))
# If 'module' in 'get_module_variable()'
# is the requested module, append to list
elif "_" in self.func_name and \
self.func_name.split("_")[1] == spmod:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
# ---------------------------------------------------------------------
def parseMenu(self, spmod, strings, entry, level):
""" Function to extract the strings from menus.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseMenu = self.parseMenu
for element in entry:
parseMenu(spmod, strings, element, level + 1)
else:
# If value is a class name, store it in class_name
if self.cflag == 1:
self.class_name = value
self.cflag = 0
# If value is 'class', set cflag to store class name next
elif token.tok_name[id] == "NAME" and value == "class":
self.cflag = 1
elif self.fflag == 1:
# Here func_name is used to store the function names
# which are in 'S3OptionsMenu' class
self.func_name = value
self.fflag = 0
# If value is "def" and it's the first function in the
# S3OptionsMenu class or its indentation level is equal
# to the first function in 'S3OptionsMenu class', then
# set fflag and store the indentation level in findent
elif token.tok_name[id] == "NAME" and value == "def" and \
(self.findent == -1 or level == self.findent):
if self.class_name == "S3OptionsMenu":
self.findent = level
self.fflag = 1
else:
self.func_name = ""
# If current element is 'T', set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# If inside 'T()', extract the data accordingly
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
# If the requested module is 'core' and
# extracted data doesn't lie inside the
# S3OptionsMenu class, append it to list
if spmod == "core":
if self.func_name == "":
strings.append((entry[2], self.outstr))
# If the function name (in S3OptionsMenu class)
# is equal to the module requested,
# then append it to list
elif self.func_name == spmod:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
else:
# Get strings inside 'M()'
# If value is 'M', set mflag
if token.tok_name[id] == "NAME" and value == "M":
self.mflag = 1
elif self.mflag == 1:
# If mflag is set and argument inside is a string,
# append it to list
if token.tok_name[id] == "STRING":
if spmod == "core":
if self.func_name == "":
strings.append((entry[2], value))
elif self.func_name == spmod:
strings.append((entry[2], value))
# If current argument in 'M()' is of type arg = var
# or if ')' is found, unset mflag
elif token.tok_name[id] == "EQUAL" or \
token.tok_name[id] == "RPAR":
self.mflag = 0
# ---------------------------------------------------------------------
def parseAll(self, strings, entry):
""" Function to extract all the strings from a file """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseAll = self.parseAll
for element in entry:
parseAll(strings, element)
else:
# If current element is 'T', set sflag
if token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# If inside 'T', extract data accordingly
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
else:
# If current element is 'M', set mflag
if token.tok_name[id] == "NAME" and value == "M":
self.mflag = 1
elif self.mflag == 1:
# If inside 'M()', extract string accordingly
if token.tok_name[id] == "STRING":
strings.append((entry[2], value))
elif token.tok_name[id] == "EQUAL" or \
token.tok_name[id] == "RPAR":
self.mflag = 0
# =============================================================================
class TranslateReadFiles:
""" Class to read code files """
# ---------------------------------------------------------------------
@staticmethod
def findstr(fileName, spmod, modlist):
"""
Using the methods in TranslateParseFiles to extract the strings
fileName -> the file to be used for extraction
spmod -> the required module
modlist -> a list of all modules in Eden
"""
try:
f = open(fileName)
except:
path = os.path.split(__file__)[0]
fileName = os.path.join(path, fileName)
try:
f = open(fileName)
except:
return
# Read all contents of file
fileContent = f.read()
f.close()
# Remove CL-RF and NOEOL characters
fileContent = "%s\n" % fileContent.replace("\r", "")
try:
st = parser.suite(fileContent)
except:
return []
# Create a parse tree list for traversal
stList = parser.st2list(st, line_info=1)
P = TranslateParseFiles()
# List which holds the extracted strings
strings = []
if spmod == "ALL":
# If all strings are to be extracted, call ParseAll()
parseAll = P.parseAll
for element in stList:
parseAll(strings, element)
else:
# Handle cases for special files which contain
# strings belonging to different modules
appname = current.request.application
fileName = os.path.basename(fileName)
if fileName == "s3menus.py":
parseMenu = P.parseMenu
for element in stList:
parseMenu(spmod, strings, element, 0)
elif fileName == "s3cfg.py":
parseS3cfg = P.parseS3cfg
for element in stList:
parseS3cfg(spmod, strings, element, modlist)
elif fileName in ("000_config.py", "config.py"):
parseConfig = P.parseConfig
for element in stList:
parseConfig(spmod, strings, element, modlist)
# Extract strings from deployment_settings.variable() calls
final_strings = []
fsappend = final_strings.append
settings = current.deployment_settings
for (loc, s) in strings:
if s[0] != '"' and s[0] != "'":
# This is a variable
if "settings." in s:
# Convert the call to a standard form
s = s.replace("current.deployment_settings", "settings")
s = s.replace("()", "")
l = s.split(".")
obj = settings
# Get the actual value
for atr in l[1:]:
try:
obj = getattr(obj, atr)()
except:
current.log.warning("Can't find this deployment_setting, maybe a crud.settings", atr)
else:
s = obj
fsappend((loc, s))
else:
#@ToDo : Get the value of non-settings variables
pass
else:
fsappend((loc, s))
return final_strings
# ---------------------------------------------------------------------
@staticmethod
def read_html_js(filename):
"""
Function to read and extract strings from html/js files
using regular expressions
"""
import re
PY_STRING_LITERAL_RE = r'(?<=[^\w]T\()(?P<name>'\
+ r"[uU]?[rR]?(?:'''(?:[^']|'{1,2}(?!'))*''')|"\
+ r"(?:'(?:[^'\\]|\\.)*')|"\
+ r'(?:"""(?:[^"]|"{1,2}(?!"))*""")|'\
+ r'(?:"(?:[^"\\]|\\.)*"))'
regex_trans = re.compile(PY_STRING_LITERAL_RE, re.DOTALL)
findall = regex_trans.findall
html_js_file = open(filename)
linecount = 0
strings = []
sappend = strings.append
for line in html_js_file:
linecount += 1
occur = findall(line)
for s in occur:
sappend((linecount, s))
html_js_file.close()
return strings
# ---------------------------------------------------------------------
@staticmethod
def get_user_strings():
"""
Function to return the list of user-supplied strings
"""
user_file = os.path.join(current.request.folder, "uploads",
"user_strings.txt")
strings = []
COMMENT = "User supplied"
if os.path.exists(user_file):
f = open(user_file, "r")
for line in f:
line = line.replace("\n", "").replace("\r", "")
strings.append((COMMENT, line))
f.close()
return strings
# ---------------------------------------------------------------------
@staticmethod
def merge_user_strings_file(newstrings):
"""
Function to merge the existing file of user-supplied strings
with newly uploaded strings
"""
user_file = os.path.join(current.request.folder, "uploads",
"user_strings.txt")
oldstrings = []
oappend = oldstrings.append
if os.path.exists(user_file):
f = open(user_file, "r")
for line in f:
oappend(line)
f.close()
# Append user strings if not already present
f = open(user_file, "a")
for s in newstrings:
if s not in oldstrings:
f.write(s)
f.close()
# ---------------------------------------------------------------------
@staticmethod
def get_database_strings(all_template_flag):
"""
Function to get database strings from csv files
which are to be considered for translation.
"""
from s3import import S3BulkImporter
# List of database strings
database_strings = []
dappend = database_strings.append
template_list = []
base_dir = current.request.folder
path = os.path
# If all templates flag is set we look in all templates' tasks.cfg file
if all_template_flag:
template_dir = path.join(base_dir, "modules", "templates")
files = os.listdir(template_dir)
# template_list will have the list of all templates
tappend = template_list.append
for f in files:
curFile = path.join(template_dir, f)
baseFile = path.basename(curFile)
if path.isdir(curFile):
tappend(baseFile)
else:
# Set current template.
template_list.append(current.deployment_settings.base.template)
# List of fields which don't have an S3ReusableFiled defined but we
# know we wish to translate
# @ToDo: Extend to dict if we need to support some which don't just translate the name
always_translate = ("project_beneficiary_type_id",
"stats_demographic_id",
)
# Use bulk importer class to parse tasks.cfg in template folder
bi = S3BulkImporter()
S = Strings()
read_csv = S.read_csv
for template in template_list:
pth = path.join(base_dir, "modules", "templates", template)
if path.exists(path.join(pth, "tasks.cfg")) == False:
continue
bi.load_descriptor(pth)
s3db = current.s3db
for csv in bi.tasks:
# Ignore special import files
if csv[0] != 1:
continue
# csv is in format: prefix, tablename, path of csv file
# assuming represent.translate is always on primary key id
translate = False
fieldname = "%s_%s_id" % (csv[1], csv[2])
if fieldname in always_translate:
translate = True
represent = Storage(fields = ["name"])
elif hasattr(s3db, fieldname) is False:
continue
else:
reusable_field = s3db.get(fieldname)
# Excludes lambdas which are in defaults()
# i.e. reusable fields in disabled modules
if reusable_field and isinstance(reusable_field, S3ReusableField):
represent = reusable_field.attr.represent
if hasattr(represent, "translate"):
translate = represent.translate
# If translate attribute is set to True
if translate:
if hasattr(represent, "fields") is False:
# Only name field is considered
fields = ["name"]
else:
# List of fields is retrieved from represent.fields
fields = represent.fields
# Consider it for translation (csv[3])
csv_path = csv[3]
try:
data = read_csv(csv_path)
except IOError:
# Phantom
continue
title_row = data[0]
idx = 0
idxlist = []
idxappend = idxlist.append
for e in title_row:
if e.lower() in fields:
idxappend(idx)
idx += 1
if idxlist:
# Line number of string retrieved.
line_number = 1
for row in data[1:]:
line_number += 1
# If string is not empty
for idx in idxlist:
try:
s = row[idx]
except:
current.log.error("CSV row incomplete", csv_path)
if s != "":
loc = "%s:%s" % (csv_path, line_number)
dappend((loc, s))
return database_strings
# =============================================================================
class Strings:
""" Class to manipulate strings and their files """
# ---------------------------------------------------------------------
@staticmethod
def remove_quotes(Strings):
"""
Function to remove single or double quotes around the strings
"""
l = []
lappend = l.append
for (d1, d2) in Strings:
if (d1[0] == '"' and d1[-1] == '"') or \
(d1[0] == "'" and d1[-1] == "'"):
d1 = d1[1:-1]
if (d2[0] == '"' and d2[-1] == '"') or \
(d2[0] == "'" and d2[-1] == "'"):
d2 = d2[1:-1]
lappend((d1, d2))
return l
# ---------------------------------------------------------------------
@staticmethod
def remove_duplicates(Strings):
"""
Function to club all the duplicate strings into one row
with ";" separated locations
"""
uniq = {}
appname = current.request.application
for (loc, data) in Strings:
uniq[data] = ""
for (loc, data) in Strings:
# Remove the prefix from the filename
loc = loc.split(appname, 1)[1]
if uniq[data] != "":
uniq[data] = uniq[data] + ";" + loc
else:
uniq[data] = loc
l = []
lappend = l.append
for data in uniq.keys():
lappend((uniq[data], data))
return l
# ---------------------------------------------------------------------
@staticmethod
def remove_untranslated(lang_code):
"""
Function to remove all untranslated strings from a lang_code.py
"""
w2pfilename = os.path.join(current.request.folder, "languages",
"%s.py" % lang_code)
data = read_dict(w2pfilename)
#try:
# # Python 2.7
# # - won't even compile
# data = {k: v for k, v in data.iteritems() if k != v}
#except:
# Python 2.6
newdata = {}
for k, v in data.iteritems():
if k != v:
                newdata[k] = v
        data = newdata
write_dict(w2pfilename, data)
# ---------------------------------------------------------------------
def export_file(self, langfile, modlist, filelist, filetype, all_template_flag):
"""
Function to get the strings by module(s)/file(s), merge with
those strings from existing w2p language file which are already
translated and call the "write_xls()" method if the
default filetype "xls" is chosen. If "po" is chosen, then the
write_po()" method is called.
"""
request = current.request
settings = current.deployment_settings
appname = request.application
folder = request.folder
join = os.path.join
langcode = langfile[:-3]
langfile = join(folder, "languages", langfile)
# If the language file doesn't exist, create it
if not os.path.exists(langfile):
f = open(langfile, "w")
f.write("")
f.close()
NewStrings = []
A = TranslateAPI()
if all_template_flag == 1:
# Select All Templates
A.grp.group_files(join(folder, "modules", "templates"))
else:
# Specific template(s) is selected
templates = settings.get_template()
if not isinstance(templates, (tuple, list)):
templates = (templates,)
group_files = A.grp.group_files
for template in templates:
template_folder = join(folder, "modules", "templates", template)
group_files(template_folder)
R = TranslateReadFiles()
## Select Modules
# Core Modules are always included
core_modules = ("auth", "default")
for mod in core_modules:
modlist.append(mod)
# appadmin and error are part of admin
if "admin" in modlist:
modlist.append("appadmin")
modlist.append("error")
# Select dependent modules
models = current.models
for mod in modlist:
if hasattr(models, mod):
obj = getattr(models, mod)
# Currently only inv module has a depends list
if hasattr(obj, "depends"):
for element in obj.depends:
if element not in modlist:
modlist.append(element)
get_strings_by_module = A.get_strings_by_module
for mod in modlist:
NewStrings += get_strings_by_module(mod)
# Retrieve strings in a file
get_strings_by_file = A.get_strings_by_file
for f in filelist:
NewStrings += get_strings_by_file(f)
# Remove quotes
NewStrings = self.remove_quotes(NewStrings)
# Add database strings
NewStrings += R.get_database_strings(all_template_flag)
# Add user-supplied strings
NewStrings += R.get_user_strings()
# Remove duplicates
NewStrings = self.remove_duplicates(NewStrings)
NewStrings.sort(key=lambda tup: tup[1])
# Retrieve strings from existing w2p language file
OldStrings = self.read_w2p(langfile)
OldStrings.sort(key=lambda tup: tup[0])
# Merge those strings which were already translated earlier
Strings = []
sappend = Strings.append
i = 0
lim = len(OldStrings)
for (l, s) in NewStrings:
while i < lim and OldStrings[i][0] < s:
i += 1
if i != lim and OldStrings[i][0] == s and \
OldStrings[i][1].startswith("*** ") == False:
sappend((l, s, OldStrings[i][1]))
else:
sappend((l, s, ""))
if filetype == "xls":
# Create excel file
return self.write_xls(Strings, langcode)
elif filetype == "po":
# Create pootle file
return self.write_po(Strings)
# ---------------------------------------------------------------------
@staticmethod
def read_csv(fileName):
""" Function to read a CSV file and return a list of rows """
import csv
csv.field_size_limit(2**20) # 1 Mb
data = []
dappend = data.append
f = open(fileName, "rb")
transReader = csv.reader(f)
for row in transReader:
dappend(row)
f.close()
return data
# ---------------------------------------------------------------------
@staticmethod
def read_w2p(fileName):
"""
Function to read a web2py language file and
return a list of translation string pairs
"""
data = read_dict(fileName)
# Convert to list of tuples
# @ToDo: Why?
strings = []
sappend = strings.append
for s in data:
sappend((s, data[s]))
return strings
# ---------------------------------------------------------------------
@staticmethod
def write_csv(fileName, data):
""" Function to write a list of rows into a csv file """
import csv
f = open(fileName, "wb")
# Quote all the elements while writing
transWriter = csv.writer(f, delimiter=" ",
quotechar='"', quoting = csv.QUOTE_ALL)
transWriter.writerow(("location", "source", "target"))
for row in data:
transWriter.writerow(row)
f.close()
# ---------------------------------------------------------------------
def write_po(self, data):
""" Returns a ".po" file constructed from given strings """
from subprocess import call
from tempfile import NamedTemporaryFile
from gluon.contenttype import contenttype
f = NamedTemporaryFile(delete=False)
csvfilename = "%s.csv" % f.name
self.write_csv(csvfilename, data)
g = NamedTemporaryFile(delete=False)
pofilename = "%s.po" % g.name
# Shell needed on Win32
# @ToDo: Copy relevant parts of Translate Toolkit internally to avoid external dependencies
call(["csv2po", "-i", csvfilename, "-o", pofilename], shell=True)
h = open(pofilename, "r")
# Modify headers to return the po file for download
filename = "trans.po"
disposition = "attachment; filename=\"%s\"" % filename
response = current.response
response.headers["Content-Type"] = contenttype(".po")
response.headers["Content-disposition"] = disposition
h.seek(0)
return h.read()
# ---------------------------------------------------------------------
def write_w2p(self, csvfiles, lang_code, option):
"""
Function to merge multiple translated csv files into one
and then merge/overwrite the existing w2p language file
"""
w2pfilename = os.path.join(current.request.folder, "languages",
"%s.py" % lang_code)
# Dictionary to store translated strings
# with untranslated string as the key
data = {}
errors = 0
for f in csvfiles:
newdata = self.read_csv(f)
# Test: 2 cols or 3?
cols = len(newdata[0])
if cols == 1:
raise SyntaxError("CSV file needs to have at least 2 columns!")
elif cols == 2:
# 1st column is source, 2nd is target
for row in newdata:
data[row[0]] = row[1]
else:
# 1st column is location, 2nd is source, 3rd is target
for row in newdata:
data[row[1]] = row[2]
if option == "m":
# Merge strings with existing .py file
keys = data.keys()
olddata = read_dict(w2pfilename)
for s in olddata:
if s not in keys:
data[s] = olddata[s]
write_dict(w2pfilename, data)
# ---------------------------------------------------------------------
@staticmethod
def write_xls(Strings, langcode):
"""
Function to create a spreadsheet (.xls file) of strings with
location, original string and translated string as columns
"""
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
import xlwt
from gluon.contenttype import contenttype
# Define spreadsheet properties
wbk = xlwt.Workbook("utf-8")
sheet = wbk.add_sheet("Translate")
style = xlwt.XFStyle()
font = xlwt.Font()
font.name = "Times New Roman"
style.font = font
sheet.write(0, 0, "location", style)
sheet.write(0, 1, "source", style)
sheet.write(0, 2, "target", style)
row_num = 1
# Write the data to spreadsheet
for (loc, d1, d2) in Strings:
d2 = d2.decode("string-escape").decode("utf-8")
sheet.write(row_num, 0, loc, style)
try:
sheet.write(row_num, 1, d1, style)
except:
current.log.warning("Invalid source string!", loc)
sheet.write(row_num, 1, "", style)
sheet.write(row_num, 2, d2, style)
row_num += 1
# Set column width
for colx in range(0, 3):
sheet.col(colx).width = 15000
# Initialize output
output = StringIO()
# Save the spreadsheet
wbk.save(output)
# Modify headers to return the xls file for download
filename = "%s.xls" % langcode
disposition = "attachment; filename=\"%s\"" % filename
response = current.response
response.headers["Content-Type"] = contenttype(".xls")
response.headers["Content-disposition"] = disposition
output.seek(0)
return output.read()
# =============================================================================
class Pootle:
"""
Class to synchronise a Pootle server's translation with the local
one
@ToDo: Before uploading file to Pootle, ensure all relevant
untranslated strings are present.
"""
# ---------------------------------------------------------------------
def upload(self, lang_code, filename):
"""
Upload a file to Pootle
"""
# @ToDo try/except error
import mechanize
import re
br = mechanize.Browser()
br.addheaders = [("User-agent", "Firefox")]
br.set_handle_equiv(False)
# Ignore robots.txt
br.set_handle_robots(False)
# Don't add Referer (sic) header
br.set_handle_referer(False)
settings = current.deployment_settings
username = settings.get_L10n_pootle_username()
if username is False:
current.log.error("No login information found")
return
pootle_url = settings.get_L10n_pootle_url()
login_url = "%saccounts/login" % pootle_url
try:
br.open(login_url)
except:
current.log.error("Connecton Error")
return
br.select_form("loginform")
br.form["username"] = username
br.form["password"] = settings.get_L10n_pootle_password()
br.submit()
current_url = br.geturl()
if current_url.endswith("login/"):
current.log.error("Login Error")
return
pattern = "<option value=(.+?)>%s.po" % lang_code
# Process lang_code (if of form ab_cd --> convert to ab_CD)
if len(lang_code) > 2:
lang_code = "%s_%s" % (lang_code[:2], lang_code[-2:].upper())
link = "%s%s/eden/" % (pootle_url, lang_code)
page_source = br.open(link).read()
# Use Regex to extract the value for field : "upload to"
regex = re.search(pattern, page_source)
result = regex.group(0)
result = re.split(r'[="]', result)
upload_code = result[2]
try:
br.select_form("uploadform")
# If user is not admin then overwrite option is not there
br.form.find_control(name="overwrite").value = ["overwrite"]
br.form.find_control(name ="upload_to").value = [upload_code]
            br.form.add_file(open(filename), "text/plain", filename)
br.submit()
except:
current.log.error("Error in Uploading form")
return
# ---------------------------------------------------------------------
def download(self, lang_code):
"""
Download a file from Pootle
@ToDo: Allow selection between different variants of language files
"""
import requests
import zipfile
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
from subprocess import call
from tempfile import NamedTemporaryFile
code = lang_code
if len(lang_code) > 2:
code = "%s_%s" % (lang_code[:2], lang_code[-2:].upper())
pootle_url = current.deployment_settings.get_L10n_pootle_url()
link = "%s%s/eden/export/zip" % (pootle_url, code)
try:
r = requests.get(link)
except:
current.log.error("Connection Error")
return False
        zipf = zipfile.ZipFile(StringIO(r.content))
zipf.extractall()
file_name_po = "%s.po" % lang_code
file_name_py = "%s.py" % lang_code
f = NamedTemporaryFile(delete=False)
w2pfilename = "%s.py" % f.name
call(["po2web2py", "-i", file_name_po, "-o", w2pfilename])
S = Strings()
path = os.path.join(current.request.folder, "languages", file_name_py)
pystrings = S.read_w2p(path)
pystrings.sort(key=lambda tup: tup[0])
postrings = S.read_w2p(w2pfilename)
# Remove untranslated strings
postrings = [tup for tup in postrings if tup[0] != tup[1]]
postrings.sort(key=lambda tup: tup[0])
os.unlink(file_name_po)
os.unlink(w2pfilename)
return (postrings, pystrings)
# ---------------------------------------------------------------------
def merge_strings(self, postrings, pystrings, preference):
"""
Merge strings from a PO file and a Py file
"""
lim_po = len(postrings)
lim_py = len(pystrings)
i = 0
j = 0
# Store strings which are missing from pootle
extra = []
eappend = extra.append
while i < lim_py and j < lim_po:
if pystrings[i][0] < postrings[j][0]:
if preference == False:
eappend(pystrings[i])
i += 1
elif pystrings[i][0] > postrings[j][0]:
j += 1
# pystrings[i] == postrings[j]
else:
# Pootle is being given preference
if preference:
# Check if string is not empty
if postrings[j][1] and not postrings[j][1].startswith("***"):
pystrings[i] = postrings[j]
                # Py is being given preference
else:
if pystrings[i][1] and not pystrings[i][1].startswith("***"):
postrings[j] = pystrings[i]
i += 1
j += 1
if preference:
return pystrings
else:
# Add strings which were left
while i < lim_py:
extra.append(pystrings[i])
i += 1
# Add extra strings to Pootle list
for st in extra:
postrings.append(st)
postrings.sort(key=lambda tup: tup[0])
return postrings
# ---------------------------------------------------------------------
def merge_pootle(self, preference, lang_code):
# returns a tuple (postrings, pystrings)
ret = self.download(lang_code)
if not ret:
return
from subprocess import call
from tempfile import NamedTemporaryFile
import sys
# returns pystrings if preference was True else returns postrings
ret = self.merge_strings(ret[0], ret[1], preference)
S = Strings()
data = []
dappend = data.append
temp_csv = NamedTemporaryFile(delete=False)
csvfilename = "%s.csv" % temp_csv.name
if preference:
# Only python file has been changed
for i in ret:
dappend(("", i[0], i[1].decode("string-escape")))
S.write_csv(csvfilename, data)
# overwrite option
S.write_w2p([csvfilename], lang_code, "o")
os.unlink(csvfilename)
else:
# Only Pootle file has been changed
for i in ret:
dappend(("", i[0], i[1].decode("string-escape")))
S.write_csv(csvfilename, data)
temp_po = NamedTemporaryFile(delete=False)
pofilename = "%s.po" % temp_po.name
# Shell needed on Win32
# @ToDo: Copy relevant parts of Translate Toolkit internally to avoid external dependencies
call(["csv2po", "-i", csvfilename, "-o", pofilename], shell=True)
self.upload(lang_code, pofilename)
# Clean up extra created files
os.unlink(csvfilename)
os.unlink(pofilename)
# =============================================================================
class TranslateReportStatus(object):
"""
Class to report the percentage of translated strings for
each module for a given language.
"""
# -------------------------------------------------------------------------
@classmethod
def create_master_file(cls):
"""
Create master file of strings and their distribution in modules
"""
try:
import cPickle as pickle
except:
import pickle
# Instantiate the translateAPI
api = TranslateAPI()
# Generate list of modules
modules = api.get_modules()
modules.append("core")
# The list of all strings
all_strings = []
addstring = all_strings.append
# Dictionary of {module: indices of strings used in this module}
indices = {}
# Helper dict for fast lookups
string_indices = {}
index = 0
get_strings_by_module = api.get_strings_by_module
for module in modules:
module_indices = []
addindex = module_indices.append
strings = get_strings_by_module(module)
for (origin, string) in strings:
# Remove outermost quotes around the string
if (string[0] == '"' and string[-1] == '"') or\
(string[0] == "'" and string[-1] == "'"):
string = string[1:-1]
string_index = string_indices.get(string)
if string_index is None:
string_indices[string] = index
addstring(string)
addindex(index)
index += 1
else:
addindex(string_index)
indices[module] = module_indices
# Save all_strings and string_dict as pickle objects in a file
data_file = os.path.join(current.request.folder,
"uploads",
"temp.pkl")
f = open(data_file, "wb")
pickle.dump(all_strings, f)
pickle.dump(indices, f)
f.close()
# Mark all string counts as dirty
ptable = current.s3db.translate_percentage
current.db(ptable.id > 0).update(dirty=True)
# -------------------------------------------------------------------------
@classmethod
def update_string_counts(cls, lang_code):
"""
Update the translation percentages for all modules for a given
language.
@ToDo: Generate fresh .py files with all relevant strings for this
(since we don't store untranslated strings)
"""
try:
import cPickle as pickle
except:
import pickle
base_dir = current.request.folder
# Read the language file
langfile = "%s.py" % lang_code
langfile = os.path.join(base_dir, "languages", langfile)
lang_strings = read_dict(langfile)
# Retrieve the data stored in master file
data_file = os.path.join(base_dir, "uploads", "temp.pkl")
f = open(data_file, "rb")
all_strings = pickle.load(f)
string_dict = pickle.load(f)
f.close()
db = current.db
ptable = current.s3db.translate_percentage
translated = set()
addindex = translated.add
for index, string in enumerate(all_strings):
translation = lang_strings.get(string)
if translation is None or translation[:4] == "*** ":
continue
elif translation != string or lang_code == "en-gb":
addindex(index)
for module, indices in string_dict.items():
all_indices = set(indices)
num_untranslated = len(all_indices - translated)
num_translated = len(all_indices) - num_untranslated
data = dict(code = lang_code,
module = module,
translated = num_translated,
untranslated = num_untranslated,
dirty=False)
query = (ptable.code == lang_code) & \
(ptable.module == module)
record = db(query).select(ptable._id, limitby=(0, 1)).first()
if record:
record.update_record(**data)
else:
ptable.insert(**data)
return
# -------------------------------------------------------------------------
@classmethod
def get_translation_percentages(cls, lang_code):
"""
Get the percentages of translated strings per module for
the given language code.
@param lang_code: the language code
"""
pickle_file = os.path.join(current.request.folder,
"uploads",
"temp.pkl")
# If master file doesn't exist, create it
if not os.path.exists(pickle_file):
cls.create_master_file()
db = current.db
ptable = current.s3db.translate_percentage
query = (ptable.code == lang_code)
fields = ("dirty", "translated", "untranslated", "module")
rows = db(query).select(*fields)
if not rows or rows.first().dirty:
# Update the string counts
cls.update_string_counts(lang_code)
rows = db(query).select(*fields)
percentage = {}
total_strings = 0
total_translated = 0
total_untranslated = 0
for row in rows:
num_translated = row.translated
num_untranslated = row.untranslated
total_strings += num_translated + num_untranslated
if not num_untranslated:
percentage[row.module] = 100
else:
total = num_translated + num_untranslated
total_translated += num_translated
total_untranslated += num_untranslated
percentage[row.module] = \
round((float(num_translated) / total) * 100, 2)
if not total_untranslated:
percentage["complete_file"] = 100
else:
percentage["complete_file"] = \
round((float(total_translated) / (total_strings)) * 100, 2)
return percentage
# END =========================================================================
| mit | 234,777,986,828,744,220 | 37.195965 | 117 | 0.432322 | false |
vdmtools/vdmtools | test/powertest/metaivrun.py | 1 | 3464 | import gentestcases, cmdline, util, setup, report, convert, resfile
import os, string
true, false = 1,0
ext = ""
binext = ""
def execute(lang, type):
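    # Build a driver main() once, then compile, run and check every MetaIV test
    # case for the given language/type against its expected result file.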
global ext, binext
#counter to indicate progress
total = 1
#os type
if util.IsWindowsOS() and os.environ.get('OSTYPE') == 'win32':
ext = "cpp"
binext = ".exe"
else:
ext = "cc"
#main() file
compiler = cmdline.LookUpWildCard('metaiv', lang, type, 'compiler')
flags = cmdline.LookUpWildCard('metaiv', lang, type, 'cflags')
MakeDriverAndObjectFiles("driver." + ext, compiler, flags)
#Set expected results
expSet = resfile.MakeStdExpansionSet('metaiv', lang, 'impl')
resfile.RegisterExpansionSet(expSet)
#jobSize is used to give a low level of outputting
jobSize = cmdline.LookUp('spec-job-size')
#initialize the extraction of test cases
gentestcases.StartSearch('metaiv', lang, type)
name = gentestcases.NextTestCase()
while (name != None):
#setting report
report.setTestCaseName(name)
if (total % jobSize) == 1:
report.Progress(2, "Handling test cases " + str(total) + "..." + str(total + jobSize - 1))
report.Progress(3, "Running " + name)
ok = RunTestCase(name, lang, type)
#cleaning up
if ok:
if util.CleanFile(ok):
baseName = util.ExtractName(name)
util.DeleteFiles([baseName + ".res"])#, "driver" + binext])
else:
break
#generates next test case
name = gentestcases.NextTestCase()
total = total + 1
#delete the driver and object files at the end
#util.DeleteFiles(["driver." + ext])
#-------------------------------------------------------------------------------
# Compiles each test case with the driver program, starts it and checks the
# result with the expected result file
# Return true if everything works fine
#-------------------------------------------------------------------------------
def RunTestCase(name, lang, type):
baseName = util.ExtractName(name)
#compile and run
compiler = cmdline.LookUpWildCard('metaiv', lang, type, 'compiler')
flags = cmdline.LookUpWildCard('metaiv', lang, type, 'cflags')
    cmd = compiler + " -o driver driver." + ext + " " + name + " " + flags
    (exitCode, dummy1, dummy2) = util.RunCommand(cmd, 0, "Problem with compiling")
ok = (exitCode == 0)
if ok:
if not os.path.exists("driver"):
report.Error("Driveri binary is not created")
(exitCode, stdout, stderr) = util.RunCommand("./driver" + binext, 0, "Error running c++ generated c++ binary", true)
ok = (exitCode == 0)
#compare results with expected result files
if ok:
ok = util.WriteFile(baseName + ".res", stdout)
else:
report.Error("Output", None, stdout, stderr)
if ok:
resFile = resfile.FindResFile(name)
ok = (resFile != None)
if ok:
resfile.CompareResult(name, baseName + ".res", resFile, None)
return ok
#--------------------------------------------------------------
# Makes driver file which is a main() file for all test cases.
# Also compiles necessary library files.
#--------------------------------------------------------------
def MakeDriverAndObjectFiles(fileName, compiler, flags):
driverFile = open(fileName,"w")
driverFile.write('extern void metaivtest();\n')
driverFile.write('int main()\n')
driverFile.write('{\n')
driverFile.write(' metaivtest();\n')
driverFile.write(' return 0;\n')
driverFile.write('}\n')
driverFile.close()
| gpl-3.0 | -6,080,857,385,642,780,000 | 29.654867 | 118 | 0.60739 | false |
rajalokan/nova | nova/tests/functional/test_servers.py | 1 | 41312 | # Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import zlib
import mock
from oslo_log import log as logging
from oslo_serialization import base64
from oslo_utils import timeutils
from nova.compute import api as compute_api
from nova.compute import rpcapi
from nova import context
from nova import exception
from nova import objects
from nova.objects import block_device as block_device_obj
from nova import test
from nova.tests.functional.api import client
from nova.tests.functional import integrated_helpers
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_network
from nova import volume
LOG = logging.getLogger(__name__)
class ServersTestBase(integrated_helpers._IntegratedTestBase):
api_major_version = 'v2'
_force_delete_parameter = 'forceDelete'
_image_ref_parameter = 'imageRef'
_flavor_ref_parameter = 'flavorRef'
_access_ipv4_parameter = 'accessIPv4'
_access_ipv6_parameter = 'accessIPv6'
_return_resv_id_parameter = 'return_reservation_id'
_min_count_parameter = 'min_count'
def setUp(self):
super(ServersTestBase, self).setUp()
# The network service is called as part of server creates but no
# networks have been populated in the db, so stub the methods.
# The networks aren't relevant to what is being tested.
fake_network.set_stub_network_methods(self)
self.conductor = self.start_service(
'conductor', manager='nova.conductor.manager.ConductorManager')
def _wait_for_state_change(self, server, from_status):
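        # Poll the server (up to 50 times, 0.1s apart) until it leaves
        # 'from_status', then return its latest representation.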
for i in range(0, 50):
server = self.api.get_server(server['id'])
if server['status'] != from_status:
break
time.sleep(.1)
return server
def _wait_for_deletion(self, server_id):
# Wait (briefly) for deletion
for _retries in range(50):
try:
found_server = self.api.get_server(server_id)
except client.OpenStackApiNotFoundException:
found_server = None
LOG.debug("Got 404, proceeding")
break
LOG.debug("Found_server=%s", found_server)
# TODO(justinsb): Mock doesn't yet do accurate state changes
# if found_server['status'] != 'deleting':
# break
time.sleep(.1)
# Should be gone
self.assertFalse(found_server)
def _delete_server(self, server_id):
# Delete the server
self.api.delete_server(server_id)
self._wait_for_deletion(server_id)
def _get_access_ips_params(self):
return {self._access_ipv4_parameter: "172.19.0.2",
self._access_ipv6_parameter: "fe80::2"}
def _verify_access_ips(self, server):
self.assertEqual('172.19.0.2',
server[self._access_ipv4_parameter])
self.assertEqual('fe80::2', server[self._access_ipv6_parameter])
class ServersTest(ServersTestBase):
def test_get_servers(self):
# Simple check that listing servers works.
servers = self.api.get_servers()
for server in servers:
LOG.debug("server: %s", server)
def test_create_server_with_error(self):
# Create a server which will enter error state.
def throw_error(*args, **kwargs):
raise exception.BuildAbortException(reason='',
instance_uuid='fake')
self.stub_out('nova.virt.fake.FakeDriver.spawn', throw_error)
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
found_server = self._wait_for_state_change(found_server, 'BUILD')
self.assertEqual('ERROR', found_server['status'])
self._delete_server(created_server_id)
def test_create_and_delete_server(self):
# Creates and deletes a server.
# Create server
# Build the server data gradually, checking errors along the way
server = {}
good_server = self._build_minimal_create_server_request()
post = {'server': server}
# Without an imageRef, this throws 500.
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# With an invalid imageRef, this throws 500.
server[self._image_ref_parameter] = self.get_invalid_image()
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# Add a valid imageRef
server[self._image_ref_parameter] = good_server.get(
self._image_ref_parameter)
# Without flavorRef, this throws 500
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
server[self._flavor_ref_parameter] = good_server.get(
self._flavor_ref_parameter)
# Without a name, this throws 500
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# Set a valid server name
server['name'] = good_server['name']
created_server = self.api.post_server(post)
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Check it's there
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
# It should also be in the all-servers list
servers = self.api.get_servers()
server_ids = [s['id'] for s in servers]
self.assertIn(created_server_id, server_ids)
found_server = self._wait_for_state_change(found_server, 'BUILD')
# It should be available...
# TODO(justinsb): Mock doesn't yet do this...
self.assertEqual('ACTIVE', found_server['status'])
servers = self.api.get_servers(detail=True)
for server in servers:
self.assertIn("image", server)
self.assertIn("flavor", server)
self._delete_server(created_server_id)
def _force_reclaim(self):
# Make sure that compute manager thinks the instance is
# old enough to be expired
the_past = timeutils.utcnow() + datetime.timedelta(hours=1)
timeutils.set_time_override(override_time=the_past)
self.addCleanup(timeutils.clear_time_override)
ctxt = context.get_admin_context()
self.compute._reclaim_queued_deletes(ctxt)
def test_deferred_delete(self):
# Creates, deletes and waits for server to be reclaimed.
self.flags(reclaim_instance_interval=1)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Cannot restore unless instance is deleted
self.assertRaises(client.OpenStackApiException,
self.api.post_server_action, created_server_id,
{'restore': {}})
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SOFT_DELETED', found_server['status'])
self._force_reclaim()
# Wait for real deletion
self._wait_for_deletion(created_server_id)
def test_deferred_delete_restore(self):
# Creates, deletes and restores a server.
self.flags(reclaim_instance_interval=3600)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SOFT_DELETED', found_server['status'])
# Restore server
self.api.post_server_action(created_server_id, {'restore': {}})
# Wait for server to become active again
found_server = self._wait_for_state_change(found_server, 'DELETED')
self.assertEqual('ACTIVE', found_server['status'])
def test_deferred_delete_force(self):
# Creates, deletes and force deletes a server.
self.flags(reclaim_instance_interval=3600)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SOFT_DELETED', found_server['status'])
# Force delete server
self.api.post_server_action(created_server_id,
{self._force_delete_parameter: {}})
# Wait for real deletion
self._wait_for_deletion(created_server_id)
def test_create_server_with_metadata(self):
# Creates a server with metadata.
# Build the server data gradually, checking errors along the way
server = self._build_minimal_create_server_request()
metadata = {}
for i in range(30):
metadata['key_%s' % i] = 'value_%s' % i
server['metadata'] = metadata
post = {'server': server}
created_server = self.api.post_server(post)
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual(metadata, found_server.get('metadata'))
# The server should also be in the all-servers details list
servers = self.api.get_servers(detail=True)
server_map = {server['id']: server for server in servers}
found_server = server_map.get(created_server_id)
self.assertTrue(found_server)
# Details do include metadata
self.assertEqual(metadata, found_server.get('metadata'))
# The server should also be in the all-servers summary list
servers = self.api.get_servers(detail=False)
server_map = {server['id']: server for server in servers}
found_server = server_map.get(created_server_id)
self.assertTrue(found_server)
# Summary should not include metadata
self.assertFalse(found_server.get('metadata'))
# Cleanup
self._delete_server(created_server_id)
def test_server_metadata_actions_negative_invalid_state(self):
# Create server with metadata
server = self._build_minimal_create_server_request()
metadata = {'key_1': 'value_1'}
server['metadata'] = metadata
post = {'server': server}
created_server = self.api.post_server(post)
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
self.assertEqual(metadata, found_server.get('metadata'))
server_id = found_server['id']
# Change status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
metadata = {'key_2': 'value_2'}
# Update Metadata item in SHELVED (not ACTIVE, etc.)
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_metadata,
server_id, metadata)
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Delete Metadata item in SHELVED (not ACTIVE, etc.)
ex = self.assertRaises(client.OpenStackApiException,
self.api.delete_server_metadata,
server_id, 'key_1')
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Cleanup
self._delete_server(server_id)
def test_create_and_rebuild_server(self):
# Rebuild a server with metadata.
# create a server with initially has no metadata
server = self._build_minimal_create_server_request()
server_post = {'server': server}
metadata = {}
for i in range(30):
metadata['key_%s' % i] = 'value_%s' % i
server_post['server']['metadata'] = metadata
created_server = self.api.post_server(server_post)
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
created_server = self._wait_for_state_change(created_server, 'BUILD')
# rebuild the server with metadata and other server attributes
post = {}
post['rebuild'] = {
self._image_ref_parameter: "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
"name": "blah",
self._access_ipv4_parameter: "172.19.0.2",
self._access_ipv6_parameter: "fe80::2",
"metadata": {'some': 'thing'},
}
post['rebuild'].update(self._get_access_ips_params())
self.api.post_server_action(created_server_id, post)
LOG.debug("rebuilt server: %s", created_server)
self.assertTrue(created_server['id'])
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual({'some': 'thing'}, found_server.get('metadata'))
self.assertEqual('blah', found_server.get('name'))
self.assertEqual(post['rebuild'][self._image_ref_parameter],
found_server.get('image')['id'])
self._verify_access_ips(found_server)
# rebuild the server with empty metadata and nothing else
post = {}
post['rebuild'] = {
self._image_ref_parameter: "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
"metadata": {},
}
self.api.post_server_action(created_server_id, post)
LOG.debug("rebuilt server: %s", created_server)
self.assertTrue(created_server['id'])
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual({}, found_server.get('metadata'))
self.assertEqual('blah', found_server.get('name'))
self.assertEqual(post['rebuild'][self._image_ref_parameter],
found_server.get('image')['id'])
self._verify_access_ips(found_server)
# Cleanup
self._delete_server(created_server_id)
def test_rename_server(self):
# Test building and renaming a server.
# Create a server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s", created_server)
server_id = created_server['id']
self.assertTrue(server_id)
# Rename the server to 'new-name'
self.api.put_server(server_id, {'server': {'name': 'new-name'}})
# Check the name of the server
created_server = self.api.get_server(server_id)
self.assertEqual(created_server['name'], 'new-name')
# Cleanup
self._delete_server(server_id)
def test_create_multiple_servers(self):
# Creates multiple servers and checks for reservation_id.
# Create 2 servers, setting 'return_reservation_id, which should
# return a reservation_id
server = self._build_minimal_create_server_request()
server[self._min_count_parameter] = 2
server[self._return_resv_id_parameter] = True
post = {'server': server}
response = self.api.post_server(post)
self.assertIn('reservation_id', response)
reservation_id = response['reservation_id']
self.assertNotIn(reservation_id, ['', None])
# Create 1 more server, which should not return a reservation_id
server = self._build_minimal_create_server_request()
post = {'server': server}
created_server = self.api.post_server(post)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# lookup servers created by the first request.
servers = self.api.get_servers(detail=True,
search_opts={'reservation_id': reservation_id})
server_map = {server['id']: server for server in servers}
found_server = server_map.get(created_server_id)
# The server from the 2nd request should not be there.
self.assertIsNone(found_server)
# Should have found 2 servers.
self.assertEqual(len(server_map), 2)
# Cleanup
self._delete_server(created_server_id)
for server_id in server_map:
self._delete_server(server_id)
def test_create_server_with_injected_files(self):
# Creates a server with injected_files.
personality = []
# Inject a text file
data = 'Hello, World!'
personality.append({
'path': '/helloworld.txt',
'contents': base64.encode_as_bytes(data),
})
# Inject a binary file
data = zlib.compress(b'Hello, World!')
personality.append({
'path': '/helloworld.zip',
'contents': base64.encode_as_bytes(data),
})
# Create server
server = self._build_minimal_create_server_request()
server['personality'] = personality
post = {'server': server}
created_server = self.api.post_server(post)
LOG.debug("created_server: %s", created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Check it's there
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
found_server = self._wait_for_state_change(found_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Cleanup
self._delete_server(created_server_id)
def test_stop_start_servers_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Start server in ACTIVE
# NOTE(mkoshiya): When os-start API runs, the server status
# must be SHUTOFF.
# By returning 409, I want to confirm that the ACTIVE server does not
# cause unexpected behavior.
post = {'os-start': {}}
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
created_server_id, post)
self.assertEqual(409, ex.response.status_code)
self.assertEqual('ACTIVE', found_server['status'])
# Stop server
post = {'os-stop': {}}
self.api.post_server_action(created_server_id, post)
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHUTOFF', found_server['status'])
# Stop server in SHUTOFF
# NOTE(mkoshiya): When os-stop API runs, the server status
# must be ACTIVE or ERROR.
# By returning 409, I want to confirm that the SHUTOFF server does not
# cause unexpected behavior.
post = {'os-stop': {}}
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
created_server_id, post)
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHUTOFF', found_server['status'])
# Cleanup
self._delete_server(created_server_id)
def test_revert_resized_server_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Revert resized server in ACTIVE
# NOTE(yatsumi): When revert resized server API runs,
# the server status must be VERIFY_RESIZE.
# By returning 409, I want to confirm that the ACTIVE server does not
# cause unexpected behavior.
post = {'revertResize': {}}
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
created_server_id, post)
self.assertEqual(409, ex.response.status_code)
self.assertEqual('ACTIVE', found_server['status'])
# Cleanup
self._delete_server(created_server_id)
def test_resize_server_negative_invalid_state(self):
# Avoid migration
self.flags(allow_resize_to_same_host=True)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Resize server(flavorRef: 1 -> 2)
post = {'resize': {"flavorRef": "2", "OS-DCF:diskConfig": "AUTO"}}
self.api.post_server_action(created_server_id, post)
found_server = self._wait_for_state_change(found_server, 'RESIZE')
self.assertEqual('VERIFY_RESIZE', found_server['status'])
# Resize server in VERIFY_RESIZE(flavorRef: 2 -> 1)
# NOTE(yatsumi): When resize API runs, the server status
# must be ACTIVE or SHUTOFF.
# By returning 409, I want to confirm that the VERIFY_RESIZE server
# does not cause unexpected behavior.
post = {'resize': {"flavorRef": "1", "OS-DCF:diskConfig": "AUTO"}}
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
created_server_id, post)
self.assertEqual(409, ex.response.status_code)
self.assertEqual('VERIFY_RESIZE', found_server['status'])
# Cleanup
self._delete_server(created_server_id)
def test_confirm_resized_server_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Confirm resized server in ACTIVE
# NOTE(yatsumi): When confirm resized server API runs,
# the server status must be VERIFY_RESIZE.
# By returning 409, I want to confirm that the ACTIVE server does not
# cause unexpected behavior.
post = {'confirmResize': {}}
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
created_server_id, post)
self.assertEqual(409, ex.response.status_code)
self.assertEqual('ACTIVE', found_server['status'])
# Cleanup
self._delete_server(created_server_id)
class ServersTestV21(ServersTest):
api_major_version = 'v2.1'
class ServersTestV219(ServersTestBase):
api_major_version = 'v2.1'
def _create_server(self, set_desc = True, desc = None):
server = self._build_minimal_create_server_request()
if set_desc:
server['description'] = desc
post = {'server': server}
response = self.api.api_post('/servers', post).body
return (server, response['server'])
def _update_server(self, server_id, set_desc = True, desc = None):
new_name = integrated_helpers.generate_random_alphanumeric(8)
server = {'server': {'name': new_name}}
if set_desc:
server['server']['description'] = desc
self.api.api_put('/servers/%s' % server_id, server)
def _rebuild_server(self, server_id, set_desc = True, desc = None):
new_name = integrated_helpers.generate_random_alphanumeric(8)
post = {}
post['rebuild'] = {
"name": new_name,
self._image_ref_parameter: "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
self._access_ipv4_parameter: "172.19.0.2",
self._access_ipv6_parameter: "fe80::2",
"metadata": {'some': 'thing'},
}
post['rebuild'].update(self._get_access_ips_params())
if set_desc:
post['rebuild']['description'] = desc
self.api.api_post('/servers/%s/action' % server_id, post)
def _create_server_and_verify(self, set_desc = True, expected_desc = None):
# Creates a server with a description and verifies it is
# in the GET responses.
created_server_id = self._create_server(set_desc,
expected_desc)[1]['id']
self._verify_server_description(created_server_id, expected_desc)
self._delete_server(created_server_id)
def _update_server_and_verify(self, server_id, set_desc = True,
expected_desc = None):
# Updates a server with a description and verifies it is
# in the GET responses.
self._update_server(server_id, set_desc, expected_desc)
self._verify_server_description(server_id, expected_desc)
def _rebuild_server_and_verify(self, server_id, set_desc = True,
expected_desc = None):
# Rebuilds a server with a description and verifies it is
# in the GET responses.
self._rebuild_server(server_id, set_desc, expected_desc)
self._verify_server_description(server_id, expected_desc)
def _verify_server_description(self, server_id, expected_desc = None,
desc_in_resp = True):
# Calls GET on the servers and verifies that the description
# is set as expected in the response, or not set at all.
response = self.api.api_get('/servers/%s' % server_id)
found_server = response.body['server']
self.assertEqual(server_id, found_server['id'])
if desc_in_resp:
# Verify the description is set as expected (can be None)
self.assertEqual(expected_desc, found_server.get('description'))
else:
# Verify the description is not included in the response.
self.assertNotIn('description', found_server)
servers = self.api.api_get('/servers/detail').body['servers']
server_map = {server['id']: server for server in servers}
found_server = server_map.get(server_id)
self.assertTrue(found_server)
if desc_in_resp:
# Verify the description is set as expected (can be None)
self.assertEqual(expected_desc, found_server.get('description'))
else:
# Verify the description is not included in the response.
self.assertNotIn('description', found_server)
def _create_assertRaisesRegex(self, desc):
# Verifies that a 400 error is thrown on create server
with self.assertRaisesRegex(client.OpenStackApiException,
".*Unexpected status code.*") as cm:
self._create_server(True, desc)
self.assertEqual(400, cm.exception.response.status_code)
def _update_assertRaisesRegex(self, server_id, desc):
# Verifies that a 400 error is thrown on update server
with self.assertRaisesRegex(client.OpenStackApiException,
".*Unexpected status code.*") as cm:
self._update_server(server_id, True, desc)
self.assertEqual(400, cm.exception.response.status_code)
def _rebuild_assertRaisesRegex(self, server_id, desc):
# Verifies that a 400 error is thrown on rebuild server
with self.assertRaisesRegex(client.OpenStackApiException,
".*Unexpected status code.*") as cm:
self._rebuild_server(server_id, True, desc)
self.assertEqual(400, cm.exception.response.status_code)
def test_create_server_with_description(self):
self.api.microversion = '2.19'
# Create and get a server with a description
self._create_server_and_verify(True, 'test description')
# Create and get a server with an empty description
self._create_server_and_verify(True, '')
# Create and get a server with description set to None
self._create_server_and_verify()
# Create and get a server without setting the description
self._create_server_and_verify(False)
def test_update_server_with_description(self):
self.api.microversion = '2.19'
# Create a server with an initial description
server_id = self._create_server(True, 'test desc 1')[1]['id']
# Update and get the server with a description
self._update_server_and_verify(server_id, True, 'updated desc')
# Update and get the server name without changing the description
self._update_server_and_verify(server_id, False, 'updated desc')
# Update and get the server with an empty description
self._update_server_and_verify(server_id, True, '')
# Update and get the server by removing the description (set to None)
self._update_server_and_verify(server_id)
# Update and get the server with a 2nd new description
self._update_server_and_verify(server_id, True, 'updated desc2')
# Cleanup
self._delete_server(server_id)
def test_rebuild_server_with_description(self):
self.api.microversion = '2.19'
# Create a server with an initial description
server = self._create_server(True, 'test desc 1')[1]
server_id = server['id']
self._wait_for_state_change(server, 'BUILD')
# Rebuild and get the server with a description
self._rebuild_server_and_verify(server_id, True, 'updated desc')
# Rebuild and get the server name without changing the description
self._rebuild_server_and_verify(server_id, False, 'updated desc')
# Rebuild and get the server with an empty description
self._rebuild_server_and_verify(server_id, True, '')
# Rebuild and get the server by removing the description (set to None)
self._rebuild_server_and_verify(server_id)
# Rebuild and get the server with a 2nd new description
self._rebuild_server_and_verify(server_id, True, 'updated desc2')
# Cleanup
self._delete_server(server_id)
def test_version_compatibility(self):
# Create a server with microversion v2.19 and a description.
self.api.microversion = '2.19'
server_id = self._create_server(True, 'test desc 1')[1]['id']
# Verify that the description is not included on V2.18 GETs
self.api.microversion = '2.18'
self._verify_server_description(server_id, desc_in_resp = False)
# Verify that updating the server with description on V2.18
# results in a 400 error
self._update_assertRaisesRegex(server_id, 'test update 2.18')
# Verify that rebuilding the server with description on V2.18
# results in a 400 error
self._rebuild_assertRaisesRegex(server_id, 'test rebuild 2.18')
# Cleanup
self._delete_server(server_id)
# Create a server on V2.18 and verify that the description
# defaults to the name on a V2.19 GET
server_req, response = self._create_server(False)
server_id = response['id']
self.api.microversion = '2.19'
self._verify_server_description(server_id, server_req['name'])
# Cleanup
self._delete_server(server_id)
# Verify that creating a server with description on V2.18
# results in a 400 error
self.api.microversion = '2.18'
self._create_assertRaisesRegex('test create 2.18')
def test_description_errors(self):
self.api.microversion = '2.19'
# Create servers with invalid descriptions. These throw 400.
# Invalid unicode with non-printable control char
self._create_assertRaisesRegex(u'invalid\0dstring')
# Description is longer than 255 chars
self._create_assertRaisesRegex('x' * 256)
# Update and rebuild servers with invalid descriptions.
# These throw 400.
server_id = self._create_server(True, "desc")[1]['id']
# Invalid unicode with non-printable control char
self._update_assertRaisesRegex(server_id, u'invalid\u0604string')
self._rebuild_assertRaisesRegex(server_id, u'invalid\u0604string')
# Description is longer than 255 chars
self._update_assertRaisesRegex(server_id, 'x' * 256)
self._rebuild_assertRaisesRegex(server_id, 'x' * 256)
class ServerTestV220(ServersTestBase):
api_major_version = 'v2.1'
def setUp(self):
super(ServerTestV220, self).setUp()
self.api.microversion = '2.20'
fake_network.set_stub_network_methods(self)
self.ctxt = context.get_admin_context()
def _create_server(self):
server = self._build_minimal_create_server_request()
post = {'server': server}
response = self.api.api_post('/servers', post).body
return (server, response['server'])
def _shelve_server(self):
server = self._create_server()[1]
server_id = server['id']
self._wait_for_state_change(server, 'BUILD')
self.api.post_server_action(server_id, {'shelve': None})
return self._wait_for_state_change(server, 'ACTIVE')
def _get_fake_bdms(self, ctxt):
return block_device_obj.block_device_make_list(self.ctxt,
[fake_block_device.FakeDbBlockDeviceDict(
{'device_name': '/dev/vda',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': '5d721593-f033-4f6d-ab6f-b5b067e61bc4'})])
def test_attach_detach_vol_to_shelved_server(self):
self.flags(shelved_offload_time=-1)
found_server = self._shelve_server()
self.assertEqual('SHELVED', found_server['status'])
server_id = found_server['id']
# Test attach volume
with test.nested(mock.patch.object(compute_api.API,
'_check_attach_and_reserve_volume'),
mock.patch.object(rpcapi.ComputeAPI,
'attach_volume')) as (mock_reserve,
mock_attach):
volume_attachment = {"volumeAttachment": {"volumeId":
"5d721593-f033-4f6d-ab6f-b5b067e61bc4"}}
self.api.api_post(
'/servers/%s/os-volume_attachments' % (server_id),
volume_attachment)
self.assertTrue(mock_reserve.called)
self.assertTrue(mock_attach.called)
# Test detach volume
self.stub_out('nova.volume.cinder.API.get', fakes.stub_volume_get)
with test.nested(mock.patch.object(compute_api.API,
'_check_and_begin_detach'),
mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid'),
mock.patch.object(rpcapi.ComputeAPI,
'detach_volume')
) as (mock_check, mock_get_bdms, mock_rpc):
mock_get_bdms.return_value = self._get_fake_bdms(self.ctxt)
attachment_id = mock_get_bdms.return_value[0]['volume_id']
self.api.api_delete('/servers/%s/os-volume_attachments/%s' %
(server_id, attachment_id))
self.assertTrue(mock_check.called)
self.assertTrue(mock_rpc.called)
self._delete_server(server_id)
def test_attach_detach_vol_to_shelved_offloaded_server(self):
self.flags(shelved_offload_time=0)
found_server = self._shelve_server()
self.assertEqual('SHELVED_OFFLOADED', found_server['status'])
server_id = found_server['id']
# Test attach volume
with test.nested(mock.patch.object(compute_api.API,
'_check_attach_and_reserve_volume'),
mock.patch.object(volume.cinder.API,
'attach')) as (mock_reserve, mock_vol):
volume_attachment = {"volumeAttachment": {"volumeId":
"5d721593-f033-4f6d-ab6f-b5b067e61bc4"}}
attach_response = self.api.api_post(
'/servers/%s/os-volume_attachments' % (server_id),
volume_attachment).body['volumeAttachment']
self.assertTrue(mock_reserve.called)
self.assertTrue(mock_vol.called)
self.assertIsNone(attach_response['device'])
# Test detach volume
self.stub_out('nova.volume.cinder.API.get', fakes.stub_volume_get)
with test.nested(mock.patch.object(compute_api.API,
'_check_and_begin_detach'),
mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid'),
mock.patch.object(compute_api.API,
'_local_cleanup_bdm_volumes')
) as (mock_check, mock_get_bdms, mock_clean_vols):
mock_get_bdms.return_value = self._get_fake_bdms(self.ctxt)
attachment_id = mock_get_bdms.return_value[0]['volume_id']
self.api.api_delete('/servers/%s/os-volume_attachments/%s' %
(server_id, attachment_id))
self.assertTrue(mock_check.called)
self.assertTrue(mock_clean_vols.called)
self._delete_server(server_id)
| apache-2.0 | 409,646,539,307,872,260 | 40.85613 | 79 | 0.612074 | false |
hariseldon99/archives | dtwa_ising_longrange/dtwa_ising_longrange/dtwa_ising_longrange.py | 1 | 45340 | #!/usr/bin/env python
#Author: Analabha Roy
from __future__ import division, print_function
from mpi4py import MPI
from reductions import Intracomm
from redirect_stdout import stdout_redirected
import sys
import copy
import random
import numpy as np
from scipy.signal import fftconvolve
from scipy.sparse import *
from scipy.integrate import odeint
from pprint import pprint
from tabulate import tabulate
threshold = 1e-4
root = 0
#This is the kronecker delta symbol for vector indices
deltaij = np.eye(3)
#This is the Levi-Civita symbol for vector indices
eijk = np.zeros((3, 3, 3))
eijk[0, 1, 2] = eijk[1, 2, 0] = eijk[2, 0, 1] = 1
eijk[0, 2, 1] = eijk[2, 1, 0] = eijk[1, 0, 2] = -1
def t_deriv(quantities, times):
dt = np.gradient(times)
return np.gradient(quantities, dt)
def drive(t, params):
return params.h0 * np.cos(params.omega * t)
def weyl_hamilt(s,times,param):
"""
Evaluates the Weyl Symbols of the Hamiltonian, H_w
Does this at all times
If |s^a> = (s^a_0, s^a_1 ... s^a_N), and
H_w = -(1/2) * \sum_{nm} J_{nm} (J_x s^n_x s^m_x + J_y s^n_y s^m_y
+ J_z s^n_z s^m_z) - h(t) * \sum_n (h_x s^n_x +h_y s^n_y
+ h_z s^n_z)
"""
N = param.latsize
#s[:, 0:N] = sx , s[:, N:2*N] = sy, s[:, 2*N:3*N] = sz
drvs = drive(times, param)
hw = param.jx * np.dot(s[:,0*N:1*N],param.jmat.dot(s[:,0*N:1*N].T))
hw += param.jy * np.dot(s[:,1*N:2*N],param.jmat.dot(s[:,1*N:2*N].T))
hw += param.jz * np.dot(s[:,2*N:3*N],param.jmat.dot(s[:,2*N:3*N].T))
hw = hw /(2.0 * param.norm)
hw += drvs * (param.hx * np.sum(s[:, 0:N]) +\
param.hy * np.sum(s[:, N:2*N]) + param.hz * np.sum(s[:, 2*N:3*N]))
return -hw
def func_1storder(s, t, param):
"""
The RHS of general case, per Schachemmayer eq A2
"""
N = param.latsize
#s[0:N] = sx , s[N:2*N] = sy, s[2*N:3*N] = sz
drv = drive(t, param)
jsx = 2.0 * param.jx * param.jmat.dot(s[0:N])/param.norm
jsx += 2.0 * drv * param.hx
jsy = 2.0 * param.jy * param.jmat.dot(s[N:2*N])/param.norm
jsy += 2.0 * drv * param.hy
jsz = 2.0 * param.jz * param.jmat.dot(s[2*N:3*N])/param.norm
jsz += 2.0 * drv * param.hz
dsxdt = s[N:2*N] * jsz - s[2*N:3*N] * jsy
dsydt = s[2*N:3*N] * jsx - s[0:N] * jsz
dszdt = s[0:N] * jsy - s[N:2*N] * jsx
return np.concatenate((dsxdt, dsydt, dszdt))
def jac_1storder(s, t, param):
"""
Jacobian of the general case. First order.
This is given by 9 NXN submatrices:
J00=J11=J22=0
Although Jacobian is NOT antisymmetric in general! See below
J01 = +J_z diag(J|s^x>) + h(t) h_z - J_y (J#|s^z>)
J10 = -J_z diag(J|s^x>) - h(t) h_z + J_x (J#|s^z>)
J02 = -J_y diag(J|s^y>) - h(t) h_y + J_z (J#|s^y>)
J20 = +J_y diag(J|s^y>) + h(t) h_y - J_x (J#|s^y>)
J12 = +J_x diag(J|s^x>) + h(t) h_x - J_z (J#|s^x>)
J21 = -J_x diag(J|s^x>) - h(t) h_x + J_y (J#|s^x>)
Here, '#' (hash operator) means multiply each row of a matrix by the
corresponding vector element. This is implemented by numpy.multiply()
"""
N = param.latsize
#s[0:N] = sx , s[N:2*N] = sy, s[2*N:3*N] = sz
full_jacobian = np.zeros(shape=(3*N, 3*N))
drivemat = 2.0 * drive(t, param) * np.eye(N)
diag_jsx = np.diagflat((param.jmat.dot(s[0:N])))/param.norm
diag_jsy = np.diagflat((param.jmat.dot(s[N:2*N])))/param.norm
#diag_jsz = np.diagflat((param.jmat.dot(s[2*N:3*N])))/param.norm
hash_jsx = (np.multiply(param.jmat.T, s[0:N]).T)/param.norm
hash_jsy = (np.multiply(param.jmat.T, s[N:2*N]).T)/param.norm
hash_jsz = (np.multiply(param.jmat.T, s[2*N:3*N]).T)/param.norm
full_jacobian[0:N, N:2*N] = param.jz * diag_jsx + drivemat * param.hz\
-param.jy * hash_jsz
full_jacobian[N:2*N, 0:N] = -param.jz * diag_jsx - \
drivemat * param.hz + param.jx * hash_jsz
full_jacobian[0:N, 2*N:3*N] = -param.jy * diag_jsy - drivemat * \
param.hy + param.jz * hash_jsy
full_jacobian[2*N:3*N, 0:N] = param.jy * diag_jsy + drivemat * \
param.hy - param.jx * hash_jsy
full_jacobian[N:2*N, 2*N:3*N] = param.jx * diag_jsx + drivemat * \
param.hx - param.jz * hash_jsx
full_jacobian[2*N:3*N, N:2*N] = -param.jx * diag_jsx - drivemat * \
param.hx + param.jy * hash_jsx
return full_jacobian
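#The block layout described above is easy to get wrong, so the helper below
#is a minimal, illustrative sanity check (not part of the original module):
#it compares jac_1storder against a forward-difference estimate built from
#func_1storder. The step size eps is an assumption, not a value taken from
#this code base. The same idea can be pointed at func_2ndorder/jac_2ndorder.
def _fd_check_jac_1storder(s, t, param, eps=1e-7):
    """Return the max abs deviation between analytic and numeric Jacobians."""
    f0 = func_1storder(s, t, param)
    numeric = np.empty((f0.size, s.size))
    for k in range(s.size):
        ds = np.zeros_like(s)
        ds[k] = eps
        numeric[:, k] = (func_1storder(s + ds, t, param) - f0) / eps
    return np.abs(numeric - jac_1storder(s, t, param)).max()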
def func_2ndorder(s, t, param):
"""
The RHS of general case, second order correction, per Lorenzo
"J" is the J_{ij} hopping matrix
-\partial_t |s^x> = -first order + 2 (J^y Jg^{yz} - J^z Jg^{zy})
/norm,
-\partial_t |s^y> = -first order + 2 (-J^z Jg^{zx} + J^x Jg^{xz})
/norm,
-\partial_t |s^z> = -first order + 2 (-J^x Jg^{xy} + J^y Jg^{yx})
/norm.
"""
N = param.latsize
#svec is the tensor s^l_\mu
#G = s[3*N:].reshape(3,3,N,N) is the tensor g^{ab}_{\mu\nu}.
sview = s.view()
stensor = sview[0:3*N].reshape(3, N)
gtensor = sview[3*N:].reshape(3, 3, N, N)
gtensor[:,:,range(N),range(N)] = 0.0 #Set the diagonals of g_munu to 0
htensor = np.zeros_like(stensor)
htensor[0].fill(param.hvec[0])
htensor[1].fill(param.hvec[1])
htensor[2].fill(param.hvec[2])
Gtensor = np.einsum("mg,abgn->abmn", param.jmat, gtensor)/param.norm
Mtensor = np.einsum("am,b,mn->abmn", stensor, param.jvec, \
param.jmat)/param.norm
hvec_dressed = htensor + np.einsum("llgm->lm", Mtensor)
dtensor = gtensor + np.einsum("am,bn", stensor, stensor)
dsdt_1 = func_1storder(sview[0:3*N], t, param).reshape(3, N)
dsdt = dsdt_1 - \
2.0 * np.einsum("bcmm,b,abc->am", Gtensor, param.jvec, eijk)
dgdt = -np.einsum("lbmn,abl->abmn", Mtensor, eijk) + \
np.einsum("lanm,abl->abmn", Mtensor, eijk)
dgdt -= np.einsum("lm,kbmn,lka->abmn", hvec_dressed, gtensor, eijk) -\
np.einsum("llnm,kbmn,lka->abmn", Mtensor, gtensor, eijk) +\
np.einsum("ln,akmn,lkb->abmn", hvec_dressed, gtensor, eijk) -\
np.einsum("llmn,akmn,lkb->abmn", Mtensor, gtensor, eijk)
dgdt -= np.einsum("l,km,lbmn,lka->abmn", \
param.jvec, stensor, Gtensor, eijk) + \
np.einsum("l,kn,lanm,lkb->abmn", param.jvec, stensor, \
Gtensor, eijk)
dgdt += np.einsum("almn,lkmn,lkb->abmn", Mtensor, dtensor, eijk)\
+ np.einsum("blnm,lknm,lka->abmn", Mtensor, dtensor, eijk)
#Flatten it before returning
return np.concatenate((dsdt.flatten(), 2.0 * dgdt.flatten()))
def jac_2ndorder(s, t, param):
"""
Jacobian of the general case. Second order.
"""
N = param.latsize
fullsize_2ndorder = 3 * N + 9 * N**2
#svec is the tensor s^l_\mu
#G = s[3*N:].reshape(3,3,N,N) is the tensor g^{ab}_{\mu\nu}.
sview = s.view()
stensor = sview[0:3*N].reshape(3, N)
gtensor = sview[3*N:].reshape(3, 3, N, N)
htensor = np.zeros_like(stensor)
htensor[0].fill(param.hvec[0])
htensor[1].fill(param.hvec[1])
htensor[2].fill(param.hvec[2])
jjtensor = np.einsum("a,mn->amn", param.jvec, param.jmat)
sstensor = np.einsum("km,ln->klmn",stensor,stensor)
Mtensor = np.einsum("am,b,mn->abmn", stensor, param.jvec, \
param.jmat)/param.norm
hvec_dressed = htensor + np.einsum("llgm->lm", Mtensor)
full_jacobian = np.zeros(shape=(fullsize_2ndorder, fullsize_2ndorder))
#J00 subblock
full_jacobian[0:3*N, 0:3*N] = jac_1storder(s, t, param)
#J01 subblock. Precalculated
full_jacobian[0:3*N, 3*N:] = param.dsdotdg
#J10 subblock
full_jacobian[3*N:, 0:3*N] = -(np.einsum("pml,kbmn,pka->abpmnl", \
jjtensor,gtensor, eijk) + np.einsum("pnl,akmn,pkb->abpmnl", \
jjtensor, gtensor, eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] -= (np.einsum("qmg,ml,bqng,qpa->abpmnl",\
jjtensor, param.deltamn,gtensor, eijk) + \
np.einsum("qng,nl,aqmg,qpb->abpmnl",jjtensor, param.deltamn, \
gtensor, eijk) ).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] += (np.einsum("qmn,ml,bqnn,qpa->abpmnl",\
jjtensor, param.deltamn,gtensor, eijk) + \
np.einsum("qnm,nl,aqmm,qpb->abpmnl", jjtensor,param.deltamn, \
gtensor, eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] += (np.einsum("qmn,ml,pa,qkmn,qkb->abpmnl",\
jjtensor,param.deltamn,deltaij,gtensor+sstensor,eijk) + \
np.einsum("qmn,nl,pb,kqmn,qka->abpmnl", jjtensor,param.deltamn, \
deltaij,gtensor+sstensor,eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] += (np.einsum("pmn,ml,akmn,pkb->abpmnl",\
jjtensor,param.deltamn, sstensor, eijk) + \
np.einsum("pmn,nl,bknm,pka->abpmnl", jjtensor,param.deltamn, \
sstensor, eijk) + np.einsum("kmn,nl,akmm,kpb->abpmnl",\
jjtensor,param.deltamn, sstensor, eijk) + \
np.einsum("kmn,ml,bknn,kpa->abpmnl", jjtensor,param.deltamn, \
sstensor, eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] = 2.0 * \
(full_jacobian[3*N:, 0:3*N]/param.norm)
full_jacobian[3*N:, 0:3*N] += param.dsdotdg.T
#J11 subblock:
full_jacobian[3*N:, 3*N:] = -(np.einsum("qm,mlnhbpqra->abrpmnlh",\
hvec_dressed, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] += (np.einsum("qqmn,mlnhbpqra->abrpmnlh", \
Mtensor, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] -= (np.einsum("qn,mlnharqpb->abrpmnlh",\
hvec_dressed, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] += (np.einsum("qqnm,mlnharqpb->abrpmnlh",\
Mtensor, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
excl_tensor = -np.einsum("qmh,km,nl,br,pka->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
excl_tensor += -np.einsum("qnh,kn,ml,ar,pkb->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
excl_tensor += -np.einsum("qml,km,nh,bp,rka->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
excl_tensor += -np.einsum("qnl,kn,mh,ap,rkb->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
#Set the \eta=\mu,\nu components of excl_tensor to 0
excl_tensor[:,:,:,:,range(N),:,:,range(N)] = 0.0
excl_tensor[:,:,:,:,:,range(N),:,range(N)] = 0.0
full_jacobian[3*N:, 3*N:] += excl_tensor.reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] += (np.einsum("rmn,am,ml,nh,rpb->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk) + \
np.einsum("rmn,bn,mh,nl,rpa->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] -= (np.einsum("pmn,am,mh,nl,prb->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk) + \
np.einsum("pmn,bn,ml,nh,pra->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] = 2.0 * (full_jacobian[3*N:, 3*N:]/param.norm)
return full_jacobian
class ParamData:
"""Class to store parameters, precalculated objects,
filenames, objects like Kac norm
time-independent part of Jacobian. Set s_order to
true if doing second order dtwa'
"""
def __init__(self,pbc=False ,nonorm=True, latsize=101, beta=1.0, \
h0=1.0, omega=0.0, hx=0.0, hy=0.0, hz=0.0,\
jx=0.0, jy=0.0, jz=1.0):
#Default Output file names. Each file dumps a different observable
self.output_magx = "sx_outfile.txt"
self.output_magy = "sy_outfile.txt"
self.output_magz = "sz_outfile.txt"
self.output_sxvar = "sxvar_outfile.txt"
self.output_syvar = "syvar_outfile.txt"
self.output_szvar = "szvar_outfile.txt"
self.output_sxyvar = "sxyvar_outfile.txt"
self.output_sxzvar = "sxzvar_outfile.txt"
self.output_syzvar = "syzvar_outfile.txt"
#Whether to normalize with Kac norm or not
self.nonorm = nonorm
self.latsize = latsize
self.beta = beta #Power law index for long range interactions
self.h0 = h0 # Drive amplitude
self.omega = omega #Drive frequency
self.hx = hx #x transverse field
self.hy = hy #y transverse field
self.hz = hz #z transverse field
self.jx = jx #x hopping
self.jy = jy #y hopping
self.jz = jz #z hopping
self.jvec = np.array([jx, jy, jz])
self.hvec = np.array([hx, hy, hz])
N = self.latsize
self.fullsize_2ndorder = 3 * N + 9 * N**2
self.deltamn = np.eye(N)
# These are the lattice sites for two point density matrix calc.
        self.tpnt_sites = (int(np.floor(N/2)), int(np.floor(N/2)) + 2) #ints: used as array indices
if(pbc):
self.periodic_boundary_conditions = True
self.open_boundary_conditions = False
#This is the dense Jmn hopping matrix with power law decay for
#periodic or open boundary conditions.
J = dia_matrix((N, N))
mid_diag = np.floor(N/2).astype(int)
for i in xrange(1,mid_diag+1):
elem = pow(i, -self.beta)
J.setdiag(elem, k=i)
J.setdiag(elem, k=-i)
for i in xrange(mid_diag+1, N):
elem = pow(N-i, -self.beta)
J.setdiag(elem, k=i)
J.setdiag(elem, k=-i)
else: #Open boundary conditions
self.periodic_boundary_conditions = False
self.open_boundary_conditions = True
J = dia_matrix((N, N))
for i in xrange(1,N):
elem = pow(i, -self.beta)
J.setdiag(elem, k=i)
J.setdiag(elem, k=-i)
self.jmat = J.toarray()
#This is the optional Kac norm
mid = np.floor(N/2).astype(int)
if self.nonorm:
self.norm = 1.0
else:
self.norm =\
2.0 * np.sum(1/(pow(\
np.arange(1, mid+1), self.beta).astype(float)))
class OutData:
"""Class to store output data"""
def __init__(self, t, sx, sy, sz, sxx, syy, szz, sxy, sxz, syz,\
params):
self.t_output = t
self.sx, self.sy, self.sz = sx, sy, sz
self.sxvar, self.syvar, self.szvar = sxx, syy, szz
self.sxyvar, self.sxzvar, self.syzvar = sxy, sxz, syz
self.__dict__.update(params.__dict__)
def normalize_data(self, w_totals, lsize):
self.sx = self.sx/(w_totals * lsize)
self.sy = self.sy/(w_totals * lsize)
self.sz = self.sz/(w_totals * lsize)
self.sxvar = (1/lsize) + (self.sxvar/(w_totals * lsize * lsize))
self.sxvar = self.sxvar - (self.sx)**2
self.syvar = (1/lsize) + (self.syvar/(w_totals * lsize * lsize))
self.syvar = self.syvar - (self.sy)**2
self.szvar = (1/lsize) + (self.szvar/(w_totals * lsize * lsize))
self.szvar = self.szvar - (self.sz)**2
self.sxyvar = (self.sxyvar/(w_totals * lsize * lsize))
self.sxyvar = self.sxyvar - (self.sx * self.sy)
self.sxzvar = (self.sxzvar/(w_totals * lsize * lsize))
self.sxzvar = self.sxzvar - (self.sx * self.sz)
self.syzvar = (self.syzvar/(w_totals * lsize * lsize))
self.syzvar = self.syzvar - (self.sy * self.sz)
def dump_data(self):
np.savetxt(self.output_magx, \
np.vstack((self.t_output, self.sx)).T, delimiter=' ')
np.savetxt(self.output_magy, \
np.vstack((self.t_output, self.sy)).T, delimiter=' ')
np.savetxt(self.output_magz, \
np.vstack((self.t_output, self.sz)).T, delimiter=' ')
np.savetxt(self.output_sxvar, \
np.vstack((self.t_output, self.sxvar)).T, delimiter=' ')
np.savetxt(self.output_syvar, \
np.vstack((self.t_output, self.syvar)).T, delimiter=' ')
np.savetxt(self.output_szvar, \
np.vstack((self.t_output, self.szvar)).T, delimiter=' ')
np.savetxt(self.output_sxyvar, \
np.vstack((self.t_output, self.sxyvar)).T, delimiter=' ')
np.savetxt(self.output_sxzvar, \
np.vstack((self.t_output, self.sxzvar)).T, delimiter=' ')
np.savetxt(self.output_syzvar, \
np.vstack((self.t_output, self.syzvar)).T, delimiter=' ')
class OutData_ij:
"""
Class to store ij output data
The gij is a numpy array of 3X3 gij matrices at multiple times
"""
def __init__(self, t, sites, sxi, syi, szi, sxj, syj, szj, sy_iplusk,\
syy_k, gij=None):
self.times = t
self.sites = sites
self.sxi, self.syi, self.szi = sxi, syi, szi
self.sxj, self.syj, self.szj = sxj, syj, szj
#Output formatting dictionaries
self.sitespinsdict = {"time": t,\
"sxi": self.sxi.view(),\
"syi": self.syi.view(),\
"szi": self.szi.view(),\
"sxj": self.sxj.view(),\
"syj": self.syj.view(),\
"szj": self.szj.view()}
self.sy_iplusk = sy_iplusk
self.syy_k = syy_k
if gij is not None:
self.gij = gij
v = self.gij.view()
self.sitecorrdict = {"time": t,\
"gxxij": v[:, 0, 0],\
"gxyij": v[:, 0, 1],\
"gxzij": v[:, 0, 2],\
"gyxij": v[:, 1, 0],\
"gyyij": v[:, 1, 1],\
"gyzij": v[:, 1, 2],\
"gzxij": v[:, 2, 0],\
"gzyij": v[:, 2, 1],\
"gzzij": v[:, 2, 2]}
def normalize_data(self, w_totals):
self.sitespinsdict['sxi'] = self.sitespinsdict['sxi']/(w_totals)
self.sitespinsdict['syi'] = self.sitespinsdict['syi']/(w_totals)
self.sitespinsdict['szi'] = self.sitespinsdict['szi']/(w_totals)
self.sitespinsdict['sxj'] = self.sitespinsdict['sxj']/(w_totals)
self.sitespinsdict['syj'] = self.sitespinsdict['syj']/(w_totals)
self.sitespinsdict['szj'] = self.sitespinsdict['szj']/(w_totals)
#Normalize the spatial correlations:
self.sy_iplusk = self.sy_iplusk/(w_totals)
self.syy_k = self.syy_k/(w_totals)
self.syy_k -= np.array([self.sy_iplusk[i] *\
self.sitespinsdict['syi'][i] for i in xrange(self.times.size)])
if hasattr(self, 'sitecorrdict'):
for key in self.sitecorrdict.iterkeys():
if key is not "time":
self.sitecorrdict[key] = self.sitecorrdict[key]/(w_totals)
def dump_data(self):
print("\n\n Tabular dump of site data:\n\n")
print("\n Note that, in the first order case,")
print("the 'gij' columns actually print sijs,")
print("which are s(..)i * s(..)j\n\n")
print("Sites chosen:", self.sites)
print("\n")
print(tabulate(self.sitespinsdict, headers="keys", floatfmt=".6f" ))
print("Spatial correlations from site i = \n", self.sites[0])
print(np.vstack((self.times,self.syy_k.T)).T)
if hasattr(self, 'sitecorrdict'):
print(" ")
print(tabulate(self.sitecorrdict, headers="keys", floatfmt=".6f"))
class Dtwa_System:
"""
This is the class that creates the DTWA system,
has all MPI_Gather routines for aggregating the
samples, and executes the dtwa methods (1st and 2nd order)
Set s_order to true if doing second order dtwa
Set jac to false if you don't want to evaluate the jacobian, since
it may be too big in some cases and cause the routine to crash.
"""
def __init__(self, params, mpicomm, n_t=2000, file_output=True,\
seed_offset=0, s_order=False, jac=False,\
verbose=True, sitedata=False):
"""
        Input default values and make precalculated objects
Comm = MPI Communicator
"""
self.jac = jac
self.__dict__.update(params.__dict__)
self.n_t = n_t
self.file_output = file_output
self.comm=mpicomm
self.seed_offset = seed_offset
self.s_order = s_order
#Booleans for verbosity and for calculating site data
self.verbose = verbose
self.sitedata = sitedata
N = params.latsize
#Only computes these if you want 2nd order
if self.s_order and self.jac:
#Below are the constant subblocks of the 2nd order Jacobian
#The 00 subblock is the first order Jacobian in func below
#The entire 01 subblock, fully time independent (ds_dot/dg):
self.dsdotdg = -np.einsum("p,mh,ml,apr->arpmlh",\
self.jvec, self.jmat, self.deltamn, eijk)
self.dsdotdg += np.einsum("r,ml,mh,arp->arpmlh", \
self.jvec,self.jmat, self.deltamn, eijk)
self.dsdotdg = 2.0 * (self.dsdotdg/self.norm)
self.dsdotdg = self.dsdotdg.reshape(3*N, 9*N**2)
self.delta_eps_tensor = np.einsum("ml,nh,ar,qpb->mlnharqpb",\
self.deltamn,self.deltamn,deltaij,eijk)
self.delta_eps_tensor += np.einsum("mh,nl,ap,qrb->mhnlapqrb",\
self.deltamn,self.deltamn,deltaij,eijk)
#The time independent part of the 10 subblock (dg_dot/ds):
#is the SAME as ds_dot/dg
def sum_reduce_all_data(self, datalist_loc,t, mpcomm):
"""
Does the parallel sum reduction of all data
"""
#Do local sums
sx_locsum = np.sum(data.sx for data in datalist_loc)
sy_locsum = np.sum(data.sy for data in datalist_loc)
sz_locsum = np.sum(data.sz for data in datalist_loc)
sxvar_locsum = np.sum(data.sxvar for data in datalist_loc)
syvar_locsum = np.sum(data.syvar for data in datalist_loc)
szvar_locsum = np.sum(data.szvar for data in datalist_loc)
sxyvar_locsum = np.sum(data.sxyvar for data in datalist_loc)
sxzvar_locsum = np.sum(data.sxzvar for data in datalist_loc)
syzvar_locsum = np.sum(data.syzvar for data in datalist_loc)
#Only root processor will actually get the data
sx_totals = np.zeros_like(sx_locsum) if mpcomm.rank == root\
else None
sy_totals = np.zeros_like(sy_locsum) if mpcomm.rank == root\
else None
sz_totals = np.zeros_like(sz_locsum) if mpcomm.rank == root\
else None
sxvar_totals = np.zeros_like(sxvar_locsum) if mpcomm.rank == root\
else None
syvar_totals = np.zeros_like(syvar_locsum) if mpcomm.rank == root\
else None
szvar_totals = np.zeros_like(szvar_locsum) if mpcomm.rank == root\
else None
sxyvar_totals = np.zeros_like(sxyvar_locsum) if mpcomm.rank == root\
else None
sxzvar_totals = np.zeros_like(sxzvar_locsum) if mpcomm.rank == root\
else None
syzvar_totals = np.zeros_like(syzvar_locsum) if mpcomm.rank == root\
else None
#To prevent conflicts with other comms
duplicate_comm = Intracomm(mpcomm)
sx_totals = duplicate_comm.reduce(sx_locsum, root=root)
sy_totals = duplicate_comm.reduce(sy_locsum, root=root)
sz_totals = duplicate_comm.reduce(sz_locsum, root=root)
sxvar_totals = duplicate_comm.reduce(sxvar_locsum, root=root)
syvar_totals = duplicate_comm.reduce(syvar_locsum, root=root)
szvar_totals = duplicate_comm.reduce(szvar_locsum, root=root)
sxyvar_totals = duplicate_comm.reduce(sxyvar_locsum, root=root)
sxzvar_totals = duplicate_comm.reduce(sxzvar_locsum, root=root)
syzvar_totals = duplicate_comm.reduce(syzvar_locsum, root=root)
if mpcomm.rank == root:
return OutData(t, sx_totals, sy_totals, sz_totals, sxvar_totals, \
syvar_totals, szvar_totals, sxyvar_totals, sxzvar_totals,\
syzvar_totals, self)
else:
return None
def sum_reduce_site_data(self, datalist_loc, t, sites, mpcomm):
"""
Does the parallel sum reduction of site data
"""
sxi_locsum = np.sum(data.sxi for data in datalist_loc)
syi_locsum = np.sum(data.syi for data in datalist_loc)
szi_locsum = np.sum(data.szi for data in datalist_loc)
sxj_locsum = np.sum(data.sxj for data in datalist_loc)
syj_locsum = np.sum(data.syj for data in datalist_loc)
szj_locsum = np.sum(data.szj for data in datalist_loc)
sy_iplusk_locsum = np.sum(data.sy_iplusk for data in datalist_loc)
syy_k_locsum = np.sum(data.syy_k for data in datalist_loc)
try: #This is to take care of the case when gij = None
gijs_locsum = np.sum(data.gij for data in datalist_loc)
except AttributeError:
gijs_locsum = None
sxi_totals = np.zeros_like(sxi_locsum) if mpcomm.rank == root\
else None
syi_totals = np.zeros_like(syi_locsum) if mpcomm.rank == root\
else None
szi_totals = np.zeros_like(szi_locsum) if mpcomm.rank == root\
else None
sxj_totals = np.zeros_like(sxj_locsum) if mpcomm.rank == root\
else None
syj_totals = np.zeros_like(syj_locsum) if mpcomm.rank == root \
else None
szj_totals = np.zeros_like(szj_locsum) if mpcomm.rank == root \
else None
sy_iplusk_totals = np.zeros_like(sy_iplusk_locsum) \
if mpcomm.rank == root else None
syy_k_totals = np.zeros_like(syy_k_locsum) \
if mpcomm.rank == root else None
gijs_totals = np.zeros_like(gijs_locsum) if mpcomm.rank == root \
else None
#To prevent conflicts with other comms
duplicate_comm = Intracomm(mpcomm)
#Broadcast these reductions to root
sxi_totals = duplicate_comm.reduce(sxi_locsum, root=root)
syi_totals = duplicate_comm.reduce(syi_locsum, root=root)
szi_totals = duplicate_comm.reduce(szi_locsum, root=root)
sxj_totals = duplicate_comm.reduce(sxj_locsum, root=root)
syj_totals = duplicate_comm.reduce(syj_locsum, root=root)
szj_totals = duplicate_comm.reduce(szj_locsum, root=root)
sy_iplusk_totals = duplicate_comm.reduce(sy_iplusk_locsum,root=root)
syy_k_totals = duplicate_comm.reduce(syy_k_locsum, root=root)
if gijs_locsum is not None:
gijs_totals = duplicate_comm.reduce(gijs_locsum, root=root)
else:
gijs_totals = None
if mpcomm.rank == root:
return OutData_ij(t, sites, sxi_totals, syi_totals, \
szi_totals, sxj_totals, syj_totals, szj_totals, \
sy_iplusk_totals, syy_k_totals, gijs_totals)
else:
return None
def dtwa_ising_longrange_1storder(self, time_info):
comm = self.comm
N = self.latsize
(t_init, n_cycles, n_steps) = time_info
rank = comm.rank
if rank == root and self.verbose:
pprint("# Run parameters:")
pprint(vars(self), depth=2)
if rank == root and not self.verbose:
pprint("# Starting run ...")
if self.omega == 0:
t_final = t_init + n_cycles
else:
t_final = t_init + (n_cycles * (2.0* np.pi/self.omega))
dt = (t_final-t_init)/(n_steps-1.0)
t_output = np.arange(t_init, t_final, dt)
#Let each process get its chunk of n_t by round robin
nt_loc = 0
iterator = rank
while iterator < self.n_t:
nt_loc += 1
iterator += comm.size
#Scatter unique seeds for generating unique random number arrays :
#each processor gets its own nt_loc seeds, and allocates nt_loc
#initial conditions. Each i.c. is a 2N sized array
#now, each process sends its value of nt_loc to root
all_ntlocs = comm.gather(nt_loc, root=root)
#Let the root process initialize nt unique integers for random seeds
if rank == root:
all_seeds = np.arange(self.n_t, dtype=np.int64)+1
all_ntlocs = np.array(all_ntlocs)
all_displacements = np.roll(np.cumsum(all_ntlocs), root+1)
all_displacements[root] = 0 # First displacement
else:
all_seeds = None
all_displacements = None
local_seeds = np.zeros(nt_loc, dtype=np.int64)
#Root scatters nt_loc sized seed data to that particular process
comm.Scatterv([all_seeds, all_ntlocs, all_displacements,\
MPI.DOUBLE], local_seeds)
list_of_local_data = []
if self.sitedata:
list_of_local_ijdata = []
for runcount in xrange(0, nt_loc, 1):
            random.seed(local_seeds[runcount] + self.seed_offset)
            #Seed numpy's RNG as well; the initial conditions below are drawn
            #with np.random, which is not affected by random.seed alone
            np.random.seed(local_seeds[runcount] + self.seed_offset)
#According to Schachenmayer, the wigner function of the quantum
#state generates the below initial conditions classically
sx_init = np.ones(N)
sy_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
sz_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
#Set initial conditions for the dynamics locally to vector
#s_init and store it as [s^x,s^x,s^x, .... s^y,s^y,s^y ...,
#s^z,s^z,s^z, ...]
s_init = np.concatenate((sx_init, sy_init, sz_init))
if self.verbose:
if self.jac:
s, info = odeint(func_1storder, s_init, t_output,\
args=(self,), Dfun=jac_1storder, full_output=True)
else:
s, info = odeint(func_1storder, s_init, t_output,\
args=(self,), Dfun=None, full_output=True)
else:
if self.jac:
s = odeint(func_1storder, s_init, t_output, args=(self,),\
Dfun=jac_1storder)
else:
s = odeint(func_1storder, s_init, t_output, args=(self,),\
Dfun=None)
#Compute expectations <sx> and \sum_{ij}<sx_i sx_j> -<sx>^2 with
#wigner func at t_output values LOCALLY for each initcond and
#store them
sx_expectations = np.sum(s[:, 0:N], axis=1)
sy_expectations = np.sum(s[:, N:2*N], axis=1)
sz_expectations = np.sum(s[:, 2*N:3*N], axis=1)
if self.sitedata:
(i, j) = self.tpnt_sites
sxi, syi, szi = s[:, i], s[:, i+N], s[:, i+2*N]
sxj, syj, szj = s[:, j], s[:, j+N], s[:, j+2*N]
sxxij, syyij, szzij = sxi * sxj, syi * syj, szi * szj
sxyij, sxzij, syzij = sxi * syj, sxi * szj, syi * szj
syxij, szxij, szyij = syi * sxj, szi * sxj, szi * syj
gij = np.array([sxxij, sxyij, sxzij, syxij, syyij, syzij,\
szxij, szyij, szzij]).T.reshape(t_output.size,3,3)
#Calculate Spatial Correlations
sy_iplusk = s[:, N:2*N][:,i:] #This is a matrix
syy_k = np.array([sy_iplusk[t] * syi[t] \
for t in xrange(t_output.size)])# This is also a matrix
localdataij = OutData_ij(t_output, self.tpnt_sites, \
sxi, syi, szi,\
sxj, syj, szj,\
sy_iplusk,\
syy_k,\
gij)
list_of_local_ijdata.append(localdataij)
#Quantum spin variance maps to the classical expression
# (1/N) + (1/N^2)\sum_{i\neq j} S^x_i S^x_j - <S^x>^2 and
# (1/N) + (1/N^2)\sum_{i\neq j} S^y_i S^z_j
# since the i=j terms quantum average to unity
sx_var = (np.sum(s[:, 0:N], axis=1)**2 \
- np.sum(s[:, 0:N]**2, axis=1))
sy_var = (np.sum(s[:, N:2*N], axis=1)**2 \
- np.sum(s[:, N:2*N]**2, axis=1))
sz_var = (np.sum(s[:, 2*N:3*N], axis=1)**2 \
- np.sum(s[:, 2*N:3*N]**2, axis=1))
sxy_var = np.sum([fftconvolve(s[m, 0:N], \
s[m, N:2*N]) for m in xrange(t_output.size)], axis=1)
sxz_var = np.sum([fftconvolve(s[m, 0:N], \
s[m, 2*N:3*N]) for m in xrange(t_output.size)], axis=1)
syz_var = np.sum([fftconvolve(s[m, N:2*N], \
s[m, 2*N:3*N]) for m in xrange(t_output.size)], axis=1)
localdata = OutData(t_output, sx_expectations, sy_expectations,\
sz_expectations, sx_var, sy_var, sz_var, sxy_var, sxz_var, \
syz_var, self)
list_of_local_data.append(localdata)
#After loop above sum reduce (don't forget to average) all locally
#calculated expectations at each time to root
outdat = \
self.sum_reduce_all_data(list_of_local_data, t_output, comm)
if self.sitedata:
sij = self.sum_reduce_site_data(list_of_local_ijdata,\
t_output, self.tpnt_sites, comm)
if rank == root:
sij.normalize_data(self.n_t)
if self.file_output:
sij.dump_data()
if rank == root:
#Dump to file
outdat.normalize_data(self.n_t, N)
if self.file_output:
outdat.dump_data()
if self.verbose:
print(" ")
print("Integration output info:")
pprint(info)
print("""# Cumulative number of Jacobian evaluations
by root:""", \
np.sum(info['nje']))
print('# Done!')
return outdat
else:
return None
def dtwa_ising_longrange_2ndorder(self, time_info, sampling):
old_settings = np.seterr(all='ignore') #Prevent overflow warnings
comm=self.comm
N = self.latsize
(t_init, n_cycles, n_steps) = time_info
rank = comm.rank
if rank == root and self.verbose:
pprint("# Run parameters:")
#Copy params to another object, then delete
#the output that you don't want printed
out = copy.copy(self)
out.dsdotdg = 0.0
out.delta_eps_tensor = 0.0
out.jmat = 0.0
out.deltamn = 0.0
pprint(vars(out), depth=2)
if rank == root and not self.verbose:
pprint("# Starting run ...")
if self.omega == 0:
t_final = t_init + n_cycles
else:
t_final = t_init + (n_cycles * (2.0* np.pi/self.omega))
dt = (t_final-t_init)/(n_steps-1.0)
t_output = np.arange(t_init, t_final, dt)
#Let each process get its chunk of n_t by round robin
nt_loc = 0
iterator = rank
while iterator < self.n_t:
nt_loc += 1
iterator += comm.size
#Scatter unique seeds for generating unique random number arrays :
#each processor gets its own nt_loc seeds, and allocates nt_loc
#initial conditions. Each i.c. is a 2N sized array
#now, each process sends its value of nt_loc to root
all_ntlocs = comm.gather(nt_loc, root=root)
#Let the root process initialize nt unique integers for random seeds
if rank == root:
all_seeds = np.arange(self.n_t, dtype=np.int64)+1
all_ntlocs = np.array(all_ntlocs)
all_displacements = np.roll(np.cumsum(all_ntlocs), root+1)
all_displacements[root] = 0 # First displacement
else:
all_seeds = None
all_displacements = None
local_seeds = np.zeros(nt_loc, dtype=np.int64)
#Root scatters nt_loc sized seed data to that particular process
comm.Scatterv([all_seeds, all_ntlocs, all_displacements,\
MPI.DOUBLE],local_seeds)
list_of_local_data = []
if self.verbose:
list_of_dhwdt_abs2 = []
if self.sitedata:
list_of_local_ijdata = []
for runcount in xrange(0, nt_loc, 1):
            random.seed(local_seeds[runcount] + self.seed_offset)
            #Seed numpy's RNG as well; the "spr" sampling below uses np.random,
            #which is not affected by random.seed alone
            np.random.seed(local_seeds[runcount] + self.seed_offset)
sx_init = np.ones(N)
if sampling == "spr":
#According to Schachenmayer, the wigner function of the quantum
#state generates the below initial conditions classically
sy_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
sz_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
#Set initial conditions for the dynamics locally to vector
#s_init and store it as [s^x,s^x,s^x, .... s^y,s^y,s^y ...,
#s^z,s^z,s^z, ...]
s_init_spins = np.concatenate((sx_init, sy_init, sz_init))
elif sampling == "1-0":
spin_choices = np.array([(1, 1,0),(1, 0,1),(1, -1,0),(1, 0,-1)])
spins = np.array([random.choice(spin_choices) for i in xrange(N)])
s_init_spins = spins.T.flatten()
elif sampling == "all":
spin_choices_spr = np.array([(1, 1,1),(1, 1,-1),(1, -1,1),(1, -1,-1)])
spin_choices_10 = np.array([(1, 1,0),(1, 0,1),(1, -1,0),(1, 0,-1)])
spin_choices = np.concatenate((spin_choices_10, spin_choices_spr))
spins = np.array([random.choice(spin_choices) for i in xrange(N)])
s_init_spins = spins.T.flatten()
else:
pass
# Set initial correlations to 0.
s_init_corrs = np.zeros(9*N*N)
#Redirect unwanted stdout warning messages to /dev/null
with stdout_redirected():
if self.verbose:
if self.jac:
s, info = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)), t_output, \
args=(self,), Dfun=jac_2ndorder, full_output=True)
else:
s, info = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)),t_output, \
args=(self,), Dfun=None, full_output=True)
else:
if self.jac:
s = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)), \
t_output, args=(self,), Dfun=jac_2ndorder)
else:
s = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)), t_output, \
args=(self,), Dfun=None)
#Computes |dH/dt|^2 for a particular alphavec & weighes it
#If the rms over alphavec of these are 0, then each H is const
if self.verbose:
hws = weyl_hamilt(s,t_output, self)
dhwdt = np.array([t_deriv(hw, t_output) for hw in hws])
dhwdt_abs2 = np.square(dhwdt)
list_of_dhwdt_abs2.extend(dhwdt_abs2)
s = np.array(s, dtype="float128")#Widen memory to reduce overflows
#Compute expectations <sx> and \sum_{ij}<sx_i sx_j> -<sx>^2 with
#wigner func at t_output values LOCALLY for each initcond and
#store them
sx_expectations = np.sum(s[:, 0:N], axis=1)
sy_expectations = np.sum(s[:, N:2*N], axis=1)
sz_expectations = np.sum(s[:, 2*N:3*N], axis=1)
if self.sitedata:
(i, j) = self.tpnt_sites
sxi, syi, szi = s[:, i], s[:, i+N], s[:, i+2*N]
                sxj, syj, szj = s[:, j], s[:, j+N], s[:, j+2*N]
sview = s.view()
gij = sview[:,3*N:].reshape(\
t_output.size,3, 3, N, N)[:, :, :, i, j]
#Calculate Spatial Correlations
sy_iplusk = s[:, N:2*N][:,i:] #This is a matrix
syy_k = np.array([sy_iplusk[t] * syi[t] \
for t in xrange(t_output.size)])# This is also a matrix
localdataij = OutData_ij(t_output, self.tpnt_sites, \
sxi, syi, szi,\
sxj, syj, szj,\
sy_iplusk,\
syy_k,\
gij)
list_of_local_ijdata.append(localdataij)
#svec is the tensor s^l_\mu
#G = s[3*N:].reshape(3,3,N,N) is the tensor g^{ab}_{\mu\nu}.
s = np.array(s, dtype="float128")#Enlarge in mem
sview = s.view()
gt = sview[:, 3*N:].reshape(s.shape[0], 3, 3, N, N)
gt[:,:,:,range(N),range(N)] = 0.0 #Set diags to 0
#Quantum spin variance
sx_var = np.sum(gt[:,0,0,:,:], axis=(-1,-2))
sx_var += (np.sum(s[:, 0:N], axis=1)**2 \
- np.sum(s[:, 0:N]**2, axis=1))
sy_var = np.sum(gt[:,1,1,:,:], axis=(-1,-2))
sy_var += (np.sum(s[:, N:2*N], axis=1)**2 \
- np.sum(s[:, N:2*N]**2, axis=1))
sz_var = np.sum(gt[:,2,2,:,:], axis=(-1,-2))
sz_var += (np.sum(s[:, 2*N:3*N], axis=1)**2 \
- np.sum(s[:, 2*N:3*N]**2, axis=1))
sxy_var = np.sum(gt[:,0,1,:,:], axis=(-1,-2))
sxy_var += np.sum([fftconvolve(s[m, 0:N], s[m, N:2*N]) \
for m in xrange(t_output.size)], axis=1)
#Remove the diagonal parts
sxy_var -= np.sum(s[:, 0:N] * s[:, N:2*N], axis=1)
sxz_var = np.sum(gt[:,0,2,:,:], axis=(-1,-2))
sxz_var += np.sum([fftconvolve(s[m, 0:N], s[m, 2*N:3*N]) \
for m in xrange(t_output.size)], axis=1)
#Remove the diagonal parts
sxz_var -= np.sum(s[:, 0:N] * s[:, 2*N:3*N], axis=1)
syz_var = np.sum(gt[:,1,2,:,:], axis=(-1,-2))
syz_var += np.sum([fftconvolve(s[m, N:2*N], s[m, 2*N:3*N]) \
for m in xrange(t_output.size)], axis=1)
#Remove the diagonal parts
syz_var -= np.sum(s[:, N:2*N] * s[:, 2*N:3*N], axis=1)
localdata = OutData(t_output, sx_expectations, sy_expectations,\
sz_expectations, sx_var, sy_var, sz_var, sxy_var, sxz_var, \
syz_var, self)
list_of_local_data.append(localdata)
#After loop above sum reduce (don't forget to average) all locally
#calculated expectations at each time to root
outdat = \
self.sum_reduce_all_data(list_of_local_data, t_output, comm)
if self.verbose:
dhwdt_abs2_locsum = np.sum(list_of_dhwdt_abs2, axis=0)
dhwdt_abs2_totals = np.zeros_like(dhwdt_abs2_locsum)\
if rank == root else None
if self.sitedata:
sij = self.sum_reduce_site_data(list_of_local_ijdata, t_output,\
self.tpnt_sites, comm)
if rank == root:
sij.normalize_data(self.n_t)
sij.dump_data()
if self.verbose:
temp_comm = Intracomm(comm)
dhwdt_abs2_totals = temp_comm.reduce(dhwdt_abs2_locsum, root=root)
if rank == root:
dhwdt_abs2_totals = dhwdt_abs2_totals/(self.n_t * N * N)
dhwdt_abs_totals = np.sqrt(dhwdt_abs2_totals)
#Dump to file
if rank == root:
outdat.normalize_data(self.n_t, N)
if self.file_output:
outdat.dump_data()
if self.verbose:
print("t-deriv of Hamilt (abs square) with wigner avg: ")
print(" ")
print(tabulate({"time": t_output, \
"dhwdt_abs": dhwdt_abs_totals}, \
headers="keys", floatfmt=".6f"))
if self.jac and self.verbose:
print('# Cumulative number of Jacobian evaluations by root:', \
np.sum(info['nje']))
print('# Done!')
np.seterr(**old_settings) # reset to default
return outdat
else:
np.seterr(**old_settings) # reset to default
return None
def evolve(self, time_info, sampling="spr"):
if self.s_order:
return self.dtwa_ising_longrange_2ndorder(time_info, sampling)
else:
return self.dtwa_ising_longrange_1storder(time_info)
if __name__ == '__main__':
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
#Initiate the parameters in object
p = ParamData(latsize=101, beta=1.0)
#Initiate the DTWA system with the parameters and niter
d = Dtwa_System(p, comm, n_t=2000)
data = d.evolve((0.0, 1.0, 1000))
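    #Illustrative follow-up (not in the original script): under MPI only the
    #root rank receives an OutData object, all other ranks get None, e.g.:
    #if data is not None:
    #    print(data.t_output, data.sx)  #trajectory-averaged <s^x>(t), etc.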
| gpl-2.0 | -5,499,009,293,101,981,000 | 43.714004 | 86 | 0.537847 | false |
ph147/dcf77 | reader.py | 1 | 3135 | #!/usr/bin/python
import sys
import struct
EPSILON = 0.02
SAMPLE_RATE = 44100
ZERO_BIT_IN_SECS = 0.1
ONE_BIT_IN_SECS = 0.2
NEW_MINUTE_BEEP_THRESHOLD = 1.7
def read_word(inf):
return struct.unpack('<l', inf.read(struct.calcsize('=l')))[0]
def read_words(inf):
word = read_word(inf)
while True:
yield word
word = read_word(inf)
def average(words, num):
try:
return 1.0*sum(abs(next(words)) for i in xrange(num))/num
except struct.error:
raise EOF
def sgn(num):
return -1 if num < 0 else 1 if num > 0 else 0
def steps(length):
count = 0
while True:
yield count
count += length
def in_vicinity(num, center):
return abs(num-center) < EPSILON
class SignalError(Exception):
pass
class EOF(Exception):
pass
class DCF77(object):
amplitude_factor = 0.3
block_length = 100
def __init__(self, filename=None):
if not filename:
print 'Reading from stdin...'
filename = '/dev/stdin'
else:
print 'Reading from file {}...'.format(filename)
self.filename = filename
self.lasts = [0]*3
self.data = True
self.bits = []
self.start = 0
self.end = 0
self.minute_started = False
def went_down(self, ave):
return ave < self.amplitude_factor*self.lasts[0] and not self.data
def went_up(self, ave):
return self.lasts[0] < self.amplitude_factor*ave and self.data
def start_new_minute(self):
print
if self.minute_started:
yield ''.join(self.bits)
self.bits = []
self.minute_started = True
print '*** New minute started. ***'
def process_carrier(self, step):
self.start = step
time = 1.0*(self.start-self.end)/SAMPLE_RATE
if time > NEW_MINUTE_BEEP_THRESHOLD:
for answer in self.start_new_minute():
yield answer
self.data = True
def append(self, bit):
self.bits.append(bit)
sys.stdout.write(bit)
def process_bit(self, time):
if in_vicinity(time, ZERO_BIT_IN_SECS):
self.append('0')
elif in_vicinity(time, ONE_BIT_IN_SECS):
self.append('1')
else:
raise SignalError
def process_silence(self, step):
self.end = step
time = 1.0*(self.end-self.start)/SAMPLE_RATE
if self.minute_started:
self.process_bit(time)
sys.stdout.flush()
self.data = False
def process_block(self, block, step):
if self.went_down(block):
for answer in self.process_carrier(step):
yield answer
elif self.went_up(block):
self.process_silence(step)
self.lasts.pop(0)
self.lasts.append(block)
def run(self):
        with open(self.filename, 'rb') as inf:  # binary mode: raw 32-bit samples
words = read_words(inf)
for step in steps(self.block_length):
ave = average(words, self.block_length)
for answer in self.process_block(ave, step):
yield answer
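# Illustrative entry point (not part of the original file; the default file
# name is an assumption). DCF77.run() yields the raw bit string collected
# during each completed minute, which a caller could decode further.
def demo(filename='/dev/stdin'):
    decoder = DCF77(filename)
    for frame in decoder.run():
        print 'decoded minute:', frame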
| mit | 750,148,069,711,881,200 | 22.75 | 74 | 0.56555 | false |
alfa-addon/addon | plugin.video.alfa/servers/crunchyroll.py | 1 | 3429 | # -*- coding: utf-8 -*-
from builtins import range
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
if PY3:
#from future import standard_library
#standard_library.install_aliases()
    import urllib.parse as urllib # Very slow in PY2; native in PY3
else:
    import urllib # Use the native PY2 module, which is faster
import base64
import struct
import zlib
from hashlib import sha1
from core import filetools
from core import jsontools
from core import httptools
from core import scrapertools
from platformcode import config, logger
GLOBAL_HEADER = {'User-Agent': 'Mozilla/5.0', 'Accept-Language': '*'}
proxy_i = "https://www.usa-proxy.org/index.php"
proxy = "https://www.usa-proxy.org/"
def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url, headers=GLOBAL_HEADER).data
#logger.error(data)
if "showmedia-trailer-notice" in data:
disp = scrapertools.find_single_match(data, '<a href="/freetrial".*?</span>.*?<span>\s*(.*?)</span>')
disp = disp.strip()
if disp:
disp = "Disponible gratuitamente: %s" % disp
return False, "[Crunchyroll] Error, se necesita cuenta premium. %s" % disp
return True, ""
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
#page_url='https://www.crunchyroll.com/es-es/one-piece/episode-891-climbing-up-a-waterfall-a-great-journey-through-the-land-of-wanos-sea-zone-786643'
logger.error("url=" + page_url)
video_urls = []
media_url = ''
file_sub = ""
idiomas = ['deDE', 'ptBR', 'frFR', 'itIT', 'enUS', 'esES', 'esLA']
index_sub = int(config.get_setting("crunchyrollsub", "crunchyroll"))
idioma_sub = idiomas[index_sub]
raw_data = scrapertools.find_single_match(data, r'"streams":(\[[^\]]+])')
if idioma_sub == 'esES' and not idioma_sub in raw_data:
idioma_sub = 'esLA'
elif idioma_sub == 'esLA' and not idioma_sub in raw_data:
idioma_sub = 'esES'
if idioma_sub not in raw_data:
idioma_sub = 'enUS'
json_data = jsontools.load(raw_data)
#logger.error(json_data)
for elem in json_data:
formato = elem.get('format', '')
if formato in ['vo_adaptive_hls', 'adaptive_hls']:
lang = elem.get('hardsub_lang', '')
audio_lang = elem.get('audio_lang', '')
if lang == idioma_sub:
media_url = elem.get('url', '')
break
if not lang and audio_lang != 'jaJP':
media_url = elem.get('url', '')
break
if not media_url:
return video_urls
m3u_data = httptools.downloadpage(media_url, headers=GLOBAL_HEADER).data.decode('utf-8')
matches = scrapertools.find_multiple_matches(m3u_data, 'TION=\d+x(\d+).*?\s(.*?)\s')
filename = scrapertools.get_filename_from_url(media_url)[-4:]
if matches:
for quality, media_url in matches:
video_urls.append(["%s %sp [crunchyroll]" % (filename, quality), media_url])
else:
video_urls.append(["m3u8 [crunchyroll]", media_url])
return video_urls
| gpl-3.0 | 2,660,880,265,300,349,000 | 33.697917 | 153 | 0.585352 | false |
pklimai/py-junos-eznc | tests/unit/test_factcache.py | 2 | 10508 | import unittest2 as unittest
from nose.plugins.attrib import attr
from mock import patch, MagicMock, call
from jnpr.junos.exception import FactLoopError
from jnpr.junos import Device
from ncclient.manager import Manager, make_device_handler
from ncclient.transport import SSHSession
__author__ = "Stacy Smith"
__credits__ = "Jeremy Schulman, Nitin Kumar"
@attr('unit')
class TestFactCache(unittest.TestCase):
@patch('ncclient.manager.connect')
def setUp(self, mock_connect):
mock_connect.side_effect = self._mock_manager_setup
self.dev = Device(host='1.1.1.1', user='rick', password='password123')
self.dev.open()
def test_factcache_unknown_fact(self):
with self.assertRaises(KeyError):
unknown = self.dev.facts['unknown']
def test_factcache_fact_loop(self):
# The personality fact calls the
# model fact.
# Change the callback for the model
# fact to be the same as the personality fact
# in order to induce a fact loop.
self.dev.facts._callbacks['model'] = \
self.dev.facts._callbacks['personality']
# Now, trying to fetch the personality
# fact should cause a FactLoopError
with self.assertRaises(FactLoopError):
personality = self.dev.facts['personality']
def test_factcache_return_unexpected_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_bar_fact
# Now, trying to access the foo fact should cause a
# RunTimeError because the bar fact is also unexpectedly provided
with self.assertRaises(RuntimeError):
foo = self.dev.facts['foo']
@patch('jnpr.junos.factcache.warnings')
def test_factcache_nonmatching_old_and_new_fact(self, mock_warn):
# Set fact style to 'both'
self.dev._fact_style = 'both'
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_fact
# Cache the new-style foo fact
self.dev.facts._cache['foo'] = 'foo'
# Set the old-style foo fact to a different value
self.dev._ofacts['foo'] = 'bar'
# Now, trying to access the foo fact should cause a
# RunTimeWarning because the values of the new and old-style facts
# do not match
foo = self.dev.facts['foo']
mock_warn.assert_has_calls([call.warn(
'New and old-style facts do not match for the foo fact.\n'
' New-style value: foo\n Old-style value: bar\n',
RuntimeWarning)])
def test_factcache_fail_to_return_expected_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_bar_fact
self.dev.facts._callbacks['bar'] = get_bar_fact
# Now, trying to access the foo fact should cause a
# RunTimeError because the foo fact is not provided
with self.assertRaises(RuntimeError):
foo = self.dev.facts['foo']
def test_factcache_delete_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_fact
foo = self.dev.facts['foo']
# Now, trying to delete the foo fact should cause a
# RunTimeError
with self.assertRaises(RuntimeError):
self.dev.facts.pop('foo', None)
def test_factcache_set_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_fact
foo = self.dev.facts['foo']
# Now, trying to set the foo fact should cause a
# RunTimeError
with self.assertRaises(RuntimeError):
self.dev.facts['foo'] = 'bar'
def test_factcache_iter_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_foo_bar_fact}
# Now, get the length of the facts
self.assertEqual(len(list(self.dev.facts)), 2)
def test_factcache_len_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact}
# Now, get the length of the facts
self.assertEqual(len(self.dev.facts), 2)
def test_factcache_string_repr(self):
# Override the callbacks to only support foo and bar facts.
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact}
# Set values for foo and bar facts
self.dev.facts._cache['foo'] = 'foo'
self.dev.facts._cache['bar'] = {'bar': 'bar'}
# Now, get the string (pretty) representation of the facts
self.assertEqual(str(self.dev.facts), "{'bar': {'bar': 'bar'}, "
"'foo': 'foo'}")
def test_factcache_repr_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact}
        # Now, get the string representation of the facts
self.assertEqual(str(self.dev.facts), "{'bar': 'bar', 'foo': 'foo'}")
def test_factcache_refresh_single_key(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['bar'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh just the foo fact
self.dev.facts._refresh(keys='foo')
# Confirm the values now
self.assertEqual(self.dev.facts['foo'], 'foo')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
def test_factcache_refresh_two_keys(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['bar'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh the foo and _hidden facts
self.dev.facts._refresh(keys=('foo', '_hidden'))
# Confirm the values now
self.assertEqual(self.dev.facts['foo'], 'foo')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], True)
def test_factcache_refresh_unknown_fact(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh just the unknown bar fact which should raise a RuntimeError
with self.assertRaises(RuntimeError):
self.dev.facts._refresh(keys=('bar'))
def test_factcache_refresh_all_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['bar'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh all facts
self.dev.facts._refresh()
# Confirm the values now
self.assertEqual(self.dev.facts['foo'], 'foo')
self.assertEqual(self.dev.facts['bar'], 'bar')
self.assertEqual(self.dev.facts['_hidden'], True)
@patch('jnpr.junos.device.warnings')
def test_factcache_refresh_exception_on_failure(self, mock_warn):
with self.assertRaises(ValueError):
# Refresh all facts with exception on failure
self.dev.facts._refresh(exception_on_failure=True)
@patch('jnpr.junos.device.warnings')
@patch('jnpr.junos.factcache.warnings')
def test_factcache_refresh_warnings_on_failure(self,
mock_warn,
mock_device_warn):
# Refresh all facts with warnings on failure
self.dev.facts._refresh(warnings_on_failure=True)
mock_warn.assert_has_calls([call.warn(
'Facts gathering is incomplete. To know the reason call '
'"dev.facts_refresh(exception_on_failure=True)"',
RuntimeWarning)])
# mock_warn.assert_called_once('Facts gathering is incomplete. '
# 'To know the reason call '
# '"dev.facts_refresh('
# 'exception_on_failure=True)"',
# RuntimeWarning)
def _mock_manager_setup(self, *args, **kwargs):
if kwargs:
device_params = kwargs['device_params']
device_handler = make_device_handler(device_params)
session = SSHSession(device_handler)
return Manager(session, device_handler)
def get_foo_fact(device):
return {'foo': 'foo'}
def get_foo_bar_fact(device):
return {'foo': 'foo',
'bar': 'bar', }
def get_bar_fact(device):
return {'bar': 'bar', }
def get_hidden_fact(device):
return {'_hidden': True, }
| apache-2.0 | 7,211,522,377,934,764,000 | 41.54251 | 78 | 0.576418 | false |
haphaeu/yoshimi | EulerProject/121.py | 1 | 2562 | from fractions import Fraction
from fractions import gcd
def nextPermLexic(perm):
# ###########################################################################
#The following algorithm generates the next permutation lexicographically
#after a given permutation. It changes the given permutation in-place.
#1- Find the largest index k such that a[k] < a[k + 1]. If no such index
# exists, the permutation is the last permutation.
#2- Find the largest index l such that a[k] < a[l]. Since k + 1 is such
# an index, l is well defined and satisfies k < l.
#3- Swap a[k] with a[l].
#4- Reverse the sequence from a[k + 1] up to and including the final
# element a[n].
#
# Written by R.Rossi, 26th/Oct/2011
#
# Reference:
# http://en.wikipedia.org/wiki/Permutation#Generation_in_lexicographic_order
# ###########################################################################
#will return the next permutation
#after 'perm' in lexicographic order
sz=len(perm)
#Step 1: find largest k st a[k]<a[k+1]
k= -666
for i in range(sz-2,-1,-1):
if perm[i] < perm[i+1]:
k=i
break
if k==-666:
#print "\nAchieved last permutation in lexicographic order"
return []
else:
#Step 2: find largest index l such that a[k] < a[l]
l=-666
if k==sz-2:
l=k+1
else:
for i in range(sz-1,k,-1):
if perm[k] < perm[i]:
l=i
break
if l==-666:
print "\nError! Oh my god, what to do?"
return []
else:
#step 3: Swap a[k] with a[l]
tmp=perm[0:k] + perm[l] + perm[k+1:l] + perm[k] + perm[l+1:]
#step 4: reverse a[k+1:]
tmp2=tmp[0:k+1] + tmp[-1:k:-1]
#done.
#save as perm
nextPerm=tmp2
return nextPerm
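#Illustrative helper (not part of the original script): starting from the
#sorted string, repeated calls to nextPermLexic enumerate every distinct
#arrangement exactly once, which is exactly what the main loop below does.
#For example 'bbr' -> 'brb' -> 'rbb' -> [] (end of lexicographic order).
def all_arrangements(seed):
  perm=seed
  while perm:
    yield perm
    perm=nextPermLexic(perm)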
# ### MAIN ###
TURNS=15
MXRED=(TURNS-1)/2
winPlays=[]
#generate initial conditions
for i in range(MXRED+1):
nxt='b'*(TURNS-i)+'r'*i
while nxt:
winPlays.append(nxt)
nxt=nextPermLexic(nxt)
#sum the probabilities of all wins
ProbTot = Fraction(0,1)
prob = Fraction(1,1)
for play in winPlays:
for i,disk in enumerate(play):
if disk=='b':
prob *= Fraction(1,i+2)
else:
prob *= Fraction(i+1,i+2)
#print ProbTot, "+", prob,
ProbTot = ProbTot + prob
#print "=", ProbTot
prob = Fraction(1,1)
print "Probability of winning is", ProbTot
print "Required fund", ProbTot.denominator/ProbTot.numerator
| lgpl-3.0 | -6,066,579,934,885,888,000 | 30.243902 | 77 | 0.541374 | false |
ArteliaTelemac/PostTelemac | PostTelemac/meshlayertools/toshape/posttelemac_util_extractpts_caduc.py | 1 | 20503 | ##[01_Telemac]=group
# *************************************************************************
"""
Versions :
0.0 premier script
0.2 : un seul script pour modeleur ou non
"""
# *************************************************************************
##Type_de_traitement=selection En arriere plan;Modeler;Modeler avec creation de fichiers
##Fichier_resultat_telemac=file
##Temps_a_exploiter_fichier_max_0=number 0.0
##Pas_d_espace_0_si_tous_les_points=number 0.0
##fichier_point_avec_vecteur_vitesse=boolean False
##Parametre_vitesse_X=string UVmax
##Parametre_vitesse_Y=string VVmax
##systeme_de_projection=crs EPSG:2154
##forcage_attribut_fichier_de_sortie=string
##fichier_de_sortie_points=output vector
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import os
import time
from os import path
import numpy as np
from matplotlib.path import Path
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.tools.vector import VectorWriter
import matplotlib.pyplot as plt
from matplotlib import tri
from qgis.utils import *
from PyQt4.QtCore import SIGNAL, Qt
from PyQt4 import QtCore, QtGui
# from utils.files import getFileContent
# from parsers.parserSortie import getValueHistorySortie
# from parsers.parserSELAFIN import getValueHistorySLF, getValuePolylineSLF,subsetVariablesSLF
from parsers.parserSELAFIN import SELAFIN
# from parsers.parserStrings import parseArrayPaires
import threading
from time import ctime
import math
def isFileLocked(file, readLockCheck=False):
"""
Checks to see if a file is locked. Performs three checks
1. Checks if the file even exists
2. Attempts to open the file for reading. This will determine if the file has a write lock.
Write locks occur when the file is being edited or copied to, e.g. a file copy destination
3. If the readLockCheck parameter is True, attempts to rename the file. If this fails the
file is open by some other process for reading. The file can be read, but not written to
or deleted.
@param file:
@param readLockCheck:
"""
if not (os.path.exists(file)):
return False
try:
f = open(file, "r")
f.close()
except IOError:
return True
if readLockCheck:
lockFile = file + ".lckchk"
if os.path.exists(lockFile):
os.remove(lockFile)
try:
os.rename(file, lockFile)
time.sleep(1)
os.rename(lockFile, file)
except WindowsError:
return True
return False
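# Minimal usage sketch (illustrative only; the shapefile path below is
# hypothetical, not taken from this script):
# if isFileLocked(r'C:/temp/points_out.shp', readLockCheck=True):
#     raise GeoAlgorithmExecutionException('Output shapefile is locked')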
# *************************************************************************
def workerFinished(str1):
progress.setText(str(ctime()) + " - Fin du thread - Chargement du fichier resultat")
vlayer = QgsVectorLayer(str1, os.path.basename(str1).split(".")[0], "ogr")
QgsMapLayerRegistry.instance().addMapLayer(vlayer)
class Worker_pts(QtCore.QObject):
    #Signals used to report progress/completion when run in a background
    #thread; PyQt requires them to be declared as class attributes.
    status = QtCore.pyqtSignal(str)
    progress = QtCore.pyqtSignal(int)
    finished = QtCore.pyqtSignal(str)
    def __init__(self, donnees_d_entree):
QtCore.QObject.__init__(self)
self.pathshp = donnees_d_entree["pathshp"]
self.mesh = donnees_d_entree["mesh"]
self.x = donnees_d_entree["x"]
self.y = donnees_d_entree["y"]
self.ztri = donnees_d_entree["ztri"]
self.vlayer = ""
self.pasespace = donnees_d_entree["pasdespace"]
self.vitesse = "0"
self.paramvalueX = donnees_d_entree["paramvalueX"]
self.paramvalueY = donnees_d_entree["paramvalueY"]
self.traitementarriereplan = donnees_d_entree["traitementarriereplan"]
fields = donnees_d_entree["champs"]
if self.paramvalueX != None:
fields.append(QgsField("UV", QVariant.Double))
fields.append(QgsField("VV", QVariant.Double))
fields.append(QgsField("norme", QVariant.Double))
fields.append(QgsField("angle", QVariant.Double))
self.vitesse = "1"
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
self.writerw1 = QgsVectorFileWriter(
self.pathshp,
None,
donnees_d_entree["champs"],
QGis.WKBPoint,
QgsCoordinateReferenceSystem(str(donnees_d_entree["crs"])),
"ESRI Shapefile",
)
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
self.writerw2 = VectorWriter(
donnees_d_entree["fichierdesortie_point"],
None,
donnees_d_entree["champs"],
QGis.WKBMultiPoint,
QgsCoordinateReferenceSystem(str(donnees_d_entree["crs"])),
)
def run(self):
strtxt = (
str(ctime())
+ " - Thread - repertoire : "
+ os.path.dirname(self.pathshp)
+ " - fichier : "
+ os.path.basename(self.pathshp)
)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
fet = QgsFeature()
try:
if True:
if self.paramvalueX == None:
boolvitesse = False
else:
boolvitesse = True
                # ------------------------------------- Process all mesh points
if self.pasespace == 0:
noeudcount = len(self.x)
strtxt = str(ctime()) + " - Thread - Traitement des vitesses - " + str(noeudcount) + " noeuds"
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
for k in range(len(self.x)):
if k % 5000 == 0:
strtxt = str(ctime()) + " - Thread - noeud n " + str(k) + "/" + str(noeudcount)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
if self.traitementarriereplan == 0:
self.progress.emit(int(100.0 * k / noeudcount))
else:
progress.setPercentage(int(100.0 * k / noeudcount))
fet.setGeometry(QgsGeometry.fromPoint(QgsPoint(float(self.x[k]), float(self.y[k]))))
tabattr = []
for l in range(len(self.ztri)):
tabattr.append(float(self.ztri[l][k]))
if boolvitesse:
norme = (
(float(self.ztri[self.paramvalueX][k])) ** 2.0
+ (float(self.ztri[self.paramvalueY][k])) ** 2.0
) ** (0.5)
atanUVVV = math.atan2(
float(self.ztri[self.paramvalueY][k]), float(self.ztri[self.paramvalueX][k])
)
angle = atanUVVV / math.pi * 180.0
if angle < 0:
angle = angle + 360
# angle YML
# angle = atanUVVV*180.0/math.pi+min(atanUVVV,0)/atanUVVV*360.0
tabattr.append(float(self.ztri[self.paramvalueX][k]))
tabattr.append(float(self.ztri[self.paramvalueY][k]))
tabattr.append(norme)
tabattr.append(angle)
fet.setAttributes(tabattr)
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
self.writerw1.addFeature(fet)
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
self.writerw2.addFeature(fet)
                # ------------------------------------- Process the points on the regular spacing grid
else:
triangul = tri.Triangulation(self.x, self.y, self.mesh)
lineartri = []
for i in range(len(self.ztri)):
lineartri.append(tri.LinearTriInterpolator(triangul, self.ztri[i]))
xmin = np.min(self.x)
xmax = np.max(self.x)
ymin = np.min(self.y)
ymax = np.max(self.y)
pasx = int((xmax - xmin) / self.pasespace)
pasy = int((ymax - ymin) / self.pasespace)
strtxt = (
str(ctime())
+ " - Thread - Traitement des vitesses - pas d espace : "
+ str(self.pasespace)
+ "m - nombre de points : "
+ str(pasx)
+ "*"
+ str(pasy)
+ "="
+ str(pasx * pasy)
)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
compt = 0
for x2 in range(pasx):
xtemp = float(xmin + x2 * self.pasespace)
for y2 in range(pasy):
compt = compt + 1
if (compt) % 5000 == 0:
strtxt = str(ctime()) + " - Thread - noeud n " + str(compt) + "/" + str(pasx * pasy)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
if self.traitementarriereplan == 0:
self.progress.emit(int(100.0 * compt / (pasy * pasx)))
else:
progress.setPercentage(int(100.0 * compt / (pasy * pasx)))
ytemp = float(ymin + y2 * self.pasespace)
fet.setGeometry(QgsGeometry.fromPoint(QgsPoint(xtemp, ytemp)))
tabattr1 = []
if str(float(lineartri[0].__call__(xtemp, ytemp))) == "nan":
continue
for j in range(len(lineartri)):
tabattr1.append(float(lineartri[j].__call__(xtemp, ytemp)))
if boolvitesse:
VX = float(lineartri[self.paramvalueX].__call__(xtemp, ytemp))
VY = float(lineartri[self.paramvalueY].__call__(xtemp, ytemp))
norme = ((VX) ** 2.0 + (VY) ** 2.0) ** (0.5)
angle = math.atan2(VY, VX) / math.pi * 180.0
if angle < 0:
angle = angle + 360
tabattr1.append(VX)
tabattr1.append(VY)
tabattr1.append(norme)
tabattr1.append(angle)
fet.setAttributes(tabattr1)
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
self.writerw1.addFeature(fet)
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
self.writerw2.addFeature(fet)
# del self.writerw
except Exception, e:
strtxt = str(ctime()) + " ************ PROBLEME CALCUL DES VITESSES : " + str(e)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
if self.traitementarriereplan == 0:
self.progress.emit(int(100.0))
else:
progress.setPercentage(int(100.0))
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
del self.writerw1
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
del self.writerw2
strtxt = str(ctime()) + " - Thread - fichier " + self.pathshp + " cree"
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
# self.status.emit("Fichier " + self.nomrept+ '\ '.strip()+ self.nomfilet + " cree")
if self.traitementarriereplan == 0:
self.finished.emit(self.pathshp)
if self.traitementarriereplan == 2:
t = workerFinished(self.pathshp)
progress = QtCore.pyqtSignal(int)
status = QtCore.pyqtSignal(str)
error = QtCore.pyqtSignal(str)
killed = QtCore.pyqtSignal()
finished = QtCore.pyqtSignal(str)
# ****************************************************************************
# *************** Class that launches the worker thread ***************************
# ****************************************************************************
class traitementSelafin:
def __init__(self, donnees_d_entree):
self.donnees_d_entree = donnees_d_entree
self.thread = QtCore.QThread()
if donnees_d_entree["forcage_attribut_fichier_de_sortie"] == "":
if self.donnees_d_entree["pasdespace"] == 0:
self.donnees_d_entree["pathshp"] = os.path.join(
os.path.dirname(self.donnees_d_entree["pathselafin"]),
os.path.basename(self.donnees_d_entree["pathselafin"]).split(".")[0]
+ "_points_t_"
+ str(int(self.donnees_d_entree["temps"]))
+ str(".shp"),
)
else:
self.donnees_d_entree["pathshp"] = os.path.join(
os.path.dirname(self.donnees_d_entree["pathselafin"]),
os.path.basename(self.donnees_d_entree["pathselafin"]).split(".")[0]
+ "_points_"
+ str(int(self.donnees_d_entree["pasdespace"]))
+ "m_t_"
+ str(int(self.donnees_d_entree["temps"]))
+ str(".shp"),
)
else:
self.donnees_d_entree["pathshp"] = os.path.join(
os.path.dirname(self.donnees_d_entree["pathselafin"]),
os.path.basename(self.donnees_d_entree["pathselafin"]).split(".")[0]
+ "_"
+ str(self.donnees_d_entree["forcage_attribut_fichier_de_sortie"])
+ str(".shp"),
)
if self.donnees_d_entree["fichier_point_avec_vecteur_vitesse"]:
self.donnees_d_entree["Parametre_vitesse_X"] = donnees_d_entree["Parametre_vitesse_X"]
self.donnees_d_entree["Parametre_vitesse_Y"] = donnees_d_entree["Parametre_vitesse_Y"]
else:
self.donnees_d_entree["Parametre_vitesse_X"] = None
self.donnees_d_entree["Parametre_vitesse_Y"] = None
self.worker = ""
def main1(self):
progress.setPercentage(0)
progress.setText(str(ctime()) + " - Initialisation - Debut du script")
        # Load the .res result file ****************************************
slf = SELAFIN(self.donnees_d_entree["pathselafin"])
        # Find the time step to process ***********************************************
test = False
for i, time in enumerate(slf.tags["times"]):
progress.setText(
str(ctime()) + " - Initialisation - Temps present dans le fichier : " + str(np.float64(time))
)
# print str(i) +" "+ str(time) + str(type(time))
if float(time) == float(self.donnees_d_entree["temps"]):
test = True
values = slf.getVALUES(i)
if test:
progress.setText(
str(ctime()) + " - Initialisation - Temps traite : " + str(np.float64(self.donnees_d_entree["temps"]))
)
else:
raise GeoAlgorithmExecutionException(
str(ctime())
+ " - Initialisation - Erreur : \
Temps non trouve"
)
        # Find the variables to process ****************************************
test = [False, False]
tabparam = []
donnees_d_entree["champs"] = QgsFields()
for i, name in enumerate(slf.VARNAMES):
progress.setText(str(ctime()) + " - Initialisation - Variable dans le fichier res : " + name.strip())
tabparam.append([i, name.strip()])
donnees_d_entree["champs"].append(QgsField(str(name.strip()).translate(None, "?,!.;"), QVariant.Double))
if self.donnees_d_entree["Parametre_vitesse_X"] != None:
if str(name).strip() == self.donnees_d_entree["Parametre_vitesse_X"].strip():
test[0] = True
self.donnees_d_entree["paramvalueX"] = i
if str(name).strip() == self.donnees_d_entree["Parametre_vitesse_Y"].strip():
test[1] = True
self.donnees_d_entree["paramvalueY"] = i
else:
self.donnees_d_entree["paramvalueX"] = None
self.donnees_d_entree["paramvalueY"] = None
if self.donnees_d_entree["Parametre_vitesse_X"] != None:
if test == [True, True]:
progress.setText(
str(ctime())
+ " - Initialisation - Parametre trouvee : "
+ str(tabparam[self.donnees_d_entree["paramvalueX"]][1]).strip()
+ " "
+ str(tabparam[self.donnees_d_entree["paramvalueY"]][1]).strip()
)
else:
raise GeoAlgorithmExecutionException(
str(ctime())
+ " - Initialisation - Erreur : \
Parametre vitesse non trouve"
)
        # Load the mesh topology of the .res file ********************************************
self.donnees_d_entree["mesh"] = np.array(slf.IKLE3)
self.donnees_d_entree["x"] = slf.MESHX
self.donnees_d_entree["y"] = slf.MESHY
        # Check that the shapefile is not locked or already loaded
if isFileLocked(self.donnees_d_entree["pathshp"], True):
raise GeoAlgorithmExecutionException(
str(ctime())
+ " - Initialisation - Erreur :\
Fichier shape deja charge !!"
)
        # Load the data ***********************************
self.donnees_d_entree["ztri"] = []
for i in range(len(tabparam)):
self.donnees_d_entree["ztri"].append(values[i])
        # Launch the worker thread **************************************************************************************
self.worker = Worker(donnees_d_entree)
if donnees_d_entree["traitementarriereplan"] == 0:
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.worker.progress.connect(progress.setPercentage)
self.worker.status.connect(progress.setText)
self.worker.finished.connect(workerFinished)
self.worker.finished.connect(self.worker.deleteLater)
self.thread.finished.connect(self.thread.deleteLater)
self.worker.finished.connect(self.thread.quit)
champ = QgsFields()
writercontour = VectorWriter(
self.donnees_d_entree["fichierdesortie_point"],
None,
champ,
QGis.WKBMultiPoint,
QgsCoordinateReferenceSystem(str(self.donnees_d_entree["crs"])),
)
self.thread.start()
else:
self.worker.run()
# *************************************************************************
# ************** Variable initialisation ****************************************
# *************************************************************************
| gpl-3.0 | -5,462,225,697,454,298,000 | 42.809829 | 118 | 0.485441 | false |
NinjaMSP/crossbar | crossbar/twisted/processutil.py | 1 | 4681 | #####################################################################################
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Unless a separate license agreement exists between you and Crossbar.io GmbH (e.g.
# you have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
from twisted.internet.endpoints import _WrapIProtocol, ProcessEndpoint
from twisted.internet.address import _ProcessAddress
from twisted.internet import defer
from twisted.python.runtime import platform
__all__ = ('WorkerProcessEndpoint',)
if platform.isWindows():
# On Windows, we're only using FDs 0, 1, and 2.
class _WorkerWrapIProtocol(_WrapIProtocol):
"""
Wraps an IProtocol into an IProcessProtocol which forwards data
received on Worker._log_fds to WorkerProcess.log().
"""
def childDataReceived(self, childFD, data):
"""
Some data has come in from the process child. If it's one of our
log FDs, log it. Otherwise, let _WrapIProtocol deal with it.
"""
# track bytes received per child FD
self._worker.track_stats(childFD, len(data))
if childFD in self._worker._log_fds:
self._worker.log(childFD, data)
else:
_WrapIProtocol.childDataReceived(self, childFD, data)
else:
# On UNIX-likes, we're logging FD1/2, and using FD3 for our own
# communication.
class _WorkerWrapIProtocol(_WrapIProtocol):
"""
Wraps an IProtocol into an IProcessProtocol which forwards data
received on Worker._log_fds to WorkerProcess.log().
"""
def childDataReceived(self, childFD, data):
"""
Some data has come in from the process child. If it's one of our
log FDs, log it. If it's on FD3, send it to the WAMP connection.
Otherwise, let _WrapIProtocol deal with it.
"""
# track bytes received per child FD
self._worker.track_stats(childFD, len(data))
if childFD in self._worker._log_fds:
self._worker.log(childFD, data)
elif childFD == 3:
self.protocol.dataReceived(data)
else:
_WrapIProtocol.childDataReceived(self, childFD, data)
class WorkerProcessEndpoint(ProcessEndpoint):
"""
A custom process endpoint for workers.
:see: http://twistedmatrix.com/documents/current/api/twisted.internet.endpoints.ProcessEndpoint.html
"""
def __init__(self, *args, **kwargs):
"""
Ctor.
:param worker: The worker this endpoint is being used for.
:type worker: instance of WorkerProcess
"""
self._worker = kwargs.pop('worker')
ProcessEndpoint.__init__(self, *args, **kwargs)
def connect(self, protocolFactory):
"""
See base class.
"""
proto = protocolFactory.buildProtocol(_ProcessAddress())
try:
wrapped = _WorkerWrapIProtocol(proto, self._executable, self._errFlag)
wrapped._worker = self._worker
self._spawnProcess(wrapped,
self._executable, self._args, self._env,
self._path, self._uid, self._gid, self._usePTY,
self._childFDs)
except:
return defer.fail()
else:
return defer.succeed(proto)
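# Illustrative usage sketch (hedged): `worker` and `protocol_factory` are
# hypothetical objects; any worker providing track_stats(), log() and
# _log_fds satisfies what the wrapper protocol expects.
#
#     from twisted.internet import reactor
#     endpoint = WorkerProcessEndpoint(reactor, '/usr/bin/python',
#                                      args=['python', 'worker.py'],
#                                      worker=worker)
#     d = endpoint.connect(protocol_factory)  # fires with the connected protocol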
| agpl-3.0 | 1,370,651,899,697,463,800 | 37.68595 | 104 | 0.620594 | false |
MatthewWilkes/mw4068-packaging | src/melange/src/soc/views/helper/params.py | 1 | 16606 | #!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Params related methods.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
'"Mario Ferraro" <[email protected]>',
'"Sverre Rabbelier" <[email protected]>',
]
import copy
from django import forms
from django.utils.translation import ugettext
from soc.logic import cleaning
from soc.logic import dicts
from soc.models import linkable
from soc.views import helper
from soc.views.helper import access
from soc.views.helper import dynaform
from soc.views.helper import redirects
from soc.views.helper import widgets
DEF_LIST_DESCRIPTION_FMT = ugettext(
'List of %(name_plural)s.')
DEF_CREATE_INSTRUCTION_MSG_FMT = ugettext(
    'Please use this form to select a %(name)s.')
DEF_SUBMIT_MSG_PARAM_NAME = 's'
DEF_SUBMIT_MSG_PROFILE_SAVED = 0
DEF_SUBMIT_MSG_CANNOT_DELETE_ENTITY = 1
# list with all js scripts used for documentary purposes
DEF_JS_USES_LIST = [
'jq',
'jq_ajaqQueue',
'jq_autocomplete',
'jq_bgiframe',
'jq_grid',
'jq_purr',
'jq_spin',
'jq_datetimepicker',
'jq_progressbar',
'jq_thickbox',
'jq_ui_core',
'jlinq',
'json',
'menu',
'melange',
'melangelist',
'melangetooltip',
'melangeautocomplete',
'tinymce',
]
DEF_FIELD_INIT_PARAMS = ['required', 'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial']
def constructParams(params):
"""Constructs a new params dictionary based on params.
Params usage:
The params dictionary is passed to getCreateForm and getEditForm,
see their docstring on how they use it.
rights: The rights value is merged with a default rights
dictionary and then used as rights value.
url_name: The url_name value is used in constructing several
redirects as the first part of the url.
module_name: The module_name value is used in constructing the
location of several templates. It is expected that it matches
the part after "/templates/soc/" for this View.
name_plural: The name_plural argument is provided to the
LIST_DESCRIPTION when constructing the list_description field.
extra_dynainclude: The extra_dynainclude value is used when
constructing the create_dynainclude value.
extra_dynaexclude: The extra_dynaexclude value is used when
constructing the create_dynaexclude value.
logic: The logic value is used as argument to save the scope_logic
and create a create form.
"""
logic = params['logic']
if params.get('rights'):
rights = params['rights']
else:
rights = access.Checker(params)
rights['unspecified'] = ['deny']
rights['allow'] = ['allow']
rights['any_access'] = ['checkIsLoggedIn']
rights['show'] = ['checkIsUser']
rights['create'] = ['checkIsDeveloper']
rights['edit'] = ['checkIsDeveloper']
rights['delete'] = ['checkIsDeveloper']
rights['list'] = ['checkIsDeveloper']
rights['pick'] = ['checkIsUser'] # TODO(SRabbelier): proper check
new_params = {}
new_params['scope_logic'] = logic.getScopeLogic()
if 'name_short' not in params:
params['name_short'] = params['name']
if 'name_plural' not in params:
params['name_plural'] = params['name'] + 's'
if 'module_name' not in params:
params['module_name'] = params['name_short'].replace(' ', '_').lower()
if 'url_name' not in params:
params['url_name'] = params['module_name']
if 'document_prefix' not in params:
params['document_prefix'] = params['url_name']
# Do not expand edit_redirect to allow it to be overwritten without suffix
new_params['edit_redirect'] = '/%(url_name)s/edit/%(suffix)s'
new_params['missing_redirect'] = '/%(url_name)s/create' % params
new_params['delete_redirect'] = '/%(url_name)s/list' % params
new_params['invite_redirect'] = '/request/list'
# new_params['cancel_redirect'] = '/%(url_name)s/list' % params
new_params['public_redirect'] = None
new_params['sidebar'] = None
new_params['sidebar_grouping'] = 'main'
new_params['sidebar_defaults'] = []
new_params['sidebar_developer'] = [
# TODO(SRabbelier): remove create once new list code is in
('/%s/create', 'New %(name)s', 'create'),
('/%s/list', 'List %(name_plural)s', 'list'),
]
new_params['sidebar_additional'] = []
names_sans_link_id = [i for i in logic.getKeyFieldNames() if i != 'link_id']
sans_link_id_pattern = getPattern(names_sans_link_id,
linkable.SCOPE_PATH_ARG_PATTERN)
new_params['link_id_arg_pattern'] = linkable.LINK_ID_ARG_PATTERN
new_params['link_id_pattern_core'] = linkable.LINK_ID_PATTERN_CORE
new_params['scope_path_pattern'] = getScopePattern(params)
new_params['sans_link_id_pattern'] = sans_link_id_pattern
new_params['django_patterns'] = None
new_params['extra_django_patterns'] = []
new_params['django_patterns_defaults'] = []
# Defines the module package that the view is in. If it is not
# already defined in the respective view, it defaults to
# soc.views.models
if not params.get('module_package'):
new_params['module_package'] = 'soc.views.models'
if not params.get('no_edit'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>edit)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.edit', 'Edit %(name_short)s')]
if not params.get('no_delete'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>delete)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.delete', 'Delete %(name_short)s')]
if not params.get('no_show'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>show)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.public', 'Show %(name_short)s')]
if not params.get('no_admin'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>admin)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.admin',
'Show %(name_short)s (admin)')]
if not params.get('no_create_raw'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if not params.get('no_create_with_scope'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)/%(scope)s$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if not params.get('no_create_with_key_fields'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if not params.get('no_list_raw'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>list)$',
'%(module_package)s.%(module_name)s.list', 'List %(name_plural)s')]
if params.get('pickable'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>pick)$',
'%(module_package)s.%(module_name)s.pick', 'Pick %(name_short)s')]
if params.get('export_content_type'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>export)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.export', 'Export %(name_short)s')]
if params.get('sans_link_id_create'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)/%(sans_link_id)s$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if params.get('sans_link_id_list'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>list)/%(sans_link_id)s$',
'%(module_package)s.%(module_name)s.list', 'List %(name_plural)s')]
if params.get('sans_link_id_public_list'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>list_public)/%(sans_link_id)s$',
'%(module_package)s.%(module_name)s.list_public',
'List %(name_plural)s')]
new_params['public_template'] = 'soc/%(module_name)s/public.html' % params
new_params['export_template'] = 'soc/export.html'
new_params['create_template'] = 'soc/models/edit.html'
new_params['edit_template'] = 'soc/models/edit.html'
new_params['admin_template'] = 'soc/models/admin.html'
new_params['list_template'] = 'soc/models/list.html'
new_params['invite_template'] = 'soc/models/invite.html'
new_params['context'] = None
new_params['cache_pick'] = False
new_params['export_content_type'] = None
new_params['export_extension'] = '.txt'
new_params['csv_fieldnames'] = []
# TODO: Use only the js modules needed instead of js_uses_all
new_params['js_uses_all'] = DEF_JS_USES_LIST
new_params['js_uses_list'] = ['jq', 'menu']
new_params['js_uses_show'] = ['jq', 'menu']
new_params['js_uses_edit'] = ['jq', 'menu', 'tinymce', 'jq_purr',
'jq_spin', 'jq_autocomplete']
new_params['error_public'] = 'soc/%(module_name)s/error.html' % params
new_params['error_export'] = new_params['error_public']
new_params['error_edit'] = new_params['error_public']
new_params['public_row_action'] = {
"type": "redirect_custom",
"parameters": dict(new_window=False),
}
new_params['public_row_extra'] = lambda entity, *args: {
"link": redirects.getEditRedirect(entity, params),
}
# TODO(ljvderijk): refactor this out of there
new_params['list_params'] = {
'list_description': 'description',
}
new_params['list_description'] = DEF_LIST_DESCRIPTION_FMT % params
new_params['save_message'] = [ugettext('%(name)s saved.' % params),
ugettext('Cannot delete %(name)s.' % params)]
new_params['submit_msg_param_name'] = DEF_SUBMIT_MSG_PARAM_NAME
new_params['edit_params'] = {
DEF_SUBMIT_MSG_PARAM_NAME: DEF_SUBMIT_MSG_PROFILE_SAVED,
}
new_params['cannot_delete_params'] = {
DEF_SUBMIT_MSG_PARAM_NAME: DEF_SUBMIT_MSG_CANNOT_DELETE_ENTITY,
}
new_params['dynabase'] = helper.forms.BaseForm
create_dynaproperties = {
'clean_link_id': cleaning.clean_link_id('link_id'),
'clean_feed_url': cleaning.clean_feed_url,
}
create_dynaproperties.update(params.get('create_extra_dynaproperties', {}))
# dynafields override any dynaproperties
create_dynafields = getDynaFields(params.get('create_dynafields', {}))
create_dynaproperties = dicts.merge(create_dynafields, create_dynaproperties)
new_params['references'] = []
new_params['create_dynainclude'] = [] + params.get('extra_dynainclude', [])
new_params['create_dynaexclude'] = ['scope', 'scope_path'] + \
params.get('extra_dynaexclude', [])
new_params['create_dynaproperties'] = create_dynaproperties
edit_dynaproperties = {
'clean_link_id': cleaning.clean_link_id('link_id'),
'link_id': forms.CharField(widget=helper.widgets.ReadOnlyInput()),
}
edit_dynaproperties.update(params.get('edit_extra_dynaproperties', {}))
# dynafields override any dynaproperties
edit_dynafields = getDynaFields(params.get('edit_dynafields', {}))
edit_dynaproperties = dicts.merge(edit_dynafields, edit_dynaproperties)
new_params['edit_dynainclude'] = None
new_params['edit_dynaexclude'] = None
new_params['edit_dynaproperties'] = edit_dynaproperties
params = dicts.merge(params, new_params)
# These need to be constructed separately, because they require
# parameters that can be defined either in params, or new_params.
if not 'create_form' in params:
params['create_form'] = getCreateForm(params, logic.getModel())
if not 'edit_form' in params:
params['edit_form'] = getEditForm(params, params['create_form'])
if not 'admin_form' in params:
params['admin_form'] = getAdminForm(params['edit_form'])
if not 'key_fields_pattern' in params:
params['key_fields_pattern'] = getKeyFieldsPattern(params)
# merge already done by access.Checker
params['rights'] = rights
return params
def getDynaFields(fields):
"""Constructs a new DynaField using params.
Args:
params: the params dictionary used to extract the dyanfields
param_name: the name of the parameter to use
"""
dynafields = {}
# generate the dynafields
for field in fields:
base = field.pop('base')
name = field.pop('name')
passthrough = field.pop('passthrough', DEF_FIELD_INIT_PARAMS)
dynafield = dynaform.newDynaField(field, base, passthrough)
dynafields[name] = dynafield()
return dynafields
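# Illustrative example (hedged): the kind of field definition list that
# getDynaFields() expects; the field name below is hypothetical.
#
#     extra_fields = getDynaFields([
#         {'name': 'summary', 'base': forms.CharField, 'required': False},
#     ])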
def getCreateForm(params, model):
"""Constructs a new CreateForm using params.
Params usage:
dynabase: The dynabase value is used as the base argument to
dynaform.newDynaForm.
logic: The logic value is used to get the model argument to newDynaForm.
create_dynainclude: same as dynabase, but as dynainclude argument
create_dynaexclude: same as dynabase, but as dynaexclude argument
create_dynaproperties: same as dynabase, but as dynaproperties argument
"""
create_form = dynaform.newDynaForm(
dynabase = params['dynabase'],
dynamodel = model,
dynainclude = params['create_dynainclude'],
dynaexclude = params['create_dynaexclude'],
dynaproperties = params['create_dynaproperties'],
)
if 'extra_key_order' in params:
for field in params['extra_key_order']:
if field in create_form.base_fields.keyOrder:
create_form.base_fields.keyOrder.remove(field)
create_form.base_fields.keyOrder.extend(params['extra_key_order'])
return create_form
def getEditForm(params, base_form):
"""Constructs a new EditForm using params.
Params usage:
create_form: The dynabase value is used as the dynaform argument
to dyanform.extendDynaForm.
edit_dynainclude: same as create_form, but as dynainclude argument
edit_dynaexclude: same as create_form, but as dynaexclude argument
edit_dynaproperties: same as create_form, but as dynaproperties argument
"""
edit_form = dynaform.extendDynaForm(
dynaform = base_form,
dynainclude = params['edit_dynainclude'],
dynaexclude = params['edit_dynaexclude'],
dynaproperties = params['edit_dynaproperties'],
)
return edit_form
def getAdminForm(base_form):
"""Constructs a new AdminForm from base_form.
"""
# extend _and_ deepcopy the base_fields to do a proper copy
admin_form = dynaform.extendDynaForm(dynaform = base_form)
admin_form.base_fields = copy.deepcopy(admin_form.base_fields)
# replace all widgets with PTW's
for _, value in admin_form.base_fields.iteritems():
if not isinstance(value, forms.fields.Field):
continue
value.widget = widgets.PlainTextWidget()
return admin_form
def getKeyFieldsPattern(params):
"""Returns the Django pattern for this View's entity.
"""
logic = params['logic']
if logic.isIdBased():
return r"(?P<id>[0-9]*)"
names = logic.getKeyFieldNames()
scope_path_pattern = params['scope_path_pattern']
return getPattern(names, scope_path_pattern)
def getPattern(names, scope_path_pattern):
"""Returns the Django patterns for the specified names.
Args:
names: the field names that should be included in the pattern
scope_path_pattern: the pattern to use if the name is 'scope_path'
"""
patterns = []
for name in names:
if name == 'scope_path':
pattern = scope_path_pattern
else:
pattern = r'(?P<%s>%s)' % (name, linkable.LINK_ID_PATTERN_CORE)
patterns.append(pattern)
result = '/'.join(patterns)
return result
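# Illustrative example (hedged): what getPattern() assembles for a typical
# key-field list; the exact regexp cores come from soc.models.linkable.
#
#     pattern = getPattern(['scope_path', 'link_id'],
#                          linkable.SCOPE_PATH_ARG_PATTERN)
#     # -> "<scope path pattern>/(?P<link_id><link id core>)"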
def getScopePattern(params):
"""Returns the Scope pattern for this entity.
"""
logic = params['logic']
depth = logic.getScopeDepth()
if depth is None:
return linkable.SCOPE_PATH_ARG_PATTERN
regexps = [linkable.LINK_ID_PATTERN_CORE for _ in range(depth)]
regexp = '/'.join(regexps)
return r'(?P<scope_path>%s)' % regexp
| apache-2.0 | 4,683,945,472,841,034,000 | 33.239175 | 80 | 0.665422 | false |
HIPS/optofit | cosyne/make_figure_1.py | 1 | 17433 |
import os
import copy
import cPickle
import itertools
import numpy as np
seed = np.random.randint(2**16)
# seed = 2958
seed = 58187
#seed = 60017
print "Seed: ", seed
import matplotlib.pyplot as plt
from matplotlib.patches import Path, PathPatch
from hips.inference.particle_mcmc import *
from optofit.cneuron.compartment import SquidCompartment
from optofit.cinference.pmcmc import *
from hips.plotting.layout import *
import brewer2mpl
colors = brewer2mpl.get_map('Set1', 'Qualitative', 9).mpl_colors
logistic = lambda x: 1.0/(1+np.exp(-x))
logit = lambda p: np.log(p/(1-p))
# Set the random seed for reproducibility
np.random.seed(seed)
# Make a simple compartment
hypers = {
'C' : 1.0,
'V0' : -60.0,
'g_leak' : 0.03,
'E_leak' : -65.0}
gp1_hypers = {'D': 2,
'sig' : 1,
'g_gp' : 12.0,
'E_gp' : 50.0,
'alpha_0': 1.0,
'beta_0' : 2.0,
'sigma_kernel': 1.0}
gp2_hypers = {'D' : 1,
'sig' : 1,
'g_gp' : 3.60,
# 'g_gp' : 0,
'E_gp' : -77.0,
'alpha_0': 1.0,
'beta_0' : 2.0,
'sigma_kernel': 1.0}
squid_hypers = {
'C' : 1.0,
'V0' : -60.0,
'g_leak' : 0.03,
'E_leak' : -65.0,
'g_na' : 12.0,
# 'g_na' : 0.0,
'E_na' : 50.0,
'g_kdr' : 3.60,
'E_kdr' : -77.0
}
def sample_squid_model():
squid_body = SquidCompartment(name='body', hypers=squid_hypers)
# Initialize the model
D, I = squid_body.initialize_offsets()
# Set the recording duration
t_start = 0
t_stop = 300.
dt = 0.1
t = np.arange(t_start, t_stop, dt)
T = len(t)
    # Make input with injected current pulses (at 20-40, 120-160 and 220-280 ms)
inpt = np.zeros((T, I))
inpt[20/dt:40/dt,:] = 3.
inpt[120/dt:160/dt,:] = 5.
inpt[220/dt:280/dt,:] = 7.
inpt += np.random.randn(T, I)
# Set the initial distribution to be Gaussian around the steady state
z0 = np.zeros(D)
squid_body.steady_state(z0)
init = GaussianInitialDistribution(z0, 0.1**2 * np.eye(D))
# Set the proposal distribution using Hodgkin Huxley dynamics
# TODO: Fix the hack which requires us to know the number of particles
N = 100
sigmas = 0.0001*np.ones(D)
# Set the voltage transition dynamics to be a bit noisier
sigmas[squid_body.x_offset] = 0.25
prop = HodgkinHuxleyProposal(T, N, D, squid_body, sigmas, t, inpt)
# Set the observation model to observe only the voltage
etas = np.ones(1)
observed_dims = np.array([squid_body.x_offset]).astype(np.int32)
lkhd = PartialGaussianLikelihood(observed_dims, etas)
# Initialize the latent state matrix to sample N=1 particle
z = np.zeros((T,N,D))
z[0,0,:] = init.sample()
# Initialize the output matrix
x = np.zeros((T,D))
# Sample the latent state sequence
for i in np.arange(0,T-1):
# The interface kinda sucks. We have to tell it that
# the first particle is always its ancestor
prop.sample_next(z, i, np.zeros((N,), dtype=np.int32))
# Sample observations
for i in np.arange(0,T):
lkhd.sample(z,x,i,0)
# Extract the first (and in this case only) particle
z = z[:,0,:].copy(order='C')
# Downsample
t_ds = 0.1
intvl = int(t_ds / dt)
td = t[::intvl].copy('C')
zd = z[::intvl, :].copy('C')
xd = x[::intvl, :].copy('C')
inptd = inpt[::intvl].copy('C')
return td, zd, xd, inptd
def sausage_plot(ax, t, z_mean, z_std, lw=1, alpha=0.5, color='r'):
"""
Make a sausage plot
    :param ax: matplotlib axis to draw on
    :param t: time points
    :param z_mean: mean trajectory
    :param z_std: standard deviation of the trajectory at each time point
    :return:
"""
T = len(t)
z_env = np.zeros((T*2,2))
z_env[:,0] = np.concatenate((t, t[::-1]))
z_env[:,1] = np.concatenate((z_mean + z_std, z_mean[::-1] - z_std[::-1]))
ax.add_patch(PathPatch(Path(z_env),
facecolor=color,
alpha=alpha,
edgecolor='none',
linewidth=0))
ax.plot(t, z_mean, color=color, lw=lw)
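# Illustrative usage sketch (hedged): plotting a mean +/- one standard
# deviation band with sausage_plot; the arrays below are synthetic and only
# show the expected shapes.
#
#     fig, ax = plt.subplots()
#     tt = np.linspace(0, 10, 100)
#     mu = np.sin(tt)
#     sd = 0.1 * np.ones_like(tt)
#     sausage_plot(ax, tt, mu, sd, color='b')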
def make_figure_1(t, inpt, z_true, z_smpls, gpna_smpls, gpk_smpls):
"""
Make figure 1.
    :param t: time points [ms]
    :param inpt: injected current at each time point
    :param z_true: true latent state trajectory
    :param z_smpls: posterior samples of the latent states
    :param gpna_smpls: GP samples for the sodium channel dynamics
    :param gpk_smpls: GP samples for the potassium channel dynamics
    :return:
"""
# Parse out the true latent states
V_true = z_true[:,0]
m_true = z_true[:,1]
h_true = z_true[:,2]
n_true = z_true[:,3]
na_true = m_true**3 * h_true
k_true = n_true**4
# Extract the inferred states
offset = 6
z_mean = z_smpls[offset:,...].mean(0)
z_std = z_smpls[offset:,...].std(0)
V_inf_mean = z_smpls[offset:,:,0].mean(0)
V_inf_std = z_smpls[offset:,:,0].std(0)
na_inf_mean = logistic(z_smpls[offset:,:,1]).mean(0)
na_inf_std = logistic(z_smpls[offset:,:,1]).std(0)
k_inf_mean = logistic(z_smpls[offset:,:,3]).mean(0)
k_inf_std = logistic(z_smpls[offset:,:,3]).std(0)
# Make the figure
fig = create_figure((6.5,3))
# Plot the true and inferred voltage
V_ax = create_axis_at_location(fig, 0.75, 2.375, 5.25, 0.5,
transparent=True, box=False)
V_ax.plot(t, V_true, 'k', lw=2)
sausage_plot(V_ax, t, V_inf_mean, V_inf_std, color=colors[0])
V_ax.set_ylabel('$V \mathrm{ [mV]}$')
# Plot the true and inferred sodium channel state
na_ax = create_axis_at_location(fig, 0.75, 1.625, 5.25, 0.5,
transparent=True, box=False)
na_ax.plot(t, na_true, 'k', lw=2)
sausage_plot(na_ax, t, na_inf_mean, na_inf_std, color=colors[0])
na_ax.set_ylabel('$\sigma(z_{Na})$')
na_ax.set_ylim([0,0.3])
# Plot the true and inferred sodium channel state
k_ax = create_axis_at_location(fig, 0.75, .875, 5.25, 0.5,
transparent=True, box=False)
k_ax.plot(t, k_true, 'k', lw=2)
sausage_plot(k_ax, t, k_inf_mean, k_inf_std, color=colors[0])
k_ax.set_ylabel('$\sigma(z_{K})$')
k_ax.set_ylim([0,1])
# Plot the driving current
I_ax = create_axis_at_location(fig, 0.75, 0.375, 5.25, 0.25,
transparent=True, box=False)
I_ax.plot(t, inpt, 'k', lw=2)
I_ax.set_ylabel('$I \mathrm{}$')
I_ax.set_yticks([0,4,8])
I_ax.set_ylim([-2,10])
I_ax.set_xlabel('$\mathrm{time [ms]}$')
plt.savefig(os.path.join('cosyne', 'figure1.pdf'))
plt.ioff()
plt.show()
def make_figure_2(gpk_smpls):
grid = 100
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80.
V_max = 50.
Z = np.array(list(
itertools.product(*([np.linspace(z_min, z_max, grid) for _ in range(1)]
+ [np.linspace(V_min, V_max, grid)]))))
h_smpls = []
for gps in gpk_smpls:
m_pred, _, _, _ = gps[0].predict(Z)
h_smpls.append(m_pred)
h_mean = np.array(h_smpls).mean(0)
h_mean = h_mean.reshape((grid, grid))
fig = create_figure((2,2))
ax = create_axis_at_location(fig, .5, .5, 1, 1, box=True, transparent=True)
print "h_lim: ", np.amin(h_mean), " ", np.amax(h_mean)
im = ax.imshow(h_mean, extent=(V_min, V_max, z_max, z_min), cmap='RdGy',
vmin=-3, vmax=3)
ax.set_aspect((V_max-V_min)/(z_max-z_min))
ax.set_ylabel('$z_{K}$')
ax.set_xlabel('$V$')
ax.set_title('$\\frac{\mathrm{d}z_{K}}{\mathrm{d}t}(z_{K},V)$')
ax.set_xticks([-80, -40, 0, 40])
fig.savefig('dk_dt.pdf')
def make_figure_3():
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80.
V_max = 50.
dlogit = lambda x: 1./(x*(1-x))
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
uu = np.linspace(-6,0,1000)
xx = u_to_x(uu)
#g = lambda x: x
#ginv = lambda u: u
#dg_dx = lambda x: 1.0
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
# Plot the change in u as a function of u and V
V = np.linspace(0,(V_max-V_min),100)
fig = create_figure((2,2))
ax = create_axis_at_location(fig, .5, .5, 1, 1, box=True, transparent=True)
ax.imshow(du_dt(uu[:,None], V[None,:]),
extent=[V_min, V_max, uu[-1], uu[0]],
interpolation="none",
cmap='RdGy')
ax.set_xlabel('V')
ax.set_aspect((V_max-V_min)/(z_max-z_min))
ax.set_ylabel('u')
ax.set_title('du_dt(u,V)')
# ax2 = fig.add_subplot(1,2,2)
# ax2.imshow(dx_dt(xx[:,None], V[None,:]),
# extent=[V[0], V[-1], xx[-1], xx[0]],
# interpolation="none",
# cmap=plt.cm.Reds)
# ax2.set_aspect(100)
# ax2.set_xlabel('V')
# ax2.set_ylabel('x')
# ax2.set_title('dx_dt(x,V)')
plt.ioff()
plt.show()
def make_figure_4():
logit = lambda x: np.log(x / (1-x))
logistic = lambda u: np.exp(u) / (1 + np.exp(u))
dlogit = lambda x: 1./(x*(1-x))
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
uu = np.linspace(-6,6,1000)
xx = u_to_x(uu)
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
# Plot the change in u as a function of u and V
V = np.linspace(0,100,100)
fig = plt.figure()
ax1 = fig.add_subplot(1,2,1)
ax1.imshow(du_dt(uu[:,None], V[None,:]),
extent=[V[0], V[-1], uu[-1], uu[0]],
interpolation="none",
cmap=plt.cm.Reds)
ax1.set_aspect(20)
ax1.set_xlabel('V')
ax1.set_ylabel('u')
ax1.set_title('du_dt(u,V)')
ax2 = fig.add_subplot(1,2,2)
ax2.imshow(dx_dt(xx[:,None], V[None,:]),
extent=[V[0], V[-1], xx[-1], xx[0]],
interpolation="none",
cmap=plt.cm.Reds)
ax2.set_aspect(100)
ax2.set_xlabel('V')
ax2.set_ylabel('x')
ax2.set_title('dx_dt(x,V)')
plt.show()
def make_figure_5(gpk_smpls):
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
dlogit = lambda x: 1./(x*(1-x))
uu = np.linspace(-6,6,100)
xx = u_to_x(uu)
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
grid = 100
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80
V_max = 50
zz = np.linspace(z_min, z_max, grid)
V_gp = np.linspace(V_min, V_max, grid)
Z = np.array(list(
itertools.product(*([zz for _ in range(1)]
+ [V_gp]))))
h_smpls = []
for gps in gpk_smpls:
m_pred, _, _, _ = gps[0].predict(Z)
h_smpls.append(m_pred)
h_mean = np.array(h_smpls).mean(0)
h_mean = h_mean.reshape((grid, grid))
# Plot the change in u as a function of u and V
def dsig(z):
sigz = logistic(z)
return np.multiply(sigz, 1 - sigz)
df_dt = lambda z, dzdt: np.multiply(dsig(z), dzdt)
fig = plt.figure()
ax1 = fig.add_subplot(2,2,1)
dudt = du_dt(uu[:,None], V_gp[None,:])
v_max = max((np.max(dudt), np.max(h_mean)))
v_min = min((np.min(dudt), np.min(h_mean)))
ax1.imshow(du_dt(uu[:,None], V_gp[None,:]),
extent=[V_gp[0], V_gp[-1], uu[-1], uu[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=v_min,
vmax=v_max)
ax1.set_aspect(20)
ax1.set_xlabel('V')
ax1.set_ylabel('latent state')
ax1.set_title('Ground Truth: dz_dt(z,V)')
ax2 = fig.add_subplot(2,2,3)
ax2.imshow(h_mean,
extent=[V_gp[0], V_gp[-1], uu[-1], uu[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=v_min,
vmax=v_max)
ax2.set_aspect(20)
ax2.set_xlabel('V')
ax2.set_ylabel('latent state')
ax2.set_title('Inferred: dz_dt(z,V)')
ax1 = fig.add_subplot(2,2,2)
ax1.imshow(uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60),
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=-1,
vmax=.5)
ax1.set_aspect(100)
ax1.set_xlabel('V')
ax1.set_ylabel('open fraction')
ax1.set_title('Ground Truth: df_dt(f,V)')
ax2 = fig.add_subplot(2,2,4)
ax2.imshow(df_dt(np.array([zz for a in range(grid)]).transpose(), h_smpls[0].reshape((grid, grid))),
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=-1,
vmax=.5)
ax2.set_aspect(100)
ax2.set_xlabel('V')
ax2.set_ylabel('open fraction')
ax2.set_title('Inferred: df_dt(f,V)')
plt.show()
def plot_at_x(ax, index):
mean = uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60)
mean = mean[index, :]
#std = 0.0001 * np.ones(mean.shape)
voltage = V_gp
color = 'r'
ax.plot(voltage, mean, color=color)
#ax.fill_between(voltage, mean - std, mean + std, color=color, alpha = 0.5)
mean, _, dzdt_low, dzdt_high = gpk_smpls[7][0].predict(Z) #62
mean = mean.reshape((grid, grid))
dzdt_low = dzdt_low.reshape((grid, grid))
dzdt_high = dzdt_high.reshape((grid, grid))
zs = np.array([zz for b in range(grid)]).transpose()
dfdt_mean = df_dt(zs, mean)
dfdt_low = df_dt(zs, dzdt_low)
dfdt_high = df_dt(zs, dzdt_high)
color = 'b'
ax.plot(voltage, dfdt_mean[index, :], color=color)
ax.fill_between(voltage, dfdt_low[index, :], dfdt_high[index, :], color=color, alpha = 0.5)
f, axs = plt.subplots(9, sharex=True)
for i in range(len(axs)):
plot_at_x(axs[i], i*2 + 42)
plt.show()
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.imshow((uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60)) - h_mean,
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
cmap=plt.cm.RdGy,
vmin=-.5,
vmax=.5,
)
ax.set_aspect(100)
plt.show()
def make_figure_7(z_smpls, gpk_smpls):
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
dlogit = lambda x: 1./(x*(1-x))
uu = np.linspace(-6,6,100)
xx = u_to_x(uu)
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
grid = 100
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80
V_max = 50
zz = np.linspace(z_min, z_max, grid)
V_gp = np.linspace(V_min, V_max, grid)
Z = np.array(list(
itertools.product(*([zz for _ in range(1)]
+ [V_gp]))))
h_smpls = []
for gps in gpk_smpls:
m_pred, _, _, _ = gps[0].predict(Z)
h_smpls.append(m_pred)
h_mean = np.array(h_smpls).mean(0)
h_mean = h_mean.reshape((grid, grid))
# Plot the change in u as a function of u and V
def dsig(z):
sigz = logistic(z)
return np.multiply(sigz, 1 - sigz)
df_dt = lambda z, dzdt: np.multiply(dsig(z), dzdt)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.imshow((uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60)) - df_dt(np.array([zz for a in range(grid)]).transpose(), h_mean),
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
cmap=plt.cm.RdGy
)
ax.set_aspect(100)
ax.scatter(z_smpls[:11, :, 0].reshape((11*3000)), logistic(z_smpls[:11, :, 3].reshape((11*3000))))
ax.set_title("Errors")
plt.show()
# Simulate the squid compartment to get the ground truth
t, z_true, x, inpt = sample_squid_model()
# Load the results of the pMCMC inference
with open('squid2_results5.pkl', 'r') as f:
z_smpls, gpna_smpls, gpk_smpls = cPickle.load(f)
burn = 30
z_smpls = z_smpls[burn:]
gpna_smpls = gpna_smpls[burn:]
gpk_smpls = gpk_smpls[burn:]
make_figure_1(t, inpt, z_true, z_smpls, gpna_smpls, gpk_smpls)
#make_figure_2(gpk_smpls)
#make_figure_3()
#make_figure_4()
make_figure_5(gpk_smpls)
make_figure_7(z_smpls, gpk_smpls)
| gpl-2.0 | 7,677,128,331,014,776,000 | 29.265625 | 169 | 0.5224 | false |
Metonimie/benchmark-scoreboard | src/models/__init__.py | 1 | 1802 | """
Author: Denis Nutiu <[email protected]>
This file is part of scoreboard-benchmark.
scoreboard-benchmark is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
scoreboard-benchmark is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with scoreboard-benchmark . If not, see <http://www.gnu.org/licenses/>.
"""
from flask_sqlalchemy import SQLAlchemy
import sqlalchemy_utils
db = SQLAlchemy()
class Result(db.Model):
"""
The result model will store benchmark results.
"""
__tablename__ = 'results'
id = db.Column(db.Integer, primary_key=True, index=True)
name = db.Column(db.String(50))
gpu = db.Column(db.String(256))
cpu = db.Column(db.String(256))
log = db.Column(db.Text)
score = db.Column(db.Integer, index=True)
ip = db.Column(sqlalchemy_utils.IPAddressType)
    created = db.Column(db.DateTime(timezone=True), server_default=db.func.now())  # creation timestamp, set by the database server
def __init__(self, name="Anonymous", gpu=None, cpu=None, log=None, ip=None, score=1):
self.name = name
self.gpu = gpu
self.cpu = cpu
self.log = log
self.score = score
self.ip = ip
def __repr__(self):
return self.gpu
__table_args__ = (
db.CheckConstraint(score > 0, name="positive_score_constraint"),
{}
)
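# Illustrative usage sketch (hedged): creating and persisting a Result inside
# a Flask application context; `app` and all field values are hypothetical.
#
#     with app.app_context():
#         result = Result(name="Anonymous", gpu="GPU X", cpu="CPU Y",
#                         log="raw benchmark log", ip="127.0.0.1", score=42)
#         db.session.add(result)
#         db.session.commit()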
| lgpl-3.0 | 8,096,559,075,137,097,000 | 33.653846 | 117 | 0.668147 | false |
hsavolai/vmlab | src/kiwi/log.py | 1 | 5433 | #
# Kiwi: a Framework and Enhanced Widgets for Python
#
# Copyright (C) 2005-2006 Async Open Source
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# Author(s): Johan Dahlin <[email protected]>
#
"""
Extension to the logging module
This module defines a couple of extensions to the logging module included
in the python standard distribution.
It creates an additional logging handler that prints log records on the
standard output. By default this handler only shows records that have a
level of logging.WARNING or higher.
The messages printed by this handler can be modified by using the environment
variable called KIWI_LOG.
The syntax for the string which KIWI_LOG points to is the following::
domain ':' level [, domain ':', level]
domain can contain wildcards such as * and ?
level is an integer 1-5 which defines the minimal level:
- B{5}: DEBUG
- B{4}: INFO
- B{3}: WARNING
- B{2}: ERROR
- B{1}: CRITICAL
Examples::
KIWI_LOG="stoq*:5"
will print all the messages in a domain starting with stoq with DEBUG or higher::
KIWI_LOG="kiwi*:4,stoq.*:5"
will print all the messages with INFO or higher in all domains starting with kiwi,
and all the messages in the stoq.* domains which are DEBUG or higher
Inspiration for the syntax is taken from the U{debugging facilities<http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gstreamer-GstInfo.html#id2857358>} of the
U{GStreamer<http://www.gstreamer.net>} multimedia framework.
"""
import fnmatch
import logging
import os
# Globals
_console = None
_filter = None
class LogError(Exception):
pass
class Logger(object):
# Backwards compatibility, we should probably replace the callsites
# with import logging; logging.getLogger(name)
def __new__(self, name):
return logging.getLogger(name)
class _Logger(logging.Logger):
def __call__(self, message, *args, **kwargs):
self.info(message, *args, **kwargs)
logging.setLoggerClass(_Logger)
class ReversedGlobalFilter(logging.Filter):
"""
    It's like a reversed filter: the default behavior is to not show the
    message; you need to add custom filters for all the records you wish
    to see.
"""
def __init__(self):
logging.Filter.__init__(self)
self.filters = []
def add_filter(self, f, level=logging.DEBUG):
self.filters.append((f, level))
def filter(self, record):
for f, level in self.filters:
if (record.levelno >= level and
fnmatch.fnmatch(record.name, f)):
return True
return False
def set_log_file(filename, mask=None):
"""
Set the filename used for logging.
    @param filename: name of the file to write log records to
    @param mask: optional domain mask (e.g. 'kiwi.*'); when given, only
        records from matching domains are also written to the file
"""
file_handler = logging.FileHandler(filename, 'w')
file_handler.setFormatter(logging.Formatter(
'%(asctime)s %(name)-18s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'))
root = logging.getLogger()
root.addHandler(file_handler)
if mask:
file_filter = ReversedGlobalFilter()
file_filter.add_filter(mask, logging.DEBUG)
file_handler.addFilter(file_filter)
return file_handler.stream
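# Illustrative usage sketch (hedged): sending all kiwi.* records of DEBUG or
# higher to a file while keeping the default console behaviour; the file name
# is an arbitrary example.
#
#     set_log_file('/tmp/kiwi-debug.log', mask='kiwi.*')
#     log = Logger('kiwi.ui')
#     log.info('widget created')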
def set_log_level(name, level):
"""
Set the log level.
@param name: logging category
@param level: level
"""
global _filter
_filter.add_filter(name, level)
def _read_log_levels(console_filter):
log_levels = {}
# bootstrap issue, cannot depend on kiwi.environ
log_level = os.environ.get('KIWI_LOG')
if not log_level:
return log_levels
for part in log_level.split(','):
if not ':' in part:
continue
if part.count(':') > 1:
raise LogError("too many : in part %s" % part)
name, level = part.split(':')
try:
level = int(level)
except ValueError:
raise LogError("invalid level: %s" % level)
if level < 0 or level > 5:
raise LogError("level must be between 0 and 5")
level = 50 - (level * 10)
console_filter.add_filter(name, level)
def _create_console():
global _filter, _console
console = logging.StreamHandler()
console.setFormatter(logging.Formatter(
"%(asctime)s %(name)-20s %(message)s", datefmt='%T'))
root = logging.getLogger()
root.addHandler(console)
root.setLevel(logging.DEBUG)
console_filter = ReversedGlobalFilter()
# Always display warnings or higher on the console
console_filter.add_filter('*', logging.WARNING)
console.addFilter(console_filter)
_read_log_levels(console_filter)
# Set globals
_filter = console_filter
_console = console
_create_console()
kiwi_log = Logger('kiwi')
| gpl-3.0 | -9,216,188,398,400,686,000 | 27.746032 | 181 | 0.674581 | false |
DedMemez/ODS-August-2017 | safezone/GZPlayground.py | 1 | 3107 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.safezone.GZPlayground
from direct.fsm import State
from toontown.safezone import GolfKart
from toontown.toonbase import ToontownGlobals, TTLocalizer
from toontown.toontowngui import TTDialog
import Playground
import sys
class GZPlayground(Playground.Playground):
def __init__(self, loader, parentFSM, doneEvent):
Playground.Playground.__init__(self, loader, parentFSM, doneEvent)
self.parentFSM = parentFSM
self.golfKartBlockDoneEvent = 'golfKartBlockDone'
self.fsm.addState(State.State('golfKartBlock', self.enterGolfKartBlock, self.exitGolfKartBlock, ['walk']))
state = self.fsm.getStateNamed('walk')
state.addTransition('golfKartBlock')
self.golfKartDoneEvent = 'golfKartDone'
self.trolley = None
self.warningDialog = None
return
def destroyWarningDialog(self):
if self.warningDialog:
self.warningDialog.destroy()
self.warningDialog = None
return
def warningDone(self, *args):
self.destroyWarningDialog()
self.fsm.request('walk')
def enterGolfKartBlock(self, golfKart):
if sys.platform == 'android':
base.localAvatar.b_setAnimState('neutral', 1)
self.destroyWarningDialog()
self.warningDialog = TTDialog.TTDialog(text=TTLocalizer.AndroidGolfMessage, command=self.warningDone, style=TTDialog.Acknowledge)
self.warningDialog.show()
return
base.localAvatar.laffMeter.start()
base.localAvatar.b_setAnimState('off', 1)
self.accept(self.golfKartDoneEvent, self.handleGolfKartDone)
self.trolley = GolfKart.GolfKart(self, self.fsm, self.golfKartDoneEvent, golfKart.getDoId())
self.trolley.load()
self.trolley.enter()
def exitGolfKartBlock(self):
base.localAvatar.laffMeter.stop()
self.destroyWarningDialog()
self.ignore(self.golfKartDoneEvent)
if self.trolley:
self.trolley.unload()
self.trolley.exit()
self.trolley = None
return
def detectedGolfKartCollision(self, golfKart):
self.notify.debug('detectedGolfkartCollision()')
self.fsm.request('golfKartBlock', [golfKart])
def handleGolfKartDone(self, doneStatus):
self.notify.debug('handling golf kart done event')
mode = doneStatus['mode']
if mode == 'reject':
self.fsm.request('walk')
elif mode == 'exit':
self.fsm.request('walk')
elif mode == 'golfcourse':
self.doneStatus = {'loader': 'golfcourse',
'where': 'golfcourse',
'hoodId': self.loader.hood.id,
'zoneId': doneStatus['zoneId'],
'shardId': None,
'courseId': doneStatus['courseId']}
messenger.send(self.doneEvent)
else:
self.notify.error('Unknown mode: ' + mode + ' in handleGolfKartDone')
return | apache-2.0 | 5,831,154,515,158,108,000 | 37.858974 | 141 | 0.629868 | false |
mic4ael/indico | indico/modules/events/timetable/blueprint.py | 1 | 6046 | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from indico.modules.events.timetable.controllers.display import (RHTimetable, RHTimetableEntryInfo,
RHTimetableExportDefaultPDF, RHTimetableExportPDF)
from indico.modules.events.timetable.controllers.legacy import (RHLegacyTimetableAddBreak,
RHLegacyTimetableAddContribution,
RHLegacyTimetableAddSession,
RHLegacyTimetableAddSessionBlock,
RHLegacyTimetableBreakREST,
RHLegacyTimetableDeleteEntry,
RHLegacyTimetableEditEntry,
RHLegacyTimetableEditEntryDateTime,
RHLegacyTimetableEditEntryTime,
RHLegacyTimetableEditSession, RHLegacyTimetableFitBlock,
RHLegacyTimetableGetUnscheduledContributions,
RHLegacyTimetableMoveEntry, RHLegacyTimetableReschedule,
RHLegacyTimetableScheduleContribution,
RHLegacyTimetableShiftEntries,
RHLegacyTimetableSwapEntries)
from indico.modules.events.timetable.controllers.manage import (RHCloneContribution, RHManageSessionTimetable,
RHManageTimetable, RHManageTimetableEntryInfo,
RHTimetableREST)
from indico.web.flask.util import make_compat_redirect_func
from indico.web.flask.wrappers import IndicoBlueprint
_bp = IndicoBlueprint('timetable', __name__, template_folder='templates', virtual_template_folder='events/timetable',
url_prefix='/event/<confId>')
# Management
_bp.add_url_rule('/manage/timetable/', 'management', RHManageTimetable)
_bp.add_url_rule('/manage/timetable/', 'timetable_rest', RHTimetableREST, methods=('POST',))
_bp.add_url_rule('/manage/timetable/<int:entry_id>', 'timetable_rest', RHTimetableREST, methods=('PATCH', 'DELETE'))
_bp.add_url_rule('/manage/timetable/session/<int:session_id>/', 'manage_session', RHManageSessionTimetable)
# Timetable legacy operations
_bp.add_url_rule('/manage/timetable/add-session', 'add_session', RHLegacyTimetableAddSession, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/timetable/break/<int:break_id>', 'legacy_break_rest', RHLegacyTimetableBreakREST,
methods=('PATCH',))
with _bp.add_prefixed_rules('/manage/timetable/session/<int:session_id>', '/manage/timetable'):
_bp.add_url_rule('/', 'session_rest', RHLegacyTimetableEditSession,
methods=('PATCH',))
_bp.add_url_rule('/entry/<int:entry_id>/info', 'entry_info_manage', RHManageTimetableEntryInfo)
_bp.add_url_rule('/entry/<int:entry_id>/delete', 'delete_entry', RHLegacyTimetableDeleteEntry, methods=('POST',))
_bp.add_url_rule('/entry/<int:entry_id>/move', 'move_entry', RHLegacyTimetableMoveEntry,
methods=('GET', 'POST'))
_bp.add_url_rule('/entry/<int:entry_id>/shift', 'shift_entries', RHLegacyTimetableShiftEntries, methods=('POST',))
_bp.add_url_rule('/entry/<int:entry_id>/swap', 'swap_entries', RHLegacyTimetableSwapEntries, methods=('POST',))
_bp.add_url_rule('/entry/<int:entry_id>/edit/', 'edit_entry', RHLegacyTimetableEditEntry, methods=('GET', 'POST'))
_bp.add_url_rule('/entry/<int:entry_id>/edit/time', 'edit_entry_time', RHLegacyTimetableEditEntryTime,
methods=('GET', 'POST'))
_bp.add_url_rule('/entry/<int:entry_id>/edit/datetime', 'edit_entry_datetime', RHLegacyTimetableEditEntryDateTime,
methods=('POST',))
_bp.add_url_rule('/block/<block_id>/schedule', 'schedule', RHLegacyTimetableScheduleContribution, methods=('POST',))
_bp.add_url_rule('/block/<block_id>/fit', 'fit_session_block', RHLegacyTimetableFitBlock, methods=('POST',))
_bp.add_url_rule('/not-scheduled', 'not_scheduled', RHLegacyTimetableGetUnscheduledContributions)
_bp.add_url_rule('/schedule', 'schedule', RHLegacyTimetableScheduleContribution, methods=('POST',))
_bp.add_url_rule('/reschedule', 'reschedule', RHLegacyTimetableReschedule, methods=('POST',))
_bp.add_url_rule('/add-break', 'add_break', RHLegacyTimetableAddBreak, methods=('GET', 'POST'))
_bp.add_url_rule('/add-contribution', 'add_contribution', RHLegacyTimetableAddContribution, methods=('GET', 'POST'))
_bp.add_url_rule('/add-session-block', 'add_session_block', RHLegacyTimetableAddSessionBlock,
methods=('GET', 'POST'))
_bp.add_url_rule('/clone-contribution', 'clone_contribution', RHCloneContribution, methods=('POST',))
# Display
_bp.add_url_rule('/timetable/', 'timetable', RHTimetable)
_bp.add_url_rule('/timetable/pdf', 'export_pdf', RHTimetableExportPDF, methods=('GET', 'POST'))
_bp.add_url_rule('/timetable/timetable.pdf', 'export_default_pdf', RHTimetableExportDefaultPDF)
_bp.add_url_rule('/timetable/entry/<int:entry_id>/info', 'entry_info', RHTimetableEntryInfo)
# Legacy URLs
_compat_bp = IndicoBlueprint('compat_timetable', __name__)
_compat_bp.add_url_rule('/conferenceTimeTable.py', 'timetable_modpython', make_compat_redirect_func(_bp, 'timetable'))
| mit | 2,053,347,587,523,897,600 | 73.641975 | 120 | 0.606351 | false |
Arcanemagus/SickRage | tests/sickrage_tests/show/history_tests.py | 1 | 3166 | # coding=utf-8
# This file is part of SickRage.
#
# URL: https://sick-rage.github.io
# Git: https://github.com/Sick-Rage/Sick-Rage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test history
"""
from __future__ import print_function, unicode_literals
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
from sickbeard.common import Quality
from sickrage.show.History import History
import six
class HistoryTests(unittest.TestCase):
"""
Test history
"""
def test_get_actions(self):
"""
Tests whether or not the different kinds of actions an episode can have are returned correctly
"""
test_cases = {
None: [],
'': [],
'wrong': [],
'downloaded': Quality.DOWNLOADED,
'Downloaded': Quality.DOWNLOADED,
'snatched': Quality.SNATCHED,
'Snatched': Quality.SNATCHED,
}
unicode_test_cases = {
'': [],
'wrong': [],
'downloaded': Quality.DOWNLOADED,
'Downloaded': Quality.DOWNLOADED,
'snatched': Quality.SNATCHED,
'Snatched': Quality.SNATCHED,
}
for tests in test_cases, unicode_test_cases:
for (action, result) in six.iteritems(tests):
self.assertEqual(History._get_actions(action), result) # pylint: disable=protected-access
def test_get_limit(self):
"""
        Tests the static _get_limit method, which should return the limit on the number of elements that should be shown/returned
"""
test_cases = {
None: 0,
'': 0,
'0': 0,
'5': 5,
'-5': 0,
'1.5': 0,
'-1.5': 0,
5: 5,
-5: 0,
1.5: 1,
-1.5: 0,
}
unicode_test_cases = {
'': 0,
'0': 0,
'5': 5,
'-5': 0,
'1.5': 0,
'-1.5': 0,
}
for tests in test_cases, unicode_test_cases:
for (action, result) in six.iteritems(tests):
self.assertEqual(History._get_limit(action), result) # pylint: disable=protected-access
if __name__ == '__main__':
print('=====> Testing {0}'.format(__file__))
SUITE = unittest.TestLoader().loadTestsFromTestCase(HistoryTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| gpl-3.0 | 4,324,015,508,264,668,700 | 28.588785 | 127 | 0.575174 | false |
savant-nz/carbon | Scripts/SCons/Compilers/Clang.sconscript.py | 1 | 2022 | #
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not
# distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Creates and returns a build environment that uses Clang. This is done by altering the GCC build environment.
import os
import sys
Import('*')
env = SConscript('GCC.sconscript.py')
env['CC'] = 'clang'
env['CXX'] = 'clang++'
env['LINK'] = 'clang++'
env['CCFLAGS'] += ['-stdlib=libc++', '-Wno-undefined-func-template', '-Wno-undefined-var-template']
env['LINKFLAGS'] += ['-stdlib=libc++']
# Make color diagnostics work when piping Clang output through SCons
if 'TERM' in os.environ:
env['ENV']['TERM'] = os.environ['TERM']
env['CCFLAGS'] += ['-fcolor-diagnostics']
# Access the toolchain through xcrun when building on macOS
if sys.platform == 'darwin':
for key in ['CC', 'CXX', 'LINK', 'AR', 'AS', 'RANLIB']:
env[key] = 'xcrun ' + env[key]
# Extra warnings for strict builds
if isStrictBuild:
env['CCFLAGS'] += ['-Weverything', '-Wno-c++98-compat', '-Wno-disabled-macro-expansion', '-Wno-documentation',
'-Wno-documentation-unknown-command', '-Wno-exit-time-destructors', '-Wno-float-equal',
'-Wno-format-nonliteral', '-Wno-global-constructors', '-Wno-header-hygiene',
'-Wno-implicit-fallthrough', '-Wno-keyword-macro', '-Wno-missing-noreturn',
'-Wno-missing-prototypes', '-Wno-nullable-to-nonnull-conversion', '-Wno-over-aligned',
'-Wno-padded', '-Wno-sign-conversion', '-Wno-switch-enum', '-Wno-unused-template',
'-Wno-weak-vtables']
# Alter GCC's precompiled header support to pass a -include-pch through to Clang
def UsePrecompiledHeader(self, header, **keywords):
self.BuildPrecompiledHeader(header, **keywords)
self['CCFLAGS'] += ['-Xclang', '-include-pch', '-Xclang', self['GCH']]
env.AddMethod(UsePrecompiledHeader)
Return('env')
| mpl-2.0 | 6,866,951,723,079,990,000 | 39.44 | 115 | 0.64095 | false |
mrunge/openstack_horizon | openstack_horizon/dashboards/identity/roles/urls.py | 1 | 1066 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_horizon.dashboards.identity.roles import views
urlpatterns = patterns(
'openstack_horizon.dashboards.identity.roles.views',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<role_id>[^/]+)/update/$',
views.UpdateView.as_view(), name='update'),
url(r'^create/$', views.CreateView.as_view(), name='create'))
| apache-2.0 | -6,960,979,221,053,401,000 | 41.64 | 78 | 0.713884 | false |
jberci/resolwe | resolwe/elastic/signals.py | 1 | 1451 | """.. Ignore pydocstyle D400.
=======================
Elastic Signal Handlers
=======================
"""
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from guardian.models import GroupObjectPermission, UserObjectPermission
from .builder import index_builder
def _process_permission(perm):
"""Rebuild indexes affected by the given permission."""
# XXX: Optimize: rebuild only permissions, not whole document
codename = perm.permission.codename
if not codename.startswith('view') and not codename.startswith('owner'):
return
index_builder.build(perm.content_object)
@receiver(post_save, sender=UserObjectPermission)
def add_user_permission(sender, instance, **kwargs):
"""Process indexes after adding user permission."""
_process_permission(instance)
@receiver(post_save, sender=GroupObjectPermission)
def add_group_permission(sender, instance, **kwargs):
"""Process indexes after adding group permission."""
_process_permission(instance)
@receiver(post_delete, sender=UserObjectPermission)
def remove_user_permission(sender, instance, **kwargs):
"""Process indexes after removing user permission."""
_process_permission(instance)
@receiver(post_delete, sender=GroupObjectPermission)
def remove_group_permission(sender, instance, **kwargs):
"""Process indexes after removing group permission."""
_process_permission(instance)
| apache-2.0 | 3,300,003,933,398,636,000 | 29.87234 | 76 | 0.73122 | false |
CptSpaceToaster/memegen | memegen/services/template.py | 1 | 1885 | import logging
from ._base import Service
from ..domain import Template
log = logging.getLogger(__name__)
class TemplateService(Service):
def __init__(self, template_store, **kwargs):
super().__init__(**kwargs)
self.template_store = template_store
def all(self):
"""Get all templates."""
templates = self.template_store.filter()
return templates
def find(self, key):
"""Find a template with a matching key."""
key = Template.strip(key)
# Find an exact match
template = self.template_store.read(key)
if template:
return template
# Else, find an alias match
for template in self.all():
if key in template.aliases_stripped:
return template
# Else, no match
raise self.exceptions.not_found
def validate(self):
"""Ensure all template are valid and conflict-free."""
templates = self.all()
keys = {template.key: template for template in templates}
for template in templates:
log.info("checking template '%s' ...", template)
if not template.validate():
return False
for alias in template.aliases:
log.info("checking alias '%s' -> '%s' ...", alias, template.key)
if alias not in template.aliases_lowercase:
msg = "alias '%s' should be lowercase characters or dashes"
log.error(msg, alias)
return False
try:
existing = keys[alias]
except KeyError:
keys[alias] = template
else:
msg = "alias '%s' already used in template: %s"
log.error(msg, alias, existing)
return False
return True
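# Illustrative usage (not from the original module; assumes the Service base
# class wires up the `exceptions` helper used by find() above and that
# `store` is a populated template store):
#   service = TemplateService(template_store=store, exceptions=exceptions)
#   template = service.find('fry')  # matches by key, falling back to aliases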
| mit | -175,650,737,700,038,140 | 30.416667 | 80 | 0.537931 | false |
dc3-plaso/plaso | tests/analysis/chrome_extension.py | 1 | 6839 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the chrome extension analysis plugin."""
import os
import unittest
from plaso.analysis import chrome_extension
from tests import test_lib as shared_test_lib
from tests.analysis import test_lib
class MockChromeExtensionPlugin(chrome_extension.ChromeExtensionPlugin):
"""Chrome extension analysis plugin used for testing."""
NAME = 'chrome_extension_test'
def _GetChromeWebStorePage(self, extension_identifier):
"""Retrieves the page for the extension from the Chrome store website.
Args:
extension_identifier (str): Chrome extension identifier.
Returns:
str: page content or None.
"""
chrome_web_store_file = shared_test_lib.GetTestFilePath([
u'chrome_extensions', extension_identifier])
if not os.path.exists(chrome_web_store_file):
return
with open(chrome_web_store_file, 'rb') as file_object:
page_content = file_object.read()
return page_content.decode(u'utf-8')
def _GetTestFilePath(self, path_segments):
"""Retrieves the path of a test file in the test data directory.
Args:
path_segments (list[str]): path segments inside the test data directory.
Returns:
str: path of the test file.
"""
# Note that we need to pass the individual path segments to os.path.join
# and not a list.
return os.path.join(self._TEST_DATA_PATH, *path_segments)
class ChromeExtensionTest(test_lib.AnalysisPluginTestCase):
"""Tests for the chrome extension analysis plugin."""
# pylint: disable=protected-access
_MACOSX_PATHS = [
u'/Users/dude/Libary/Application Data/Google/Chrome/Default/Extensions',
(u'/Users/dude/Libary/Application Data/Google/Chrome/Default/Extensions/'
u'apdfllckaahabafndbhieahigkjlhalf'),
u'/private/var/log/system.log',
u'/Users/frank/Library/Application Data/Google/Chrome/Default',
u'/Users/hans/Library/Application Data/Google/Chrome/Default',
(u'/Users/frank/Library/Application Data/Google/Chrome/Default/'
u'Extensions/pjkljhegncpnkpknbcohdijeoejaedia'),
u'/Users/frank/Library/Application Data/Google/Chrome/Default/Extensions']
_WINDOWS_PATHS = [
u'C:\\Users\\Dude\\SomeFolder\\Chrome\\Default\\Extensions',
(u'C:\\Users\\Dude\\SomeNoneStandardFolder\\Chrome\\Default\\Extensions\\'
u'hmjkmjkepdijhoojdojkdfohbdgmmhki'),
(u'C:\\Users\\frank\\AppData\\Local\\Google\\Chrome\\Extensions\\'
u'blpcfgokakmgnkcojhhkbfbldkacnbeo'),
u'C:\\Users\\frank\\AppData\\Local\\Google\\Chrome\\Extensions',
(u'C:\\Users\\frank\\AppData\\Local\\Google\\Chrome\\Extensions\\'
u'icppfcnhkcmnfdhfhphakoifcfokfdhg'),
u'C:\\Windows\\System32',
u'C:\\Stuff/with path separator\\Folder']
_MACOSX_USERS = [
{u'name': u'root', u'path': u'/var/root', u'sid': u'0'},
{u'name': u'frank', u'path': u'/Users/frank', u'sid': u'4052'},
{u'name': u'hans', u'path': u'/Users/hans', u'sid': u'4352'},
{u'name': u'dude', u'path': u'/Users/dude', u'sid': u'1123'}]
_WINDOWS_USERS = [
{u'name': u'dude', u'path': u'C:\\Users\\dude', u'sid': u'S-1'},
{u'name': u'frank', u'path': u'C:\\Users\\frank', u'sid': u'S-2'}]
@shared_test_lib.skipUnlessHasTestFile([u'chrome_extensions'])
def testGetPathSegmentSeparator(self):
"""Tests the _GetPathSegmentSeparator function."""
plugin = MockChromeExtensionPlugin()
for path in self._MACOSX_PATHS:
path_segment_separator = plugin._GetPathSegmentSeparator(path)
self.assertEqual(path_segment_separator, u'/')
for path in self._WINDOWS_PATHS:
path_segment_separator = plugin._GetPathSegmentSeparator(path)
self.assertEqual(path_segment_separator, u'\\')
@shared_test_lib.skipUnlessHasTestFile([u'chrome_extensions'])
def testExamineEventAndCompileReportMacOSXPaths(self):
"""Tests the ExamineEvent and CompileReport functions on Mac OS X paths."""
events = []
for path in self._MACOSX_PATHS:
event_dictionary = {
u'data_type': u'fs:stat',
u'filename': path,
u'timestamp': 12345,
u'timestamp_desc': u'Some stuff'}
event = self._CreateTestEventObject(event_dictionary)
events.append(event)
plugin = MockChromeExtensionPlugin()
storage_writer = self._AnalyzeEvents(
events, plugin, knowledge_base_values={u'users': self._MACOSX_USERS})
self.assertEqual(len(storage_writer.analysis_reports), 1)
analysis_report = storage_writer.analysis_reports[0]
self.assertEqual(plugin._sep, u'/')
# Due to the behavior of the join one additional empty string at the end
# is needed to create the last empty line.
expected_text = u'\n'.join([
u' == USER: dude ==',
u' Google Drive [apdfllckaahabafndbhieahigkjlhalf]',
u'',
u' == USER: frank ==',
u' Gmail [pjkljhegncpnkpknbcohdijeoejaedia]',
u'',
u''])
self.assertEqual(analysis_report.text, expected_text)
self.assertEqual(analysis_report.plugin_name, 'chrome_extension_test')
expected_keys = set([u'frank', u'dude'])
self.assertEqual(set(analysis_report.report_dict.keys()), expected_keys)
@shared_test_lib.skipUnlessHasTestFile([u'chrome_extensions'])
def testExamineEventAndCompileReportWindowsPaths(self):
"""Tests the ExamineEvent and CompileReport functions on Windows paths."""
events = []
for path in self._WINDOWS_PATHS:
event_dictionary = {
u'data_type': u'fs:stat',
u'filename': path,
u'timestamp': 12345,
u'timestamp_desc': u'Some stuff'}
event = self._CreateTestEventObject(event_dictionary)
events.append(event)
plugin = MockChromeExtensionPlugin()
storage_writer = self._AnalyzeEvents(
events, plugin, knowledge_base_values={u'users': self._WINDOWS_USERS})
self.assertEqual(len(storage_writer.analysis_reports), 1)
analysis_report = storage_writer.analysis_reports[0]
self.assertEqual(plugin._sep, u'\\')
# Due to the behavior of the join one additional empty string at the end
# is needed to create the last empty line.
expected_text = u'\n'.join([
u' == USER: dude ==',
u' Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]',
u'',
u' == USER: frank ==',
u' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]',
u' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]',
u'',
u''])
self.assertEqual(analysis_report.text, expected_text)
self.assertEqual(analysis_report.plugin_name, 'chrome_extension_test')
expected_keys = set([u'frank', u'dude'])
self.assertEqual(set(analysis_report.report_dict.keys()), expected_keys)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -3,100,256,595,166,724,600 | 35.37766 | 80 | 0.669542 | false |
0--key/lib | portfolio/2013_OrSys/dispatcher.py | 1 | 6451 | from flask import Flask, render_template, session, redirect, url_for, request
from flask import logging, g
from settings import users
from functions import fetch_pending_orders_data, fetch_products_data,\
fetch_suppliers_data, get_user_id, suppDataCheck, suppDataInsert, \
suppDataUpdate, setActiveTab, appendProduct, checkProduct, throw_product,\
fetch_invoices_data, removeProduct, getSupplierData, revocateOrder,\
sendPurchaseOrder, fetch_held_products, checkOTLock, grasp_product,\
eliminate_product
app = Flask(__name__)
@app.route('/')
def index():
"""Composes operator dashboard"""
if 'username' in session:
user = session['username']
logo = users.get(user).get('img')
else:
return redirect(url_for('login'))
if 'active_tab' not in session: # active tab defining
session['active_tab'] = 'orders' # <-- initial value
o_dataset, pii_data = fetch_pending_orders_data() # compose
otl = checkOTLock()
agg_products = fetch_products_data(pii_data) # tabs
supp_tab_data = fetch_suppliers_data()
p_invoices_tab_data = fetch_invoices_data('pending')
sent_PO_tab_data = fetch_invoices_data('sent')
heldP_tab_data = fetch_held_products()
a_tab = setActiveTab(session['active_tab'])
return render_template(
'index.htm', user=user, logo=logo, orders=o_dataset, o_t_lock=otl,
orders_agg=agg_products, agg_products_qty=len(agg_products),
active=a_tab, supp_data=supp_tab_data, pItab=p_invoices_tab_data,
sItab = sent_PO_tab_data, hTd = heldP_tab_data
)
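# Assumed shape of the `users` mapping imported from settings, inferred from
# the lookups above and in login() below (illustrative only):
#   users = {'operator': {'password': 'secret', 'img': 'operator.png'}}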
@app.route('/login', methods=['GET', 'POST'])
def login():
"""A primitive authentication feature"""
if request.method == 'POST':
input_username = request.form['username']
input_password = request.form['password']
if (input_username in users and
users.get(input_username).get('password') == input_password):
session['username'] = input_username
session['userID'] = get_user_id(input_username)
return redirect(url_for('index'))
return render_template('login.htm')
@app.route('/logout')
def logout():
"""LogOut implementation"""
session.pop('username', None)
return redirect(url_for('login'))
@app.route('/addNewSupplier')
def addS_modal_form():
"""Modal for upload data about a new supplier"""
app.logger.debug('This is SupplierForm modal')
sup_data = {'city': 'Sydney', 'province': 'New South Wales'}
return render_template('addNewSupplierForm.htm', sup_data=sup_data)
@app.route('/editSupplier', methods=['GET'])
def editS_modal_form():
"""Modal for upload data about a new supplier"""
app.logger.debug('This is editSupplierForm')
sup_data = getSupplierData(request.args.get('s_id'))
return render_template('editSupplierForm.htm', sup_data=sup_data)
@app.route('/SupplierDataFiller', methods=['GET', 'POST'])
def supplierDataFill():
"""Manipulation with the input data and redirect"""
app.logger.debug('This is supplier data filler')
if request.method == 'POST':
(pure_data, check_up) = suppDataCheck(request.form)
if check_up == 'new':
suppDataInsert(pure_data, session['userID'])
session['active_tab'] = 'supplier'
elif check_up == 'known':
suppDataUpdate(pure_data, session['userID'])
session['active_tab'] = 'supplier'
elif check_up == 'update':
suppDataUpdate(pure_data, session['userID'])
session['active_tab'] = 'supplier'
return redirect(url_for('index'))
@app.route('/appendItem', methods=['GET', 'POST'])
def appendItem():
"""Includes product into invoice and redirect"""
app.logger.debug('This is appendItem to PO process')
if request.method == 'POST':
(prod_properties, check_up) = checkProduct(request.form)
if check_up:
appendProduct(prod_properties, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
@app.route('/removeItem', methods=['GET', 'POST'])
def freeItem():
"""Removes product out from invoice and redirect"""
app.logger.debug('This is freeItem out from PO process')
if request.method == 'POST':
removeProduct(session['userID'], request.form['piID'])
session['active_tab'] = 'invoices'
return redirect(url_for('index'))
@app.route('/toggleOrder', methods=['GET'])
def toggleOrder():
"""Exclude or include order and its products out from
processing and redirect to index page"""
o_id = request.args.get('o_id')
app.logger.debug('This is revOrder id=%s' % (o_id,))
revocateOrder(o_id, session['username'])
session['active_tab'] = 'orders'
return redirect(url_for('index'))
@app.route('/sendPO', methods=['GET'])
def sendPurOrder():
"""Organize application output"""
i_id = request.args.get('i_id')
app.logger.debug('This is send purchase order with id=%s' % (i_id,))
sendPurchaseOrder(i_id, session['username'])
session['active_tab'] = 'invoices'
return redirect(url_for('index'))
@app.route('/graspProduct', methods=['GET'])
def graspProduct():
"""Move product to the pail"""
sku = request.args.get('p_id')
app.logger.debug('This is grasp product with sku=%s and userID=%s' %
(sku, session['userID']))
result = grasp_product(sku, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
@app.route('/throwProduct', methods=['GET'])
def throwProduct():
"""Move product to the agg product tab"""
pipID = request.args.get('p_id')
app.logger.debug('This is throw product with ID=%s out from product pail \
and userID=%s' % (pipID, session['userID']))
result = throw_product(pipID, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
@app.route('/eliminateProduct', methods=['GET'])
def eliminateProduct():
"""Move product to the trash"""
pipID = request.args.get('p_id')
app.logger.debug('This is eliminate product with ID=%s out from product\
pail and userID=%s' % (pipID, session['userID']))
result = eliminate_product(pipID, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
| apache-2.0 | 8,784,297,107,164,680,000 | 35.862857 | 79 | 0.647341 | false |
liqd/adhocracy3.mercator | src/adhocracy_core/adhocracy_core/sheets/embed.py | 2 | 2962 | """Embed Sheet."""
import os.path
from colander import deferred
from deform.widget import TextAreaWidget
from pyramid.interfaces import IRequest
from pyramid.renderers import render
from zope.interface import Interface
from adhocracy_core.interfaces import API_ROUTE_NAME
from adhocracy_core.interfaces import ISheet
from adhocracy_core.interfaces import IResource
from adhocracy_core.sheets import add_sheet_to_registry
from adhocracy_core.sheets import sheet_meta
from adhocracy_core.schema import MappingSchema
from adhocracy_core.schema import Text
from adhocracy_core.schema import URL
class IEmbed(ISheet):
"""Market interface for the embed sheet."""
class IEmbedCodeConfig(Interface):
"""Interface for embed code config mappings."""
def embed_code_config_adapter(context: IResource,
request: IRequest) -> {}:
"""Return mapping to render `adhocracy_core:templates/embed_code.html`."""
settings = request.registry['config']
frontend_url = settings.adhocracy.frontend_url
sdk_url = os.path.join(frontend_url, 'AdhocracySDK.js')
path = request.resource_url(context, route_name=API_ROUTE_NAME)
# TODO use frontend.locale instead
locale = settings.configurator.pyramid.default_locale_name
return {'sdk_url': sdk_url,
'frontend_url': frontend_url,
'path': path,
'widget': '',
'autoresize': 'false',
'locale': locale,
'autourl': 'false',
'initial_url': '',
'nocenter': 'true',
'noheader': 'false',
'style': 'height: 650px',
}
@deferred
def deferred_default_embed_code(node: MappingSchema, kw: dict) -> str:
"""Return html code to embed the current `context` resource."""
context = kw['context']
request = kw.get('request', None)
if request is None:
return ''
mapping = request.registry.getMultiAdapter((context, request),
IEmbedCodeConfig)
code = render('adhocracy_core:templates/embed_code.html.mako', mapping)
return code
class EmbedSchema(MappingSchema):
"""Embed sheet data structure.
`embed_code`: html code to embed the `context` resource in web pages.
`external_url`: canonical URL that embeds the `context` resource.
"""
embed_code = Text(default=deferred_default_embed_code,
widget=TextAreaWidget(rows=10),
)
external_url = URL()
embed_meta = sheet_meta._replace(isheet=IEmbed,
schema_class=EmbedSchema,
)
def includeme(config):
"""Register sheets and embed code config adapter."""
add_sheet_to_registry(embed_meta, config.registry)
config.registry.registerAdapter(embed_code_config_adapter,
(IResource, IRequest),
IEmbedCodeConfig)
| agpl-3.0 | -6,785,438,061,732,240,000 | 33.045977 | 78 | 0.63707 | false |
javiercantero/streamlink | src/streamlink/plugins/kingkong.py | 1 | 2561 | import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http, validate
from streamlink.stream import HTTPStream, HLSStream
API_URL = "https://g-api.langlive.com/webapi/v1/room/info?room_id={0}"
VOD_API_URL = (
"https://g-api.langlive.com/webapi/v1/replayer/detail?live_id={0}")
STATUS_ONLINE = 1
STATUS_OFFLINE = 0
STREAM_WEIGHTS = {
"360P": 360,
"480P": 480,
"720P": 720,
"source": 1080
}
_url_re = re.compile(r"""
https://www\.kingkong\.com\.tw/
(?:
video/(?P<vid>[0-9]+G[0-9A-Za-z]+)|
(?P<channel>[0-9]+)
)
""", re.VERBOSE)
_room_schema = validate.Schema(
{
"data": {
"live_info": {
"live_status": int,
"stream_items": [{
"title": validate.text,
"video": validate.any('', validate.url(
scheme="https",
path=validate.endswith(".flv")
))
}]
}
}
},
validate.get("data")
)
_vod_schema = validate.Schema(
{
"data": {
"live_info": {
"video": validate.text
}
}
},
validate.get("data")
)
class Kingkong(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
@classmethod
def stream_weight(cls, stream):
if stream in STREAM_WEIGHTS:
return STREAM_WEIGHTS[stream], "kingkong"
return Plugin.stream_weight(stream)
def _get_streams(self):
match = _url_re.match(self.url)
vid = match.group("vid")
if vid:
res = http.get(VOD_API_URL.format(vid))
data = http.json(res, schema=_vod_schema)
yield "source", HLSStream(
self.session, data["live_info"]["video"])
return
channel = match.group("channel")
res = http.get(API_URL.format(channel))
room = http.json(res, schema=_room_schema)
if not room:
self.logger.info("Not a valid room url.")
return
live_info = room["live_info"]
if live_info["live_status"] != STATUS_ONLINE:
self.logger.info("Stream currently unavailable.")
return
for item in live_info["stream_items"]:
quality = item["title"]
if quality == u"\u6700\u4f73": # "Best" in Chinese
quality = "source"
yield quality, HTTPStream(self.session, item["video"])
__plugin__ = Kingkong
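# Illustrative invocation from the command line (assumes streamlink is
# installed with this plugin available):
#   streamlink https://www.kingkong.com.tw/123456 720P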
| bsd-2-clause | 8,535,942,782,514,645,000 | 25.132653 | 71 | 0.5205 | false |
aarpon/pIceImarisConnector | pIceImarisConnector/test/TestPIcePyramidalCellXT.py | 1 | 13121 | # <CustomTools>
# <Menu>
# <Item name="pIceImarisConnector: Test PyramidalCell" icon="Python3" tooltip="Test function for pIceImarisConnector using the PyramidalCell demo dataset.">
# <Command>Python3XT::TestPIcePyramidalCellXT(%i)</Command>
# </Item>
# </Menu>
# </CustomTools>
import os
import numpy as np
from pIceImarisConnector import pIceImarisConnector
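# Illustrative direct call outside of the Imaris menu (assumes a running
# Imaris instance whose application ID is 0):
#   TestPIcePyramidalCellXT(0)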
def TestPIcePyramidalCellXT(aImarisId):
# Instantiate the pIceImarisConnector object
conn = pIceImarisConnector(aImarisId)
# Open the PyramidalCell file
    # =========================================================================
print('Load file...')
conn.loadPyramidalCellTestDataset()
# Check that there is something loaded
# =========================================================================
print('Test that the file was loaded...')
assert(conn.mImarisApplication.GetDataSet().GetSizeX() > 0)
# Check the extends
# =========================================================================
print('Check the dataset extends...')
EXTENDS = (-0.1140, 57.8398, -0.1140, 57.8398, -0.1510, 20.6310)
extends = conn.getExtends()
assert(all([abs(x - y) < 1e-4 for x, y in zip(EXTENDS, extends)]))
minX, maxX, minY, maxY, minZ, maxZ = conn.getExtends()
assert(all([abs(x - y) < 1e-4 for x, y in \
zip(EXTENDS, (minX, maxX, minY, maxY, minZ, maxZ))]))
# Check the voxel size
# =========================================================================
print('Check the voxel size...')
VOXELSIZES = (0.2273, 0.2282, 0.3012)
voxelSizes = conn.getVoxelSizes()
assert(all([abs(x - y) < 1e-4 for x, y in zip(VOXELSIZES, voxelSizes)]))
vX, vY, vZ = conn.getVoxelSizes()
assert(all([abs(x - y) < 1e-4 for x, y in zip(VOXELSIZES, (vX, vY, vZ))]))
# Check the dataset size
#
# X = 255
# Y = 254
# Z = 69
# C = 1
# T = 1
#
# =========================================================================
print('Check the dataset size...')
DATASETSIZE = (255, 254, 69, 1, 1)
sizes = conn.getSizes()
assert(DATASETSIZE == sizes)
sizeX, sizeY, sizeZ, sizeC, sizeT = conn.getSizes()
assert(sizeX == DATASETSIZE[0])
assert(sizeY == DATASETSIZE[1])
assert(sizeZ == DATASETSIZE[2])
assert(sizeC == DATASETSIZE[3])
assert(sizeT == DATASETSIZE[4])
# Get a spot object, its coordinates and check the unit conversions
# =========================================================================
print('Count all children at root level...')
children = conn.getAllSurpassChildren(False) # No recursion
assert(len(children) == 4)
# If the casting in getAllSurpassChildren() works, spot is an actual
# spot object, and not an IDataItem. If the casting worked, the object will
# have a method 'GetPositionsXYZ'.
print('Test autocasting...')
child = conn.getAllSurpassChildren(False, 'Spots')
assert(len(child) == 1)
spot = child[0]
assert(callable(getattr(spot, 'GetPositionsXYZ')) is True)
# Get the coordinates
pos = spot.GetPositionsXYZ()
# These are the expected spot coordinates
print('Check spot coordinates and conversions units<->pixels...')
POS = [
[18.5396, 1.4178, 8.7341],
[39.6139, 14.8819, 9.0352],
[35.1155, 9.4574, 9.0352],
[12.3907, 21.6221, 11.7459]]
assert(np.all(abs(np.array(pos) - np.array(POS)) < 1e-4))
# Convert
posV = conn.mapPositionsUnitsToVoxels(pos)
posU = conn.mapPositionsVoxelsToUnits(posV)
# Check the conversion
assert(np.all(abs(np.array(posU) - np.array(POS)) < 1e-4))
# Test filtering the selection
# =========================================================================
print('Test filtering the surpass selection by type...')
# "Select" the spots object
conn.mImarisApplication.SetSurpassSelection(children[3])
# Now get it back, first with the right filter, then with the wrong one
assert(isinstance(conn.getSurpassSelection('Spots'), type(children[3])))
assert(conn.getSurpassSelection('Surfaces') is None)
# Test creating and adding new spots
# =========================================================================
print('Test creation of new spots...')
vSpotsData = spot.Get()
coords = (np.array(vSpotsData.mPositionsXYZ) + 1.00).tolist()
timeIndices = vSpotsData.mIndicesT
radii = vSpotsData.mRadii
conn.createAndSetSpots(coords, timeIndices, radii, 'Test', np.random.uniform(0, 1, 4))
spots = conn.getAllSurpassChildren(False, 'Spots')
assert(len(spots) == 2)
# Check the filtering and recursion of object finding
# =========================================================================
print('Get all 7 children with recursion (no filtering)...')
children = conn.getAllSurpassChildren(True)
assert(len(children) == 7)
print('Check that there is exactly 1 Light Source...')
children = conn.getAllSurpassChildren(True, 'LightSource')
assert(len(children) == 1)
print('Check that there is exactly 1 Frame...')
children = conn.getAllSurpassChildren(True, 'Frame')
assert(len(children) == 1)
print('Check that there is exactly 1 Volume...')
children = conn.getAllSurpassChildren(True, 'Volume')
assert(len(children) == 1)
print('Check that there are exactly 2 Spots...')
children = conn.getAllSurpassChildren(True, 'Spots')
assert(len(children) == 2)
print('Check that there is exactly 1 Surface...')
children = conn.getAllSurpassChildren(True, 'Surfaces')
assert(len(children) == 1)
print('Check that there is exactly 1 Measurement Point...')
children = conn.getAllSurpassChildren(True, 'MeasurementPoints')
assert(len(children) == 1)
# Get the type
# =========================================================================
print('Get and check the datatype...')
datatype = conn.getNumpyDatatype()
assert(datatype == np.uint8)
# Get the data volume
# =========================================================================
print('Get the data volume...')
stack = conn.getDataVolume(0, 0)
# Get and check a data slice
print('Get and check a data slice...')
slice = conn.getDataSlice(34, 0, 0)
assert(np.all(stack[34, :, :] == slice))
print('Check the data volume type...')
assert(stack.dtype == conn.getNumpyDatatype())
# Check the sizes
print('Check the data volume size...')
x = stack.shape[2]
y = stack.shape[1]
z = stack.shape[0]
assert(x == DATASETSIZE[0])
assert(y == DATASETSIZE[1])
assert(z == DATASETSIZE[2])
# Get the data volume by explicitly passing an iDataSet object
# =========================================================================
print('Get the data volume by explicitly passing an iDataSet object...')
stack = conn.getDataVolume(0, 0, conn.mImarisApplication.GetDataSet())
# Get a slice
print('Get and check a data slice by explicitly passing an iDataSet object...')
slice = conn.getDataSlice(34, 0, 0, conn.mImarisApplication.GetDataSet())
assert(np.all(stack[34, :, :] == slice))
print('Check the data volume type...')
assert(stack.dtype == conn.getNumpyDatatype())
# Check the sizes
print('Check the data volume size...')
x = stack.shape[2]
y = stack.shape[1]
z = stack.shape[0]
assert(x == DATASETSIZE[0])
assert(y == DATASETSIZE[1])
assert(z == DATASETSIZE[2])
# Check the getDataSubVolume() vs. the getDataVolume methods
# =========================================================================
print('Check that subvolumes are extracted correctly...')
# Remember that with Numpy, to get the values between x0 and x, you must
# use this notation: x0 : x + 1
subVolume = conn.getDataSubVolume(112, 77, 38, 0, 0, 10, 10, 2)
subStack = stack[38: 40, 77: 87, 112: 122]
assert(np.array_equal(subStack, subVolume))
# Check the boundaries
# =========================================================================
print('Check subvolume boundaries...')
subVolume = conn.getDataSubVolume(0, 0, 0, 0, 0, x, y, z)
sX = subVolume.shape[2]
sY = subVolume.shape[1]
sZ = subVolume.shape[0]
assert(sX == DATASETSIZE[0])
assert(sY == DATASETSIZE[1])
assert(sZ == DATASETSIZE[2])
# Get the rotation matrix from the camera angle
# =========================================================================
print('Get the rotation matrix from the camera angle...')
R_D = np.array(\
[\
[0.8471, 0.2345, -0.4769, 0.0000], \
[-0.1484, 0.9661, 0.2115, 0.0000], \
[0.5103, -0.1084, 0.8532, 0.0000], \
[0.0000, 0.0000, 0.0000, 1.0000]], dtype=np.float32)
R, isI = conn.getSurpassCameraRotationMatrix()
assert(np.all((abs(R - R_D) < 1e-4)))
# Check getting/setting colors and transparency
# =========================================================================
print('Check getting/setting colors and transparency...')
children = conn.getAllSurpassChildren(True, 'Spots')
spots = children[0]
# We prepare some color/transparency combinations to circle through
clr = [ \
[1, 0, 0, 0.00], # Red, transparency = 0
[0, 1, 0, 0.00], # Green, transparency = 0
[0, 0, 1, 0.00], # Blue, transparency = 0
[1, 1, 0, 0.00], # Yellow, transparency = 0
[1, 0, 1, 0.00], # Purple, transparency = 0
[1, 0, 1, 0.25], # Purple, transparency = 0.25
[1, 0, 1, 0.50], # Purple, transparency = 0.50
[1, 0, 1, 0.75], # Purple, transparency = 0.75
[1, 0, 1, 1.00]] # Purple, transparency = 1.00
for c in clr:
# Set the RGBA color
spots.SetColorRGBA(conn.mapRgbaVectorToScalar(c))
# Get the RGBA color
current = conn.mapRgbaScalarToVector(spots.GetColorRGBA())
# Compare (rounding errors allowed)
assert(all([abs(x - y) < 1e-2 for x, y in zip(c, current)]))
# Copy channel a couple of times
# ==========================================================================
print('Test copying channels...')
conn.mImarisApplication.GetDataSet().SetChannelName(0, 'One')
conn.copyChannels(0)
conn.copyChannels([0, 1])
conn.copyChannels([0, 2])
conn.copyChannels(3)
channelNames = conn.getChannelNames()
assert(channelNames[0] == 'One')
assert(channelNames[1] == 'Copy of One')
assert(channelNames[2] == 'Copy of One')
assert(channelNames[3] == 'Copy of Copy of One')
assert(channelNames[4] == 'Copy of One')
assert(channelNames[5] == 'Copy of Copy of One')
assert(channelNames[6] == 'Copy of Copy of Copy of One')
# Create a dataset
# =========================================================================
print('Create a dataset (replace existing one)...')
conn.createDataSet('uint16', 100, 200, 50, 3, 10, 0.20, 0.25, 0.5, 0.1)
# Check sizes
# =========================================================================
print('Check sizes...')
sizes = conn.getSizes()
assert(sizes[0] == 100)
assert(sizes[1] == 200)
assert(sizes[2] == 50)
assert(sizes[3] == 3)
assert(sizes[4] == 10)
# Check voxel sizes
# =========================================================================
print('Check voxel sizes...')
voxelSizes = conn.getVoxelSizes()
assert(voxelSizes[0] == 0.2)
assert(voxelSizes[1] == 0.25)
assert(voxelSizes[2] == 0.5)
# Send a data volume
# =========================================================================
print('Send volume (force dataset creation)...')
stack = np.zeros((100, 200, 50), dtype = np.uint16)
conn.setDataVolume(stack, 0, 0)
# Test retrieving volume and slice for a non 8-bit dataset
print('Test retrieving volume and slice for a non 8-bit dataset...')
volume16 = conn.getDataVolume(0, 0)
slice16 = conn.getDataSlice(1, 0, 0)
assert(np.all(volume16[1, :, :] == slice16))
# Check the time delta
# =========================================================================
print('Check time interval...')
assert(conn.mImarisApplication.GetDataSet().GetTimePointsDelta() == 0.1)
# Check transferring volume data
# =========================================================================
print('Check two-way data volume transfer...')
data = np.zeros((2, 255, 255), dtype=np.uint8)
x = np.linspace(1, 255, 255)
y = np.linspace(1, 255, 255)
xv, yv = np.meshgrid(x, y)
data[0, :, :] = x
data[1, :, :] = y
data = data[:, :, 1:255] # Make it not square in xy
conn.createDataSet('uint8', 254, 255, 2, 1, 1)
conn.setDataVolume(data, 0, 0)
dataOut = conn.getDataVolume(0, 0)
assert(np.array_equal(data, dataOut))
| gpl-2.0 | -2,556,390,905,004,136,400 | 37.478006 | 159 | 0.540584 | false |
MRtrix3/mrtrix3 | lib/mrtrix3/fsl.py | 1 | 6782 | # Copyright (c) 2008-2021 the MRtrix3 contributors.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Covered Software is provided under this License on an "as is"
# basis, without warranty of any kind, either expressed, implied, or
# statutory, including, without limitation, warranties that the
# Covered Software is free of defects, merchantable, fit for a
# particular purpose or non-infringing.
# See the Mozilla Public License v. 2.0 for more details.
#
# For more details, see http://www.mrtrix.org/.
import os
from distutils.spawn import find_executable
from mrtrix3 import MRtrixError
_SUFFIX = ''
# Functions that may be useful for scripts that interface with FMRIB FSL tools
# FSL's run_first_all script can be difficult to wrap, since it does not provide
# a meaningful return code, and may run via SGE, which then requires waiting for
# the output files to appear.
def check_first(prefix, structures): #pylint: disable=unused-variable
from mrtrix3 import app, path #pylint: disable=import-outside-toplevel
vtk_files = [ prefix + '-' + struct + '_first.vtk' for struct in structures ]
existing_file_count = sum([ os.path.exists(filename) for filename in vtk_files ])
if existing_file_count != len(vtk_files):
if 'SGE_ROOT' in os.environ and os.environ['SGE_ROOT']:
app.console('FSL FIRST job may have been run via SGE; awaiting completion')
app.console('(note however that FIRST may fail silently, and hence this script may hang indefinitely)')
path.wait_for(vtk_files)
else:
app.DO_CLEANUP = False
raise MRtrixError('FSL FIRST has failed; ' + ('only ' if existing_file_count else '') + str(existing_file_count) + ' of ' + str(len(vtk_files)) + ' structures were segmented successfully (check ' + path.to_scratch('first.logs', False) + ')')
# Get the name of the binary file that should be invoked to run eddy;
# this depends on both whether or not the user has requested that the CUDA
# version of eddy be used, and the various names that this command could
# conceivably be installed as.
def eddy_binary(cuda): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
if cuda:
if find_executable('eddy_cuda'):
app.debug('Selected soft-linked CUDA version (\'eddy_cuda\')')
return 'eddy_cuda'
# Cuda versions are now provided with a CUDA trailing version number
# Users may not necessarily create a softlink to one of these and
# call it "eddy_cuda"
# Therefore, hunt through PATH looking for them; if more than one,
# select the one with the highest version number
binaries = [ ]
for directory in os.environ['PATH'].split(os.pathsep):
if os.path.isdir(directory):
for entry in os.listdir(directory):
if entry.startswith('eddy_cuda'):
binaries.append(entry)
max_version = 0.0
exe_path = ''
for entry in binaries:
try:
version = float(entry.lstrip('eddy_cuda'))
if version > max_version:
max_version = version
exe_path = entry
except:
pass
if exe_path:
app.debug('CUDA version ' + str(max_version) + ': ' + exe_path)
return exe_path
app.debug('No CUDA version of eddy found')
return ''
for candidate in [ 'eddy_openmp', 'eddy_cpu', 'eddy', 'fsl5.0-eddy' ]:
if find_executable(candidate):
app.debug(candidate)
return candidate
app.debug('No CPU version of eddy found')
return ''
# In some FSL installations, all binaries get prepended with "fsl5.0-". This function
# makes it more convenient to locate these commands.
# Note that if FSL 4 and 5 are installed side-by-side, the approach taken in this
# function will select the version 5 executable.
def exe_name(name): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
if find_executable(name):
output = name
elif find_executable('fsl5.0-' + name):
output = 'fsl5.0-' + name
app.warn('Using FSL binary \"' + output + '\" rather than \"' + name + '\"; suggest checking FSL installation')
else:
raise MRtrixError('Could not find FSL program \"' + name + '\"; please verify FSL install')
app.debug(output)
return output
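# Illustrative usage (not part of this module; assumes FSL's 'flirt' is
# installed and that the mrtrix3 'run' module is imported by the caller):
#   flirt_cmd = exe_name('flirt')
#   run.command(flirt_cmd + ' -in moving' + suffix() + ' -ref fixed' + suffix()
#               + ' -out registered' + suffix())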
# In some versions of FSL, even though we try to predict the names of image files that
# FSL commands will generate based on the suffix() function, the FSL binaries themselves
# ignore the FSLOUTPUTTYPE environment variable. Therefore, the safest approach is:
# Whenever receiving an output image from an FSL command, explicitly search for the path
def find_image(name): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
prefix = os.path.join(os.path.dirname(name), os.path.basename(name).split('.')[0])
if os.path.isfile(prefix + suffix()):
app.debug('Image at expected location: \"' + prefix + suffix() + '\"')
return prefix + suffix()
for suf in ['.nii', '.nii.gz', '.img']:
if os.path.isfile(prefix + suf):
app.debug('Expected image at \"' + prefix + suffix() + '\", but found at \"' + prefix + suf + '\"')
return prefix + suf
raise MRtrixError('Unable to find FSL output file for path \"' + name + '\"')
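# Illustrative usage (assumes 'bet' was run with output prefix 'brain'; the
# actual extension written by FSL may differ from the one requested):
#   brain_image = find_image('brain' + suffix())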
# For many FSL commands, the format of any output images will depend on the string
# stored in 'FSLOUTPUTTYPE'. This may even override a filename extension provided
# to the relevant command. Therefore use this function to 'guess' what the names
# of images provided by FSL commands will be.
def suffix(): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
global _SUFFIX
if _SUFFIX:
return _SUFFIX
fsl_output_type = os.environ.get('FSLOUTPUTTYPE', '')
if fsl_output_type == 'NIFTI':
app.debug('NIFTI -> .nii')
_SUFFIX = '.nii'
elif fsl_output_type == 'NIFTI_GZ':
app.debug('NIFTI_GZ -> .nii.gz')
_SUFFIX = '.nii.gz'
elif fsl_output_type == 'NIFTI_PAIR':
app.debug('NIFTI_PAIR -> .img')
_SUFFIX = '.img'
elif fsl_output_type == 'NIFTI_PAIR_GZ':
raise MRtrixError('MRtrix3 does not support compressed NIFTI pairs; please change FSLOUTPUTTYPE environment variable')
elif fsl_output_type:
app.warn('Unrecognised value for environment variable FSLOUTPUTTYPE (\"' + fsl_output_type + '\"): Expecting compressed NIfTIs, but FSL commands may fail')
_SUFFIX = '.nii.gz'
else:
app.warn('Environment variable FSLOUTPUTTYPE not set; FSL commands may fail, or script may fail to locate FSL command outputs')
_SUFFIX = '.nii.gz'
return _SUFFIX
| mpl-2.0 | 6,801,620,265,861,604,000 | 42.754839 | 247 | 0.692716 | false |
ESS-LLP/erpnext-healthcare | erpnext/regional/italy/utils.py | 1 | 13784 | from __future__ import unicode_literals
import frappe, json, os
from frappe.utils import flt, cstr
from erpnext.controllers.taxes_and_totals import get_itemised_tax
from frappe import _
from frappe.utils.file_manager import save_file, remove_file
from frappe.desk.form.load import get_attachments
from erpnext.regional.italy import state_codes
def update_itemised_tax_data(doc):
if not doc.taxes: return
itemised_tax = get_itemised_tax(doc.taxes)
for row in doc.items:
tax_rate = 0.0
if itemised_tax.get(row.item_code):
tax_rate = sum([tax.get('tax_rate', 0) for d, tax in itemised_tax.get(row.item_code).items()])
row.tax_rate = flt(tax_rate, row.precision("tax_rate"))
row.tax_amount = flt((row.net_amount * tax_rate) / 100, row.precision("net_amount"))
row.total_amount = flt((row.net_amount + row.tax_amount), row.precision("total_amount"))
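# Worked example (illustrative): for an item row with net_amount 100.0 and a
# single itemised VAT entry at 22%, the loop above yields tax_rate=22.0,
# tax_amount=22.0 and total_amount=122.0.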
@frappe.whitelist()
def export_invoices(filters=None):
saved_xmls = []
invoices = frappe.get_all("Sales Invoice", filters=get_conditions(filters), fields=["*"])
for invoice in invoices:
attachments = get_e_invoice_attachments(invoice)
saved_xmls += [attachment.file_name for attachment in attachments]
zip_filename = "{0}-einvoices.zip".format(frappe.utils.get_datetime().strftime("%Y%m%d_%H%M%S"))
download_zip(saved_xmls, zip_filename)
@frappe.whitelist()
def prepare_invoice(invoice, progressive_number):
#set company information
company = frappe.get_doc("Company", invoice.company)
invoice.progressive_number = progressive_number
invoice.unamended_name = get_unamended_name(invoice)
invoice.company_data = company
company_address = frappe.get_doc("Address", invoice.company_address)
invoice.company_address_data = company_address
#Set invoice type
if invoice.is_return and invoice.return_against:
invoice.type_of_document = "TD04" #Credit Note (Nota di Credito)
invoice.return_against_unamended = get_unamended_name(frappe.get_doc("Sales Invoice", invoice.return_against))
else:
invoice.type_of_document = "TD01" #Sales Invoice (Fattura)
#set customer information
invoice.customer_data = frappe.get_doc("Customer", invoice.customer)
customer_address = frappe.get_doc("Address", invoice.customer_address)
invoice.customer_address_data = customer_address
if invoice.shipping_address_name:
invoice.shipping_address_data = frappe.get_doc("Address", invoice.shipping_address_name)
if invoice.customer_data.is_public_administration:
invoice.transmission_format_code = "FPA12"
else:
invoice.transmission_format_code = "FPR12"
invoice.e_invoice_items = [item for item in invoice.items]
tax_data = get_invoice_summary(invoice.e_invoice_items, invoice.taxes)
invoice.tax_data = tax_data
#Check if stamp duty (Bollo) of 2 EUR exists.
stamp_duty_charge_row = next((tax for tax in invoice.taxes if tax.charge_type == _("Actual") and tax.tax_amount == 2.0 ), None)
if stamp_duty_charge_row:
invoice.stamp_duty = stamp_duty_charge_row.tax_amount
for item in invoice.e_invoice_items:
if item.tax_rate == 0.0 and item.tax_amount == 0.0:
item.tax_exemption_reason = tax_data["0.0"]["tax_exemption_reason"]
return invoice
def get_conditions(filters):
filters = json.loads(filters)
conditions = {"docstatus": 1}
if filters.get("company"): conditions["company"] = filters["company"]
if filters.get("customer"): conditions["customer"] = filters["customer"]
if filters.get("from_date"): conditions["posting_date"] = (">=", filters["from_date"])
if filters.get("to_date"): conditions["posting_date"] = ("<=", filters["to_date"])
if filters.get("from_date") and filters.get("to_date"):
conditions["posting_date"] = ("between", [filters.get("from_date"), filters.get("to_date")])
return conditions
#TODO: Use function from frappe once PR #6853 is merged.
def download_zip(files, output_filename):
from zipfile import ZipFile
input_files = [frappe.get_site_path('private', 'files', filename) for filename in files]
output_path = frappe.get_site_path('private', 'files', output_filename)
with ZipFile(output_path, 'w') as output_zip:
for input_file in input_files:
output_zip.write(input_file, arcname=os.path.basename(input_file))
with open(output_path, 'rb') as fileobj:
filedata = fileobj.read()
frappe.local.response.filename = output_filename
frappe.local.response.filecontent = filedata
frappe.local.response.type = "download"
def get_invoice_summary(items, taxes):
summary_data = frappe._dict()
for tax in taxes:
#Include only VAT charges.
if tax.charge_type == "Actual":
continue
#Charges to appear as items in the e-invoice.
if tax.charge_type in ["On Previous Row Total", "On Previous Row Amount"]:
reference_row = next((row for row in taxes if row.idx == int(tax.row_id or 0)), None)
if reference_row:
items.append(
frappe._dict(
idx=len(items)+1,
item_code=reference_row.description,
item_name=reference_row.description,
rate=reference_row.tax_amount,
qty=1.0,
amount=reference_row.tax_amount,
stock_uom=frappe.db.get_single_value("Stock Settings", "stock_uom") or _("Nos"),
tax_rate=tax.rate,
tax_amount=(reference_row.tax_amount * tax.rate) / 100,
net_amount=reference_row.tax_amount,
taxable_amount=reference_row.tax_amount,
item_tax_rate="{}",
charges=True
)
)
#Check item tax rates if tax rate is zero.
if tax.rate == 0:
for item in items:
item_tax_rate = json.loads(item.item_tax_rate)
if tax.account_head in item_tax_rate:
key = cstr(item_tax_rate[tax.account_head])
summary_data.setdefault(key, {"tax_amount": 0.0, "taxable_amount": 0.0, "tax_exemption_reason": "", "tax_exemption_law": ""})
summary_data[key]["tax_amount"] += item.tax_amount
summary_data[key]["taxable_amount"] += item.net_amount
if key == "0.0":
summary_data[key]["tax_exemption_reason"] = tax.tax_exemption_reason
summary_data[key]["tax_exemption_law"] = tax.tax_exemption_law
if summary_data == {}: #Implies that Zero VAT has not been set on any item.
summary_data.setdefault("0.0", {"tax_amount": 0.0, "taxable_amount": tax.total,
"tax_exemption_reason": tax.tax_exemption_reason, "tax_exemption_law": tax.tax_exemption_law})
else:
item_wise_tax_detail = json.loads(tax.item_wise_tax_detail)
for rate_item in [tax_item for tax_item in item_wise_tax_detail.items() if tax_item[1][0] == tax.rate]:
key = cstr(tax.rate)
if not summary_data.get(key): summary_data.setdefault(key, {"tax_amount": 0.0, "taxable_amount": 0.0})
summary_data[key]["tax_amount"] += rate_item[1][1]
summary_data[key]["taxable_amount"] += sum([item.net_amount for item in items if item.item_code == rate_item[0]])
for item in items:
key = cstr(tax.rate)
if item.get("charges"):
if not summary_data.get(key): summary_data.setdefault(key, {"taxable_amount": 0.0})
summary_data[key]["taxable_amount"] += item.taxable_amount
return summary_data
#Preflight for successful e-invoice export.
def sales_invoice_validate(doc):
#Validate company
if doc.doctype != 'Sales Invoice':
return
if not doc.company_address:
frappe.throw(_("Please set an Address on the Company '%s'" % doc.company), title=_("E-Invoicing Information Missing"))
else:
validate_address(doc.company_address)
company_fiscal_regime = frappe.get_cached_value("Company", doc.company, 'fiscal_regime')
if not company_fiscal_regime:
frappe.throw(_("Fiscal Regime is mandatory, kindly set the fiscal regime in the company {0}")
.format(doc.company))
else:
doc.company_fiscal_regime = company_fiscal_regime
if not doc.company_tax_id and not doc.company_fiscal_code:
frappe.throw(_("Please set either the Tax ID or Fiscal Code on Company '%s'" % doc.company), title=_("E-Invoicing Information Missing"))
#Validate customer details
customer_type, is_public_administration = frappe.db.get_value("Customer", doc.customer, ["customer_type", "is_public_administration"])
if customer_type == _("Individual"):
if not doc.customer_fiscal_code:
frappe.throw(_("Please set Fiscal Code for the customer '%s'" % doc.customer), title=_("E-Invoicing Information Missing"))
else:
if is_public_administration:
if not doc.customer_fiscal_code:
frappe.throw(_("Please set Fiscal Code for the public administration '%s'" % doc.customer), title=_("E-Invoicing Information Missing"))
else:
if not doc.tax_id:
frappe.throw(_("Please set Tax ID for the customer '%s'" % doc.customer), title=_("E-Invoicing Information Missing"))
if not doc.customer_address:
frappe.throw(_("Please set the Customer Address"), title=_("E-Invoicing Information Missing"))
else:
validate_address(doc.customer_address)
if not len(doc.taxes):
frappe.throw(_("Please set at least one row in the Taxes and Charges Table"), title=_("E-Invoicing Information Missing"))
else:
for row in doc.taxes:
if row.rate == 0 and row.tax_amount == 0 and not row.tax_exemption_reason:
frappe.throw(_("Row {0}: Please set at Tax Exemption Reason in Sales Taxes and Charges".format(row.idx)),
title=_("E-Invoicing Information Missing"))
for schedule in doc.payment_schedule:
if schedule.mode_of_payment and not schedule.mode_of_payment_code:
schedule.mode_of_payment_code = frappe.get_cached_value('Mode of Payment',
schedule.mode_of_payment, 'mode_of_payment_code')
#Ensure payment details are valid for e-invoice.
def sales_invoice_on_submit(doc, method):
#Validate payment details
if get_company_country(doc.company) not in ['Italy',
'Italia', 'Italian Republic', 'Repubblica Italiana']:
return
if not len(doc.payment_schedule):
frappe.throw(_("Please set the Payment Schedule"), title=_("E-Invoicing Information Missing"))
else:
for schedule in doc.payment_schedule:
if not schedule.mode_of_payment:
frappe.throw(_("Row {0}: Please set the Mode of Payment in Payment Schedule".format(schedule.idx)),
title=_("E-Invoicing Information Missing"))
elif not frappe.db.get_value("Mode of Payment", schedule.mode_of_payment, "mode_of_payment_code"):
frappe.throw(_("Row {0}: Please set the correct code on Mode of Payment {1}".format(schedule.idx, schedule.mode_of_payment)),
title=_("E-Invoicing Information Missing"))
prepare_and_attach_invoice(doc)
def prepare_and_attach_invoice(doc, replace=False):
progressive_name, progressive_number = get_progressive_name_and_number(doc, replace)
invoice = prepare_invoice(doc, progressive_number)
invoice_xml = frappe.render_template('erpnext/regional/italy/e-invoice.xml', context={"doc": invoice}, is_path=True)
	invoice_xml = invoice_xml.replace("&", "&amp;")
xml_filename = progressive_name + ".xml"
return save_file(xml_filename, invoice_xml, dt=doc.doctype, dn=doc.name, is_private=True)
@frappe.whitelist()
def generate_single_invoice(docname):
doc = frappe.get_doc("Sales Invoice", docname)
e_invoice = prepare_and_attach_invoice(doc, True)
content = None
with open(frappe.get_site_path('private', 'files', e_invoice.file_name), "r") as f:
content = f.read()
frappe.local.response.filename = e_invoice.file_name
frappe.local.response.filecontent = content
frappe.local.response.type = "download"
#Delete e-invoice attachment on cancel.
def sales_invoice_on_cancel(doc, method):
if get_company_country(doc.company) not in ['Italy',
'Italia', 'Italian Republic', 'Repubblica Italiana']:
return
for attachment in get_e_invoice_attachments(doc):
remove_file(attachment.name, attached_to_doctype=doc.doctype, attached_to_name=doc.name)
def get_company_country(company):
return frappe.get_cached_value('Company', company, 'country')
def get_e_invoice_attachments(invoice):
out = []
attachments = get_attachments(invoice.doctype, invoice.name)
company_tax_id = invoice.company_tax_id if invoice.company_tax_id.startswith("IT") else "IT" + invoice.company_tax_id
for attachment in attachments:
if attachment.file_name and attachment.file_name.startswith(company_tax_id) and attachment.file_name.endswith(".xml"):
out.append(attachment)
return out
def validate_address(address_name):
fields = ["pincode", "city", "country_code"]
data = frappe.get_cached_value("Address", address_name, fields, as_dict=1) or {}
for field in fields:
if not data.get(field):
frappe.throw(_("Please set {0} for address {1}".format(field.replace('-',''), address_name)),
title=_("E-Invoicing Information Missing"))
def get_unamended_name(doc):
attributes = ["naming_series", "amended_from"]
for attribute in attributes:
if not hasattr(doc, attribute):
return doc.name
if doc.amended_from:
return "-".join(doc.name.split("-")[:-1])
else:
return doc.name
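# Hedged illustration (not part of the original module; document names below are
# made-up): Frappe names an amended document by appending "-<n>" to the original
# name, so get_unamended_name maps an amendment back to its source record.
#
#   >>> doc = frappe._dict(naming_series="SINV-", amended_from="SINV-0001",
#   ...                    name="SINV-0001-1")
#   >>> get_unamended_name(doc)
#   'SINV-0001'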
def get_progressive_name_and_number(doc, replace=False):
if replace:
for attachment in get_e_invoice_attachments(doc):
remove_file(attachment.name, attached_to_doctype=doc.doctype, attached_to_name=doc.name)
filename = attachment.file_name.split(".xml")[0]
return filename, filename.split("_")[1]
company_tax_id = doc.company_tax_id if doc.company_tax_id.startswith("IT") else "IT" + doc.company_tax_id
progressive_name = frappe.model.naming.make_autoname(company_tax_id + "_.#####")
progressive_number = progressive_name.split("_")[1]
return progressive_name, progressive_number
def set_state_code(doc, method):
if doc.get('country_code'):
doc.country_code = doc.country_code.upper()
if not doc.get('state'):
return
if not (hasattr(doc, "state_code") and doc.country in ["Italy", "Italia", "Italian Republic", "Repubblica Italiana"]):
return
state_codes_lower = {key.lower():value for key,value in state_codes.items()}
state = doc.get('state','').lower()
if state_codes_lower.get(state):
doc.state_code = state_codes_lower.get(state)
| gpl-3.0 | -7,304,240,070,135,029,000 | 38.495702 | 139 | 0.714089 | false |
GoogleChrome/chromium-dashboard | pages/intentpreview_test.py | 1 | 6198 | # Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import testing_config # Must be imported before the module under test.
import mock
import flask
import werkzeug
from pages import intentpreview
from internals import models
class IntentEmailPreviewHandlerTest(testing_config.CustomTestCase):
def setUp(self):
self.feature_1 = models.Feature(
name='feature one', summary='sum', category=1, visibility=1,
standardization=1, web_dev_views=1, impl_status_chrome=1,
intent_stage=models.INTENT_IMPLEMENT)
self.feature_1.put()
self.request_path = '/admin/features/launch/%d/%d?intent' % (
models.INTENT_SHIP, self.feature_1.key.integer_id())
self.handler = intentpreview.IntentEmailPreviewHandler()
def tearDown(self):
self.feature_1.key.delete()
def test_get__anon(self):
"""Anon cannot view this preview features, gets redirected to login."""
testing_config.sign_out()
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(self.request_path):
actual_response = self.handler.get_template_data(feature_id=feature_id)
self.assertEqual('302 FOUND', actual_response.status)
def test_get__no_existing(self):
"""Trying to view a feature that does not exist gives a 404."""
testing_config.sign_in('[email protected]', 123567890)
bad_feature_id = self.feature_1.key.integer_id() + 1
with intentpreview.app.test_request_context(self.request_path):
with self.assertRaises(werkzeug.exceptions.NotFound):
self.handler.get_template_data(feature_id=bad_feature_id)
def test_get__no_stage_specified(self):
"""Allowed user can preview intent email for a feature using an old URL."""
request_path = (
'/admin/features/launch/%d?intent' % self.feature_1.key.integer_id())
testing_config.sign_in('[email protected]', 123567890)
feature_id = self.feature_1.key.integer_id()
    with intentpreview.app.test_request_context(request_path):
actual_data = self.handler.get_template_data(feature_id=feature_id)
self.assertIn('feature', actual_data)
self.assertEqual('feature one', actual_data['feature']['name'])
def test_get__normal(self):
"""Allowed user can preview intent email for a feature."""
testing_config.sign_in('[email protected]', 123567890)
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(self.request_path):
actual_data = self.handler.get_template_data(feature_id=feature_id)
self.assertIn('feature', actual_data)
self.assertEqual('feature one', actual_data['feature']['name'])
def test_get_page_data(self):
"""page_data has correct values."""
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(self.request_path):
page_data = self.handler.get_page_data(
feature_id, self.feature_1, models.INTENT_IMPLEMENT)
self.assertEqual(
'http://localhost/feature/%d' % feature_id,
page_data['default_url'])
self.assertEqual(
['motivation'],
page_data['sections_to_show'])
self.assertEqual(
'Intent to Prototype',
page_data['subject_prefix'])
def test_compute_subject_prefix__incubate_new_feature(self):
"""We offer users the correct subject line for each intent stage."""
self.assertEqual(
'Intent stage "None"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_NONE))
self.assertEqual(
'Intent stage "Start incubating"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_INCUBATE))
self.assertEqual(
'Intent to Prototype',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_IMPLEMENT))
self.assertEqual(
'Ready for Trial',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_EXPERIMENT))
self.assertEqual(
'Intent stage "Evaluate readiness to ship"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_IMPLEMENT_SHIP))
self.assertEqual(
'Intent to Experiment',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_EXTEND_TRIAL))
self.assertEqual(
'Intent to Ship',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_SHIP))
self.assertEqual(
'Intent to Extend Deprecation Trial',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_REMOVED))
self.assertEqual(
'Intent stage "Shipped"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_SHIPPED))
self.assertEqual(
'Intent stage "Parked"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_PARKED))
def test_compute_subject_prefix__deprecate_feature(self):
"""We offer users the correct subject line for each intent stage."""
self.feature_1.feature_type = models.FEATURE_TYPE_DEPRECATION_ID
self.assertEqual(
'Intent stage "None"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_NONE))
self.assertEqual(
'Intent to Deprecate and Remove',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_INCUBATE))
self.assertEqual(
'Request for Deprecation Trial',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_EXTEND_TRIAL)) | apache-2.0 | 5,732,362,012,227,741,000 | 36.569697 | 79 | 0.68264 | false |
kmadathil/sanskrit_parser | sanskrit_parser/base/maheshvara_sutra.py | 1 | 8576 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Intro
======
Get varnas in a pratyahara:
.. code:: python
>>> from sanskrit_parser.base.maheshvara_sutra import MaheshvaraSutras
>>> MS = MaheshvaraSutras()
>>> jaS = SanskritImmutableString('jaS', encoding=SLP1)
>>> print(MS.getPratyahara(jaS))
jabagaqada
Check if a varna is in a pratyahara:
.. code:: python
>>> g = SanskritImmutableString('g')
>>> print(MS.isInPratyahara(jaS, g))
True
>>> k = SanskritImmutableString('k')
>>> print(MS.isInPratyahara(jaS, k))
False
Command line usage
==================
::
$ python -m sanskrit_parser.base.maheshvara_sutra --encoding SLP1 --pratyahara jaS
aiuR fxk eoN EOc hayavaraw laR YamaNaRanam JaBaY GaQaDaz jabagaqadaS KaPaCaWaTacawatav kapay Sazasar hal
जश्
जबगडद
"""
from __future__ import print_function
from . import sanskrit_base
import re
import six
class MaheshvaraSutras(object):
"""
Singleton MaheshvaraSutras class
Attributes:
MS(SanskritImmutableString) : Internal representation of mAheshvara sutras
MSS(str) : Canonical (SLP1) representation
"""
def __init__(self):
"""
Initialize Maheshvara Sutras object
"""
# Note that a space is deliberately left after each it to help in
# demarcating them.
self.MS = sanskrit_base.SanskritImmutableString(
u'अइउण् ऋऌक् एओङ् ऐऔच् हयवरट् लण् ञमङणनम् झभञ् घढधष् जबगडदश् खफछठथचटतव् कपय् शषसर् हल् ',
sanskrit_base.DEVANAGARI)
# SLP1 version for internal operations
self.MSS = self.MS.canonical()
def __str__(self):
# Use SLP1 for default string output
return self.MSS
def getPratyahara(self, p, longp=True, remove_a=False, dirghas=False):
"""
Return list of varnas covered by a pratyahara
Args:
p(:class:SanskritImmutableString): Pratyahara
longp(boolean :optional:): When True (default), uses long pratyaharas
remove_a(boolean :optional:): When True, removes intermediate 'a'.This is better for computational use
dirghas(boolean :optional:) When True (default=False) adds dirgha vowels to the returned varnas
Returns:
(SanskritImmutableString): List of varnas to the same encoding as p
"""
# SLP1 encoded pratyahara string
ps = p.canonical()
# it - halantyam
pit = ps[-1]
# Non it - all except it
pnit = ps[:-1]
# Non it position
pnpos = self.MSS.find(pnit)
# It position - space added to match it marker in internal
# representation
if longp: # Find last occurence of it
pitpos = self.MSS.rfind(pit + ' ', pnpos)
else: # Find first occurence of it
pitpos = self.MSS.find(pit + ' ', pnpos)
# Substring. This includes intermediate its and spaces
ts = self.MSS[pnpos:pitpos]
# Replace its and spaces
ts = re.sub('. ', '', ts)
# Remove अकारः मुखसुखार्थः
if remove_a:
ts = ts[0] + ts[1:].replace('a', '')
# Add dIrgha vowels if requested
if dirghas:
ts = ts.replace('a', 'aA').replace('i', 'iI').replace('u', 'uU').replace('f', 'fF').replace('x', 'xX')
return sanskrit_base.SanskritImmutableString(ts, sanskrit_base.SLP1)
def isInPratyahara(self, p, v, longp=True):
"""
Checks whether a given varna is in a pratyahara
Args:
p(SanskritImmutableString): Pratyahara
v(SanskritImmutableString): Varna
longp(boolean :optional:): When True (default), uses long pratyaharas
Returns
boolean: Is v in p?
"""
vs = v.canonical()
# १ . १ . ६९ अणुदित् सवर्णस्य चाप्रत्ययः
# So, we change long and pluta vowels to short ones in the input string
# Replace long vowels with short ones (note SLP1 encoding)
vs = re.sub('[AIUFX]+', lambda m: m.group(0).lower(), vs)
# Remove pluta
vs = vs.replace('3', '')
# Convert Pratyahara into String
# the 'a' varna needs special treatment - we remove the
# अकारः मुखसुखार्थः before searching!
pos = self.getPratyahara(p, longp, remove_a=vs[0] == 'a').canonical()
# Check if varna String is in Pratyahara String
return (pos.find(vs) != -1)
def isSavarna(self, v, a):
"""
Checks whether a given varna "a" is savarna to another "v"
Args:
v(SanskritImmutableString): Varna Indicator
a(SanskritImmutableString): Varna
v can be a svara (in which case we return True irrespective of length
of a)
v can be an udit, in which we return True for anything in the group
v can be tapara in which we return true only for the right length
Returns
boolean: Is v savarna to p?
"""
ac = a.canonical()
vc = v.canonical()
# Single
if len(vc) == 1:
# १ . १ . ६९ अणुदित् सवर्णस्य चाप्रत्ययः
# So, we change long and pluta vowels to short ones in the input string
# Replace long vowels with short ones (note SLP1 encoding)
ac = re.sub('[AIUFX]+', lambda m: m.group(0).lower(), ac)
# Remove pluta
ac = ac.replace('3', '')
vc = re.sub('[AIUFX]+', lambda m: m.group(0).lower(), vc)
# Remove pluta
vc = vc.replace('3', '')
return ac == vc
elif vc[-1] == "t":
# taparastatkAlasya
return ac == vc[:-1]
        # FIXME implement tkArsya para interpretation
elif vc[-1] == "u":
# १ . १ . ६९ अणुदित् सवर्णस्य चाप्रत्ययः
if vc[0] == "k":
vc = "kKgGN"
elif vc[0] == "c":
vc = "cCjJY"
elif vc[0] == "w":
vc = "wWqQR"
elif vc[0] == "t":
vc = "tTdDn"
elif vc[0] == "p":
vc = "pPbBm"
return ac in vc
else:
return ac in vc
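# Hedged usage sketch (added for illustration, mirroring the doctest style of the
# module docstring): an udit such as 'ku' stands for its whole varga, so any of
# k/K/g/G/N counts as savarna to it.
#
#   >>> MS = MaheshvaraSutras()
#   >>> ku = sanskrit_base.SanskritImmutableString('ku', sanskrit_base.SLP1)
#   >>> g = sanskrit_base.SanskritImmutableString('g', sanskrit_base.SLP1)
#   >>> MS.isSavarna(ku, g)
#   True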
if __name__ == "__main__":
import argparse
def getArgs():
"""
Argparse routine.
Returns args variable
"""
# Parser Setup
parser = argparse.ArgumentParser(description='SanskritImmutableString')
# Pratyahara - print out the list of varnas in this
parser.add_argument('--pratyahara', type=str, default="ik")
# Varna. Optional. Check if this varna is in pratyahara above
parser.add_argument('--varna', type=str, default=None)
# Encoding Optional
parser.add_argument('--encoding', type=str, default=None)
# Short pratyaharas
parser.add_argument('--short', action='store_true')
# Remove intermediate as
parser.add_argument('--remove-a', action='store_true')
# Include dIrghas when returning the pratyAhAra
parser.add_argument('--dirghas', action='store_true', default=False)
parser.add_argument('--output-slp1', action='store_true')
return parser.parse_args()
def main():
args = getArgs()
m = MaheshvaraSutras()
print(m)
if args.encoding is not None:
e = sanskrit_base.SCHEMES[args.encoding]
else:
e = None
p = sanskrit_base.SanskritImmutableString(args.pratyahara, e)
longp = not args.short
if args.output_slp1 is False:
print(six.text_type(p.devanagari()))
print(six.text_type(m.getPratyahara(p, longp, args.remove_a, args.dirghas).devanagari()))
else:
print(six.text_type(p.canonical()))
print(six.text_type(m.getPratyahara(p, longp, args.remove_a, args.dirghas).canonical()))
if args.varna is not None:
v = sanskrit_base.SanskritImmutableString(args.varna, e)
print(u"Is {} in {}?".format(v.devanagari(),
p.devanagari()))
print(m.isInPratyahara(p, v, longp))
main()
| mit | -1,527,397,545,974,053,400 | 33.369748 | 116 | 0.566259 | false |
to266/hyperspy | hyperspy/_components/expression.py | 1 | 4851 | from functools import wraps
from hyperspy.component import Component
_CLASS_DOC = \
"""%s component (created with Expression).
.. math::
f(x) = %s
"""
def _fill_function_args(fn):
@wraps(fn)
def fn_wrapped(self, x):
return fn(x, *[p.value for p in self.parameters])
return fn_wrapped
class Expression(Component):
def __init__(self, expression, name, position=None, module="numpy",
autodoc=True, **kwargs):
"""Create a component from a string expression.
It automatically generates the partial derivatives and the
class docstring.
Parameters
----------
expression: str
Component function in SymPy text expression format. See the SymPy
documentation for details. The only additional constraint is that
            the variable must be `x`. Also, if `module` is "numexpr" the
            functions are limited to those that numexpr supports. See its
            documentation for details.
name : str
Name of the component.
position: str, optional
The parameter name that defines the position of the component if
applicable. It enables adjusting the position of the component
interactively in a model.
module: {"numpy", "numexpr"}, default "numpy"
Module used to evaluate the function. numexpr is often faster but
it supports less functions.
**kwargs
Keyword arguments can be used to initialise the value of the
parameters.
Methods
-------
        compile_function: useful to recompile the function and gradient with
            a different module.
Examples
--------
The following creates a Gaussian component and set the initial value
of the parameters:
>>> hs.model.components.Expression(
... expression="height * exp(-(x - x0) ** 2 * 4 * log(2)/ fwhm ** 2)",
... name="Gaussian",
... height=1,
... fwhm=1,
... x0=0,
... position="x0",)
"""
import sympy
self._str_expression = expression
self.compile_function(module=module)
# Initialise component
Component.__init__(self, self._parameter_strings)
self._whitelist['expression'] = ('init', expression)
self._whitelist['name'] = ('init', name)
self._whitelist['position'] = ('init', position)
self._whitelist['module'] = ('init', module)
self.name = name
# Set the position parameter
if position:
self._position = getattr(self, position)
# Set the initial value of the parameters
if kwargs:
for kwarg, value in kwargs.items():
setattr(getattr(self, kwarg), 'value', value)
if autodoc:
self.__doc__ = _CLASS_DOC % (
name, sympy.latex(sympy.sympify(expression)))
def function(self, x):
return self._f(x, *[p.value for p in self.parameters])
def compile_function(self, module="numpy"):
import sympy
from sympy.utilities.lambdify import lambdify
expr = sympy.sympify(self._str_expression)
rvars = sympy.symbols([s.name for s in expr.free_symbols], real=True)
real_expr = expr.subs(
{orig: real_ for (orig, real_) in zip(expr.free_symbols, rvars)})
# just replace with the assumption that all our variables are real
expr = real_expr
eval_expr = expr.evalf()
# Extract parameters
parameters = [
symbol for symbol in expr.free_symbols if symbol.name != "x"]
parameters.sort(key=lambda x: x.name) # to have a reliable order
# Extract x
x, = [symbol for symbol in expr.free_symbols if symbol.name == "x"]
# Create compiled function
self._f = lambdify([x] + parameters, eval_expr,
modules=module, dummify=False)
parnames = [symbol.name for symbol in parameters]
self._parameter_strings = parnames
for parameter in parameters:
grad_expr = sympy.diff(eval_expr, parameter)
setattr(self,
"_f_grad_%s" % parameter.name,
lambdify([x] + parameters,
grad_expr.evalf(),
modules=module,
dummify=False)
)
setattr(self,
"grad_%s" % parameter.name,
_fill_function_args(
getattr(
self,
"_f_grad_%s" %
parameter.name)).__get__(
self,
Expression)
)
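# Rough usage sketch (illustrative only, reusing the docstring example; exact float
# formatting may differ): after construction the component can be evaluated, and the
# auto-generated gradient methods (grad_height, grad_fwhm, grad_x0) are available.
#
#   >>> g = Expression(
#   ...     "height * exp(-(x - x0) ** 2 * 4 * log(2)/ fwhm ** 2)",
#   ...     name="Gaussian", height=1, fwhm=1, x0=0, position="x0")
#   >>> g.function(0)
#   1.0
#   >>> g.grad_height(0)
#   1.0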
| gpl-3.0 | 4,370,389,324,684,439,000 | 33.404255 | 78 | 0.545867 | false |
tanonev/codewebs | src/unitTestServer/CodewebsUnitTestDaemon.py | 1 | 2384 | #!/usr/bin/env python
import os.path
import sys
import logging
configFile = os.path.join(os.path.dirname(os.path.realpath(__file__)),'localconfig')
config = {}
with open(configFile) as fid:
rows = fid.readlines()
for r in rows:
row = r.strip().split()
config[row[0]] = row[1]
HWID = int(config['HW'])
PARTID = int(config['PART'])
LOCALPATH = config['PATH']
INSTALLPATH = config['INSTALLPATH']
HOST = config['HOST']
sys.path.append(LOCALPATH)
from src.octaveUnitTesting.UnitTester import UnitTester
import pika
class CodewebsUnitTestDaemon(object):
def __init__(self):
logdir = os.path.join(INSTALLPATH,'log', 'UnitTestServer')
logfilename = os.path.join(logdir,'log_' + str(HWID) + '_' + str(PARTID) + '.log')
logging.basicConfig(filename=logfilename, format='%(asctime)s %(message)s', \
datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG)
logging.debug('Setting up connection.')
self.connection = pika.BlockingConnection(pika.ConnectionParameters(
host=HOST))
self.channel = self.connection.channel()
self.channel.queue_declare(queue='codewebs_unittest_queue')
self.tester = UnitTester(HWID, PARTID)
self.tester.refreshWorkingDir()
logging.debug('Ready to rumble!')
def onRequest(self, ch, method, props, body):
logging.debug(' [.] Request received, running unit tests.')
self.tester.loadCode(body)
output, correct = self.tester.run()
ch.basic_publish(exchange='',
routing_key=props.reply_to,
properties=pika.BasicProperties(correlation_id = \
props.correlation_id),
body=str(correct))
if correct == True:
logging.debug('\t\t... Result: passed!')
else:
logging.debug('\t\t... Result: failed!')
ch.basic_ack(delivery_tag = method.delivery_tag)
self.tester.refreshWorkingDir()
def run(self):
self.channel.basic_qos(prefetch_count=1)
self.channel.basic_consume(self.onRequest, queue='codewebs_unittest_queue')
logging.debug(' [x] Awaiting RPC requests')
self.channel.start_consuming()
if __name__ == '__main__':
d = CodewebsUnitTestDaemon()
d.run()
| mit | -4,013,881,637,374,198,300 | 32.577465 | 90 | 0.60193 | false |
rwl/PyCIM | PyCIM/PrettyPrintXML.py | 1 | 1717 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from xml.etree.cElementTree import parse, tostring
def xmlpp(source):
root = parse(source).getroot()
indent(root)
return tostring(root)
def indent(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i | mit | 6,270,850,627,351,509,000 | 40.902439 | 78 | 0.699476 | false |
Enteee/pdml2flow | pdml2flow/flow.py | 1 | 3496 | # vim: set fenc=utf8 ts=4 sw=4 et :
import json
import dict2xml
from .autovivification import AutoVivification
from .conf import Conf
from .utils import call_plugin
from .logging import *
class Flow():
# The overall frame time
newest_overall_frame_time = 0
@staticmethod
def get_flow_id(frame):
flowid = [frame[d] for d in Conf.FLOW_DEF]
valid = any([type(i) is not AutoVivification for i in flowid])
# check if flowid is empty
if not valid:
return None
return str(flowid)
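    # Hedged illustration (not part of the original source; assumes a flat
    # Conf.FLOW_DEF such as ['vlan.id'] purely for readability): a frame that
    # carries at least one configured field yields a string id, while a frame
    # where every lookup auto-vivifies to an empty AutoVivification yields None,
    # so such frames never open a flow.
    #
    #   frame = AutoVivification()
    #   frame['vlan.id'] = '42'
    #   Flow.get_flow_id(frame)                # -> "['42']"
    #   Flow.get_flow_id(AutoVivification())   # -> None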
def __init__(self, first_frame):
first_frame_time = first_frame[Conf.FRAME_TIME]
self.__newest_frame_time = self.__first_frame_time = first_frame_time
self.__id = self.get_flow_id(first_frame)
if Conf.FRAMES_ARRAY:
self.__frames = []
else:
self.__frames = AutoVivification()
self.__framecount = 0
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'flow_new',
self,
first_frame.cast_dicts(dict)
)
self.add_frame(first_frame)
def __hash__(self):
return hash(self.__id)
def __eq__(self, other):
return self.__id == other.__id
@property
def id(self):
return self.__id
@property
def frames(self):
# clean the frame data
if Conf.FRAMES_ARRAY:
self.__frames = [
f.clean_empty()
for f in self.__frames
]
ret = [
f.cast_dicts(dict)
for f in self.__frames
]
else:
self.__frames = self.__frames.clean_empty()
ret = self.__frames.cast_dicts(dict)
return ret
@property
def first_frame_time(self):
return self.__first_frame_time
@property
def newest_frame_time(self):
return self.__newest_frame_time
@property
def framecount(self):
return self.__framecount
def add_frame(self, frame):
# check if frame expands flow length
frame_time = frame[Conf.FRAME_TIME]
self.__first_frame_time = min(self.__first_frame_time, frame_time)
self.__newest_frame_time = max(self.__newest_frame_time, frame_time)
self.__framecount += 1
# Extract data
if Conf.FRAMES_ARRAY:
self.__frames.append(
frame.clean_empty()
)
else:
self.__frames.merge(
frame.clean_empty()
)
if Conf.COMPRESS_DATA:
self.__frames = self.__frames.compress()
debug(
'flow duration: {}'.format(
self.__newest_frame_time - self.__first_frame_time
)
)
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'frame_new',
frame.cast_dicts(dict),
self
)
def not_expired(self):
return self.__newest_frame_time > (Flow.newest_overall_frame_time - Conf.FLOW_BUFFER_TIME)
def expired(self):
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'flow_expired',
self
)
self.end()
def end(self):
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'flow_end',
self
)
| apache-2.0 | -4,154,306,614,671,607,300 | 24.705882 | 98 | 0.507151 | false |
jyejare/robottelo | robottelo/cli/base.py | 1 | 16328 | """Generic base class for cli hammer commands."""
import logging
import re
from wait_for import wait_for
from robottelo import ssh
from robottelo.cli import hammer
from robottelo.config import settings
class CLIError(Exception):
"""Indicates that a CLI command could not be run."""
class CLIBaseError(Exception):
"""Indicates that a CLI command has finished with return code different
from zero.
:param return_code: CLI command return code
:param stderr: contents of the ``stderr``
:param msg: explanation of the error
"""
def __init__(self, return_code, stderr, msg):
self.return_code = return_code
self.stderr = stderr
self.msg = msg
super(CLIBaseError, self).__init__(msg)
self.message = msg
def __str__(self):
"""Include class name, return_code, stderr and msg to string repr so
assertRaisesRegexp can be used to assert error present on any
attribute
"""
return repr(self)
def __repr__(self):
"""Include class name return_code, stderr and msg to improve logging
"""
        return '{}(return_code={!r}, stderr={!r}, msg={!r})'.format(
type(self).__name__, self.return_code, self.stderr, self.msg
)
class CLIReturnCodeError(CLIBaseError):
"""Error to be raised when an error occurs due to some validation error
    when executing hammer cli.
See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details
"""
class CLIDataBaseError(CLIBaseError):
"""Error to be raised when an error occurs due to some missing parameter
    which causes a database error on hammer
See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details
"""
class Base(object):
"""
@param command_base: base command of hammer.
Output of recent `hammer --help`::
activation-key Manipulate activation keys.
architecture Manipulate architectures.
auth Foreman connection login/logout.
auth-source Manipulate auth sources.
bootdisk Download boot disks
capsule Manipulate capsule
compute-resource Manipulate compute resources.
content-host Manipulate content hosts on the server
content-report View Content Reports
content-view Manipulate content views.
defaults Defaults management
docker Manipulate docker content
domain Manipulate domains.
environment Manipulate environments.
erratum Manipulate errata
fact Search facts.
filter Manage permission filters.
global-parameter Manipulate global parameters.
gpg Manipulate GPG Key actions on the server
host Manipulate hosts.
host-collection Manipulate host collections
hostgroup Manipulate hostgroups.
import Import data exported from a Red Hat Sat..
lifecycle-environment Manipulate lifecycle_environments
location Manipulate locations.
medium Manipulate installation media.
model Manipulate hardware models.
organization Manipulate organizations
os Manipulate operating system.
ostree-branch Manipulate ostree branches
package Manipulate packages.
package-group Manipulate package groups
partition-table Manipulate partition tables.
ping Get the status of the server
product Manipulate products.
proxy Manipulate smart proxies.
puppet-class Search puppet modules.
puppet-module View Puppet Module details.
report Browse and read reports.
repository Manipulate repositories
repository-set Manipulate repository sets on the server
role Manage user roles.
sc-param Manipulate smart class parameters.
settings Change server settings.
shell Interactive shell
subnet Manipulate subnets.
subscription Manipulate subscriptions.
sync-plan Manipulate sync plans
task Tasks related actions.
template Manipulate provisioning templates.
user Manipulate users.
user-group Manage user groups.
@since: 27.Nov.2013
"""
command_base = None # each inherited instance should define this
command_sub = None # specific to instance, like: create, update, etc
command_requires_org = False # True when command requires organization-id
logger = logging.getLogger('robottelo')
_db_error_regex = re.compile(r'.*INSERT INTO|.*SELECT .*FROM|.*violates foreign key')
@classmethod
def _handle_response(cls, response, ignore_stderr=None):
"""Verify ``return_code`` of the CLI command.
Check for a non-zero return code or any stderr contents.
:param response: a ``SSHCommandResult`` object, returned by
:mod:`robottelo.ssh.command`.
:param ignore_stderr: indicates whether to throw a warning in logs if
``stderr`` is not empty.
:returns: contents of ``stdout``.
:raises robottelo.cli.base.CLIReturnCodeError: If return code is
different from zero.
"""
if response.return_code != 0:
full_msg = (
'Command "{0} {1}" finished with return_code {2}\n'
'stderr contains following message:\n{3}'.format(
cls.command_base, cls.command_sub, response.return_code, response.stderr
)
)
error_data = (response.return_code, response.stderr, full_msg)
if cls._db_error_regex.search(full_msg):
raise CLIDataBaseError(*error_data)
raise CLIReturnCodeError(*error_data)
if len(response.stderr) != 0 and not ignore_stderr:
cls.logger.warning('stderr contains following message:\n{0}'.format(response.stderr))
return response.stdout
@classmethod
def add_operating_system(cls, options=None):
"""
Adds OS to record.
"""
cls.command_sub = 'add-operatingsystem'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def create(cls, options=None, timeout=None):
"""
Creates a new record using the arguments passed via dictionary.
"""
cls.command_sub = 'create'
if options is None:
options = {}
result = cls.execute(cls._construct_command(options), output_format='csv', timeout=timeout)
# Extract new object ID if it was successfully created
if len(result) > 0 and 'id' in result[0]:
obj_id = result[0]['id']
# Fetch new object
# Some Katello obj require the organization-id for subcommands
info_options = {'id': obj_id}
if cls.command_requires_org:
if 'organization-id' not in options:
tmpl = 'organization-id option is required for {0}.create'
raise CLIError(tmpl.format(cls.__name__))
info_options['organization-id'] = options['organization-id']
# organization creation can take some time
if cls.command_base == 'organization':
new_obj, _ = wait_for(
lambda: cls.info(info_options),
timeout=300,
delay=5,
silent_failure=True,
handle_exception=True,
)
else:
new_obj = cls.info(info_options)
# stdout should be a dictionary containing the object
if len(new_obj) > 0:
result = new_obj
return result
@classmethod
def delete(cls, options=None, timeout=None):
"""Deletes existing record."""
cls.command_sub = 'delete'
return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout)
@classmethod
def delete_parameter(cls, options=None):
"""
Deletes parameter from record.
"""
cls.command_sub = 'delete-parameter'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def dump(cls, options=None):
"""
Displays the content for existing partition table.
"""
cls.command_sub = 'dump'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def _get_username_password(cls, username=None, password=None):
"""Lookup for the username and password for cli command in following
order:
1. ``user`` or ``password`` parameters
2. ``foreman_admin_username`` or ``foreman_admin_password`` attributes
3. foreman.admin.username or foreman.admin.password configuration
:return: A tuple with the username and password found
:rtype: tuple
"""
if username is None:
try:
username = getattr(cls, 'foreman_admin_username')
except AttributeError:
username = settings.server.admin_username
if password is None:
try:
password = getattr(cls, 'foreman_admin_password')
except AttributeError:
password = settings.server.admin_password
return (username, password)
@classmethod
def execute(
cls,
command,
user=None,
password=None,
output_format=None,
timeout=None,
ignore_stderr=None,
return_raw_response=None,
connection_timeout=None,
):
"""Executes the cli ``command`` on the server via ssh"""
user, password = cls._get_username_password(user, password)
time_hammer = False
if settings.performance:
time_hammer = settings.performance.time_hammer
# add time to measure hammer performance
cmd = 'LANG={0} {1} hammer -v {2} {3} {4} {5}'.format(
settings.locale,
'time -p' if time_hammer else '',
'-u {0}'.format(user) if user is not None else '--interactive no',
'-p {0}'.format(password) if password is not None else '',
'--output={0}'.format(output_format) if output_format else '',
command,
)
response = ssh.command(
cmd.encode('utf-8'),
output_format=output_format,
timeout=timeout,
connection_timeout=connection_timeout,
)
if return_raw_response:
return response
else:
return cls._handle_response(response, ignore_stderr=ignore_stderr)
@classmethod
def exists(cls, options=None, search=None):
"""Search for an entity using the query ``search[0]="search[1]"``
        The ``list`` command with the ``--search`` option is used to do
        the search.
        If the ``options`` argument already has a search key, then the ``search``
        argument will not be evaluated, which allows a different search query.
"""
if options is None:
options = {}
if search is not None and 'search' not in options:
options.update({'search': '{0}=\\"{1}\\"'.format(search[0], search[1])})
result = cls.list(options)
if result:
result = result[0]
return result
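    # Hedged usage sketch (illustrative only; assumes a concrete subclass such as
    # robottelo.cli.org.Org with command_base = 'organization' and a reachable
    # Satellite backend for hammer):
    #
    #   from robottelo.cli.org import Org
    #   org = Org.exists(search=('name', 'Default Organization'))
    #   if not org:
    #       Org.create({'name': 'Default Organization'})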
@classmethod
def info(cls, options=None, output_format=None, return_raw_response=None):
"""Reads the entity information."""
cls.command_sub = 'info'
if options is None:
options = {}
if cls.command_requires_org and 'organization-id' not in options:
raise CLIError('organization-id option is required for {0}.info'.format(cls.__name__))
result = cls.execute(
command=cls._construct_command(options),
output_format=output_format,
return_raw_response=return_raw_response,
)
if not return_raw_response and output_format != 'json':
result = hammer.parse_info(result)
return result
@classmethod
def list(cls, options=None, per_page=True, output_format='csv'):
"""
List information.
@param options: ID (sometimes name works as well) to retrieve info.
"""
cls.command_sub = 'list'
if options is None:
options = {}
if 'per-page' not in options and per_page:
options['per-page'] = 10000
if cls.command_requires_org and 'organization-id' not in options:
raise CLIError('organization-id option is required for {0}.list'.format(cls.__name__))
result = cls.execute(cls._construct_command(options), output_format=output_format)
return result
@classmethod
def puppetclasses(cls, options=None):
"""
Lists all puppet classes.
"""
cls.command_sub = 'puppet-classes'
result = cls.execute(cls._construct_command(options), output_format='csv')
return result
@classmethod
def remove_operating_system(cls, options=None):
"""
Removes OS from record.
"""
cls.command_sub = 'remove-operatingsystem'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def sc_params(cls, options=None):
"""
Lists all smart class parameters.
"""
cls.command_sub = 'sc-params'
result = cls.execute(cls._construct_command(options), output_format='csv')
return result
@classmethod
def set_parameter(cls, options=None):
"""
Creates or updates parameter for a record.
"""
cls.command_sub = 'set-parameter'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def update(cls, options=None, return_raw_response=None):
"""
Updates existing record.
"""
cls.command_sub = 'update'
result = cls.execute(
cls._construct_command(options),
output_format='csv',
return_raw_response=return_raw_response,
)
return result
@classmethod
def with_user(cls, username=None, password=None):
"""Context Manager for credentials"""
class Wrapper(cls):
"""Wrapper class which defines the foreman admin username and
password to be used when executing any cli command.
"""
foreman_admin_username = username
foreman_admin_password = password
return Wrapper
@classmethod
def _construct_command(cls, options=None):
"""Build a hammer cli command based on the options passed"""
tail = ''
if options is None:
options = {}
for key, val in options.items():
if val is None:
continue
if val is True:
tail += ' --{0}'.format(key)
elif val is not False:
if isinstance(val, list):
val = ','.join(str(el) for el in val)
tail += ' --{0}="{1}"'.format(key, val)
cmd = f"{cls.command_base} {cls.command_sub or ''} {tail.strip()}"
return cmd
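    # Worked example (illustrative, never executed here): for a subclass with
    # command_base = 'organization' and command_sub = 'list', options such as
    # {'per-page': 10000, 'fields': ['name', 'id'], 'no-headers': True} render
    # roughly to:
    #
    #   organization list --per-page="10000" --fields="name,id" --no-headers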
| gpl-3.0 | -8,573,411,306,033,115,000 | 33.447257 | 99 | 0.563694 | false |
sanjuro/RCJK | vendor/django/core/cache/backends/db.py | 2 | 4332 | "Database cache backend."
from django.core.cache.backends.base import BaseCache
from django.db import connection, transaction, DatabaseError
import base64, time
from datetime import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
class CacheClass(BaseCache):
def __init__(self, table, params):
BaseCache.__init__(self, params)
self._table = connection.ops.quote_name(table)
max_entries = params.get('max_entries', 300)
try:
self._max_entries = int(max_entries)
except (ValueError, TypeError):
self._max_entries = 300
cull_frequency = params.get('cull_frequency', 3)
try:
self._cull_frequency = int(cull_frequency)
except (ValueError, TypeError):
self._cull_frequency = 3
def get(self, key, default=None):
cursor = connection.cursor()
cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
row = cursor.fetchone()
if row is None:
return default
now = datetime.now()
if row[2] < now:
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
transaction.commit_unless_managed()
return default
value = connection.ops.process_clob(row[1])
return pickle.loads(base64.decodestring(value))
def set(self, key, value, timeout=None):
self._base_set('set', key, value, timeout)
def add(self, key, value, timeout=None):
return self._base_set('add', key, value, timeout)
def _base_set(self, mode, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
cursor = connection.cursor()
cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
num = cursor.fetchone()[0]
now = datetime.now().replace(microsecond=0)
exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
if num > self._max_entries:
self._cull(cursor, now)
encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
try:
result = cursor.fetchone()
if result and (mode == 'set' or
(mode == 'add' and result[1] < now)):
cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table,
[encoded, connection.ops.value_to_db_datetime(exp), key])
else:
cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table,
[key, encoded, connection.ops.value_to_db_datetime(exp)])
except DatabaseError:
# To be threadsafe, updates/inserts are allowed to fail silently
transaction.rollback_unless_managed()
return False
else:
transaction.commit_unless_managed()
return True
def delete(self, key):
cursor = connection.cursor()
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
transaction.commit_unless_managed()
def has_key(self, key):
now = datetime.now().replace(microsecond=0)
cursor = connection.cursor()
cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % self._table,
[key, connection.ops.value_to_db_datetime(now)])
return cursor.fetchone() is not None
def _cull(self, cursor, now):
if self._cull_frequency == 0:
cursor.execute("DELETE FROM %s" % self._table)
else:
cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table,
[connection.ops.value_to_db_datetime(now)])
cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
num = cursor.fetchone()[0]
if num > self._max_entries:
cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
| apache-2.0 | -6,734,824,525,095,844,000 | 43.204082 | 140 | 0.584949 | false |
guori12321/todo | todo/generator.py | 1 | 1176 | # coding=utf8
"""
Generate a todo-format string from a list of todo tasks.
from todo.generator import generator
generator.generate(todo) # return str
"""
from models import Task
from models import Todo
class TodoGenerator(object):
"""
Generator from python list to string.
"""
g_newline = "\n"
def g_id(self, v):
return str(v) + "."
def g_done(self, v):
if v is True:
return '(x)'
else:
return ' '
def g_task(self, v):
return v
def gen_task(self, task):
lst = []
lst.append(self.g_id(task.id))
lst.append(self.g_done(task.done))
lst.append(self.g_task(task.content))
return " ".join(lst)
def generate(self, todo):
"""
Generate todo to string format.
e.g.
[<task object>, ..] => "1. (x) do something .."
"""
re = []
for i in todo:
if isinstance(i, Task):
re.append(self.gen_task(i))
else:
                raise SyntaxError('Unsupported type: ' + str(type(i)))
return self.g_newline.join(re)
generator = TodoGenerator() # build generator
| mit | 8,895,891,163,284,157,000 | 20.381818 | 65 | 0.527211 | false |
Gnewt/bhs_sales | shirts/models.py | 1 | 1486 | from django.db import models
from json_field import JSONField
SMALL = 'S'
MEDIUM = 'M'
LARGE = 'L'
XLARGE = 'XL'
ITEM_SIZE_CHOICES = (
(SMALL, 'Small (S)'),
(MEDIUM, 'Medium (M)'),
(LARGE, 'Large (L)'),
(XLARGE, 'Extra Large (XL)'),
)
class StoreItem(models.Model):
name = models.CharField(max_length=128)
image = models.URLField()
description = models.TextField()
price = models.DecimalField(max_digits=5, decimal_places=2)
def __unicode__(self):
return self.name
class Order(models.Model):
item = models.ForeignKey("shirts.StoreItem")
first_name = models.CharField(max_length=128)
last_name = models.CharField(max_length=128)
size = models.CharField(max_length=2,
choices=ITEM_SIZE_CHOICES)
timestamp = models.DateTimeField(auto_now_add=True)
purchase_price = models.DecimalField(max_digits=5, decimal_places=2)
stripe_charge_id = models.CharField(max_length=64, blank=True, null=True)
notes = models.TextField(blank=True)
STRIPE = 'ST'
OTHER = 'OT'
PAYMENT_METHOD_CHOICES = (
(STRIPE, 'Stripe'),
(OTHER, 'Other'),
)
payment_method = models.CharField(max_length=2,
choices=PAYMENT_METHOD_CHOICES,
default=STRIPE)
def __unicode__(self):
return "%s %s: %s (%s)" % (self.first_name, self.last_name, self.item.name, self.size)
| mit | -7,521,620,094,661,558,000 | 29.326531 | 94 | 0.597577 | false |
elgambitero/FreeCAD_sf_master | src/Mod/Arch/importIFC.py | 1 | 62430 | #***************************************************************************
#* *
#* Copyright (c) 2014 *
#* Yorik van Havre <[email protected]> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
__title__ = "FreeCAD IFC importer - Enhanced ifcopenshell-only version"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
import os,time,tempfile,uuid,FreeCAD,Part,Draft,Arch,math,DraftVecUtils
if open.__module__ == '__builtin__':
pyopen = open # because we'll redefine open below
# which IFC type must create which FreeCAD type
typesmap = { "Site": ["IfcSite"],
"Building": ["IfcBuilding"],
"Floor": ["IfcBuildingStorey"],
"Structure": ["IfcBeam", "IfcBeamStandardCase", "IfcColumn", "IfcColumnStandardCase", "IfcSlab", "IfcFooting", "IfcPile", "IfcTendon"],
"Wall": ["IfcWall", "IfcWallStandardCase", "IfcCurtainWall"],
"Window": ["IfcWindow", "IfcWindowStandardCase", "IfcDoor", "IfcDoorStandardCase"],
"Roof": ["IfcRoof"],
"Stairs": ["IfcStair", "IfcStairFlight", "IfcRamp", "IfcRampFlight"],
"Space": ["IfcSpace"],
"Rebar": ["IfcReinforcingBar"],
"Equipment": ["IfcFurnishingElement","IfcSanitaryTerminal","IfcFlowTerminal","IfcElectricAppliance"]
}
# which IFC entity (product) is a structural object
structuralifcobjects = (
"IfcStructuralCurveMember", "IfcStructuralSurfaceMember",
"IfcStructuralPointConnection", "IfcStructuralCurveConnection", "IfcStructuralSurfaceConnection",
"IfcStructuralAction", "IfcStructuralPointAction",
"IfcStructuralLinearAction", "IfcStructuralLinearActionVarying", "IfcStructuralPlanarAction"
)
# specific name translations
translationtable = { "Foundation":"Footing",
"Floor":"BuildingStorey",
"Rebar":"ReinforcingBar",
"HydroEquipment":"SanitaryTerminal",
"ElectricEquipment":"ElectricAppliance",
"Furniture":"FurnishingElement",
"Stair Flight":"StairFlight",
"Curtain Wall":"CurtainWall"
}
ifctemplate = """ISO-10303-21;
HEADER;
FILE_DESCRIPTION(('ViewDefinition [CoordinationView]'),'2;1');
FILE_NAME('$filename','$timestamp',('$owner','$email'),('$company'),'IfcOpenShell','IfcOpenShell','');
FILE_SCHEMA(('IFC2X3'));
ENDSEC;
DATA;
#1=IFCPERSON($,$,'$owner',$,$,$,$,$);
#2=IFCORGANIZATION($,'$company',$,$,$);
#3=IFCPERSONANDORGANIZATION(#1,#2,$);
#4=IFCAPPLICATION(#2,'$version','FreeCAD','118df2cf_ed21_438e_a41');
#5=IFCOWNERHISTORY(#3,#4,$,.ADDED.,$,#3,#4,$now);
#6=IFCDIRECTION((1.,0.,0.));
#7=IFCDIRECTION((0.,0.,1.));
#8=IFCCARTESIANPOINT((0.,0.,0.));
#9=IFCAXIS2PLACEMENT3D(#8,#7,#6);
#10=IFCDIRECTION((0.,1.,0.));
#11=IFCGEOMETRICREPRESENTATIONCONTEXT('Plan','Model',3,1.E-05,#9,#10);
#12=IFCDIMENSIONALEXPONENTS(0,0,0,0,0,0,0);
#13=IFCSIUNIT(*,.LENGTHUNIT.,$,.METRE.);
#14=IFCSIUNIT(*,.AREAUNIT.,$,.SQUARE_METRE.);
#15=IFCSIUNIT(*,.VOLUMEUNIT.,$,.CUBIC_METRE.);
#16=IFCSIUNIT(*,.PLANEANGLEUNIT.,$,.RADIAN.);
#17=IFCMEASUREWITHUNIT(IFCPLANEANGLEMEASURE(0.017453292519943295),#16);
#18=IFCCONVERSIONBASEDUNIT(#12,.PLANEANGLEUNIT.,'DEGREE',#17);
#19=IFCUNITASSIGNMENT((#13,#14,#15,#18));
#20=IFCPROJECT('$projectid',#5,'$project',$,$,$,$,(#11),#19);
ENDSEC;
END-ISO-10303-21;
"""
def decode(filename,utf=False):
if isinstance(filename,unicode):
# workaround since ifcopenshell currently can't handle unicode filenames
if utf:
encoding = "utf8"
else:
import sys
encoding = sys.getfilesystemencoding()
filename = filename.encode(encoding)
return filename
def doubleClickTree(item,column):
txt = item.text(column)
if "Entity #" in txt:
eid = txt.split("#")[1].split(":")[0]
addr = tree.findItems(eid,0,0)
if addr:
tree.scrollToItem(addr[0])
addr[0].setSelected(True)
def explore(filename=None):
"""explore([filename]): opens a dialog showing
the contents of an IFC file. If no filename is given, a dialog will
pop up to choose a file."""
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
DEBUG = p.GetBool("ifcDebug",False)
try:
import ifcopenshell
except:
FreeCAD.Console.PrintError("IfcOpenShell was not found on this system. IFC support is disabled\n")
return
if not filename:
from PySide import QtGui
filename = QtGui.QFileDialog.getOpenFileName(QtGui.qApp.activeWindow(),'IFC files','*.ifc')
if filename:
filename = filename[0]
from PySide import QtCore,QtGui
filename = decode(filename,utf=True)
if not os.path.exists(filename):
print "File not found"
return
ifc = ifcopenshell.open(filename)
global tree
tree = QtGui.QTreeWidget()
tree.setColumnCount(3)
tree.setWordWrap(True)
tree.header().setDefaultSectionSize(60)
tree.header().resizeSection(0,60)
tree.header().resizeSection(1,30)
tree.header().setStretchLastSection(True)
tree.headerItem().setText(0, "ID")
tree.headerItem().setText(1, "")
tree.headerItem().setText(2, "Item and Properties")
bold = QtGui.QFont()
bold.setWeight(75)
bold.setBold(True)
entities = ifc.by_type("IfcRoot")
entities += ifc.by_type("IfcRepresentation")
entities += ifc.by_type("IfcRepresentationItem")
entities += ifc.by_type("IfcPlacement")
entities += ifc.by_type("IfcProperty")
entities += ifc.by_type("IfcPhysicalSimpleQuantity")
entities += ifc.by_type("IfcMaterial")
entities += ifc.by_type("IfcProductRepresentation")
entities = sorted(entities, key=lambda eid: eid.id())
done = []
for entity in entities:
if hasattr(entity,"id"):
if entity.id() in done:
continue
done.append(entity.id())
item = QtGui.QTreeWidgetItem(tree)
item.setText(0,str(entity.id()))
if entity.is_a() in ["IfcWall","IfcWallStandardCase"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Wall_Tree.svg"))
elif entity.is_a() in ["IfcBuildingElementProxy"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Component.svg"))
elif entity.is_a() in ["IfcColumn","IfcColumnStandardCase","IfcBeam","IfcBeamStandardCase","IfcSlab","IfcFooting","IfcPile","IfcTendon"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Structure_Tree.svg"))
elif entity.is_a() in ["IfcSite"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Site_Tree.svg"))
elif entity.is_a() in ["IfcBuilding"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Building_Tree.svg"))
elif entity.is_a() in ["IfcBuildingStorey"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Floor_Tree.svg"))
elif entity.is_a() in ["IfcWindow","IfcWindowStandardCase","IfcDoor","IfcDoorStandardCase"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Window_Tree.svg"))
elif entity.is_a() in ["IfcRoof"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Roof_Tree.svg"))
elif entity.is_a() in ["IfcExtrudedAreaSolid","IfcClosedShell"]:
item.setIcon(1,QtGui.QIcon(":icons/Tree_Part.svg"))
elif entity.is_a() in ["IfcFace"]:
item.setIcon(1,QtGui.QIcon(":icons/Draft_SwitchMode.svg"))
elif entity.is_a() in ["IfcArbitraryClosedProfileDef","IfcPolyloop"]:
item.setIcon(1,QtGui.QIcon(":icons/Draft_Draft.svg"))
elif entity.is_a() in ["IfcPropertySingleValue","IfcQuantityArea","IfcQuantityVolume"]:
item.setIcon(1,QtGui.QIcon(":icons/Tree_Annotation.svg"))
elif entity.is_a() in ["IfcMaterial"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Material.svg"))
item.setText(2,str(entity.is_a()))
item.setFont(2,bold);
i = 0
while True:
try:
argname = entity.attribute_name(i)
except:
break
else:
try:
argvalue = getattr(entity,argname)
except:
print "Error in entity ",entity
break
else:
if not argname in ["Id", "GlobalId"]:
colored = False
if isinstance(argvalue,ifcopenshell.entity_instance):
if argvalue.id() == 0:
t = str(argvalue)
else:
colored = True
t = "Entity #" + str(argvalue.id()) + ": " + str(argvalue.is_a())
elif isinstance(argvalue,list):
t = ""
else:
t = str(argvalue)
t = " " + str(argname) + " : " + str(t)
item = QtGui.QTreeWidgetItem(tree)
item.setText(2,str(t))
if colored:
item.setForeground(2,QtGui.QBrush(QtGui.QColor("#005AFF")))
if isinstance(argvalue,list):
for argitem in argvalue:
colored = False
if isinstance(argitem,ifcopenshell.entity_instance):
if argitem.id() == 0:
t = str(argitem)
else:
colored = True
t = "Entity #" + str(argitem.id()) + ": " + str(argitem.is_a())
else:
t = argitem
t = " " + str(t)
item = QtGui.QTreeWidgetItem(tree)
item.setText(2,str(t))
if colored:
item.setForeground(2,QtGui.QBrush(QtGui.QColor("#005AFF")))
i += 1
d = QtGui.QDialog()
d.setObjectName("IfcExplorer")
d.setWindowTitle("Ifc Explorer")
d.resize(640, 480)
layout = QtGui.QVBoxLayout(d)
layout.addWidget(tree)
tree.itemDoubleClicked.connect(doubleClickTree)
d.exec_()
del tree
return
def open(filename,skip=[],only=[],root=None):
"opens an IFC file in a new document"
docname = os.path.splitext(os.path.basename(filename))[0]
docname = decode(docname,utf=True)
doc = FreeCAD.newDocument(docname)
doc.Label = docname
doc = insert(filename,doc.Name,skip,only,root)
return doc
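# Hedged usage sketch (file paths and ids below are placeholders): both entry points
# accept a skip list of IFC entity ids, an "only" list that restricts the import to
# the given products and their children, and an optional root element type.
#
# import importIFC
# importIFC.open("/path/to/model.ifc") # import into a new document
# importIFC.insert("/path/to/model.ifc", "MyDoc", only=[123]) # into an existing document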
def insert(filename,docname,skip=[],only=[],root=None):
"""insert(filename,docname,skip=[],only=[],root=None): imports the contents of an IFC file.
skip can contain a list of ids of objects to be skipped, only can restrict the import to
certain object ids (will also get their children) and root can be used to
    import only the derivatives of a certain element type (default = ifcProduct)."""
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
DEBUG = p.GetBool("ifcDebug",False)
PREFIX_NUMBERS = p.GetBool("ifcPrefixNumbers",False)
SKIP = p.GetString("ifcSkip","").split(",")
SEPARATE_OPENINGS = p.GetBool("ifcSeparateOpenings",False)
ROOT_ELEMENT = p.GetString("ifcRootElement","IfcProduct")
GET_EXTRUSIONS = p.GetBool("ifcGetExtrusions",False)
MERGE_MATERIALS = p.GetBool("ifcMergeMaterials",False)
if root:
ROOT_ELEMENT = root
MERGE_MODE_ARCH = p.GetInt("ifcImportModeArch",0)
MERGE_MODE_STRUCT = p.GetInt("ifcImportModeStruct",1)
if MERGE_MODE_ARCH > 0:
SEPARATE_OPENINGS = False
GET_EXTRUSIONS = False
if not SEPARATE_OPENINGS:
SKIP.append("IfcOpeningElement")
try:
import ifcopenshell
except:
FreeCAD.Console.PrintError("IfcOpenShell was not found on this system. IFC support is disabled\n")
return
if DEBUG: print "Opening ",filename,"...",
try:
doc = FreeCAD.getDocument(docname)
except:
doc = FreeCAD.newDocument(docname)
FreeCAD.ActiveDocument = doc
if DEBUG: print "done."
global ifcfile # keeping global for debugging purposes
filename = decode(filename,utf=True)
ifcfile = ifcopenshell.open(filename)
from ifcopenshell import geom
settings = ifcopenshell.geom.settings()
settings.set(settings.USE_BREP_DATA,True)
settings.set(settings.SEW_SHELLS,True)
settings.set(settings.USE_WORLD_COORDS,True)
if SEPARATE_OPENINGS:
settings.set(settings.DISABLE_OPENING_SUBTRACTIONS,True)
if MERGE_MODE_STRUCT != 3:
try:
settings.set(settings.INCLUDE_CURVES,True)
except:
FreeCAD.Console.PrintError("Set INCLUDE_CURVES failed. IfcOpenShell seams to be an Outdated Developer Version.\n")
FreeCAD.Console.PrintError("Import of StructuralAnalysisView Entities will not work!\n")
sites = ifcfile.by_type("IfcSite")
buildings = ifcfile.by_type("IfcBuilding")
floors = ifcfile.by_type("IfcBuildingStorey")
products = ifcfile.by_type(ROOT_ELEMENT)
openings = ifcfile.by_type("IfcOpeningElement")
annotations = ifcfile.by_type("IfcAnnotation")
materials = ifcfile.by_type("IfcMaterial")
if DEBUG: print "Building relationships table...",
# building relations tables
objects = {} # { id:object, ... }
additions = {} # { host:[child,...], ... }
groups = {} # { host:[child,...], ... } # used in structural IFC
subtractions = [] # [ [opening,host], ... ]
properties = {} # { host:[property, ...], ... }
colors = {} # { id:(r,g,b) }
    shapes = {} # { id:shape } only used for merge mode
    structshapes = {} # { id:shape } only used for merge mode
mattable = {} # { objid:matid }
sharedobjects = {} # { representationmapid:object }
for r in ifcfile.by_type("IfcRelContainedInSpatialStructure"):
additions.setdefault(r.RelatingStructure.id(),[]).extend([e.id() for e in r.RelatedElements])
for r in ifcfile.by_type("IfcRelAggregates"):
additions.setdefault(r.RelatingObject.id(),[]).extend([e.id() for e in r.RelatedObjects])
for r in ifcfile.by_type("IfcRelAssignsToGroup"):
groups.setdefault(r.RelatingGroup.id(),[]).extend([e.id() for e in r.RelatedObjects])
for r in ifcfile.by_type("IfcRelVoidsElement"):
subtractions.append([r.RelatedOpeningElement.id(), r.RelatingBuildingElement.id()])
for r in ifcfile.by_type("IfcRelDefinesByProperties"):
for obj in r.RelatedObjects:
if r.RelatingPropertyDefinition.is_a("IfcPropertySet"):
properties.setdefault(obj.id(),[]).extend([e.id() for e in r.RelatingPropertyDefinition.HasProperties])
for r in ifcfile.by_type("IfcRelAssociatesMaterial"):
for o in r.RelatedObjects:
mattable[o.id()] = r.RelatingMaterial.id()
for r in ifcfile.by_type("IfcStyledItem"):
if r.Styles[0].is_a("IfcPresentationStyleAssignment"):
if r.Styles[0].Styles[0].is_a("IfcSurfaceStyle"):
if r.Styles[0].Styles[0].Styles[0].is_a("IfcSurfaceStyleRendering"):
if r.Styles[0].Styles[0].Styles[0].SurfaceColour:
c = r.Styles[0].Styles[0].Styles[0].SurfaceColour
if r.Item:
for p in ifcfile.by_type("IfcProduct"):
if p.Representation:
for it in p.Representation.Representations:
if it.Items:
if it.Items[0].id() == r.Item.id():
colors[p.id()] = (c.Red,c.Green,c.Blue)
elif it.Items[0].is_a("IfcBooleanResult"):
if (it.Items[0].FirstOperand.id() == r.Item.id()):
colors[p.id()] = (c.Red,c.Green,c.Blue)
else:
for m in ifcfile.by_type("IfcMaterialDefinitionRepresentation"):
for it in m.Representations:
if it.Items:
if it.Items[0].id() == r.id():
colors[m.RepresentedMaterial.id()] = (c.Red,c.Green,c.Blue)
if only: # only import a list of IDs and their children
ids = []
while only:
currentid = only.pop()
ids.append(currentid)
if currentid in additions.keys():
only.extend(additions[currentid])
products = [ifcfile[currentid] for currentid in ids]
if DEBUG: print "done."
count = 0
from FreeCAD import Base
progressbar = Base.ProgressIndicator()
progressbar.start("Importing IFC objects...",len(products))
if DEBUG: print "Processing objects..."
# products
for product in products:
pid = product.id()
guid = product.GlobalId
ptype = product.is_a()
if DEBUG: print count+1,"/",len(products)," creating object #",pid," : ",ptype,
name = str(ptype[3:])
if product.Name:
name = product.Name.decode("unicode_escape").encode("utf8")
if PREFIX_NUMBERS: name = "ID" + str(pid) + " " + name
obj = None
baseobj = None
brep = None
shape = None
archobj = True # assume all objects not in structuralifcobjects are architecture
if ptype in structuralifcobjects:
archobj = False
if DEBUG: print " (struct)",
else:
if DEBUG: print " (arch)",
if MERGE_MODE_ARCH == 4 and archobj:
if DEBUG: print " skipped."
continue
if MERGE_MODE_STRUCT == 3 and not archobj:
if DEBUG: print " skipped."
continue
if pid in skip: # user given id skip list
if DEBUG: print " skipped."
continue
if ptype in SKIP: # preferences-set type skip list
if DEBUG: print " skipped."
continue
# detect if this object is sharing its shape
clone = None
store = None
if product.Representation and MERGE_MODE_ARCH == 0 and archobj:
for s in product.Representation.Representations:
if s.RepresentationIdentifier.upper() == "BODY":
if s.Items[0].is_a("IfcMappedItem"):
bid = s.Items[0].MappingSource.id()
if bid in sharedobjects:
clone = sharedobjects[bid]
else:
sharedobjects[bid] = None
store = bid
try:
cr = ifcopenshell.geom.create_shape(settings,product)
brep = cr.geometry.brep_data
except:
pass # IfcOpenShell will yield an error if a given product has no shape, but we don't care
if brep:
if DEBUG: print " ",str(len(brep)/1000),"k ",
shape = Part.Shape()
shape.importBrepFromString(brep)
shape.scale(1000.0) # IfcOpenShell always outputs in meters
if not shape.isNull():
if (MERGE_MODE_ARCH > 0 and archobj) or not archobj:
if ptype == "IfcSpace": # do not add spaces to compounds
if DEBUG: print "skipping space ",pid
elif not archobj:
structshapes[pid] = shape
if DEBUG: print shape.Solids," ",
baseobj = shape
else:
shapes[pid] = shape
if DEBUG: print shape.Solids," ",
baseobj = shape
else:
if clone:
if DEBUG: print "clone ",
else:
if GET_EXTRUSIONS:
ex = Arch.getExtrusionData(shape)
if ex:
print "extrusion ",
baseface = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_footprint")
baseface.Shape = ex[0]
baseobj = FreeCAD.ActiveDocument.addObject("Part::Extrusion",name+"_body")
baseobj.Base = baseface
baseobj.Dir = ex[1]
if FreeCAD.GuiUp:
baseface.ViewObject.hide()
if (not baseobj):
baseobj = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_body")
baseobj.Shape = shape
else:
if DEBUG: print "null shape ",
if not shape.isValid():
if DEBUG: print "invalid shape ",
#continue
else:
if DEBUG: print " no brep ",
if MERGE_MODE_ARCH == 0 and archobj:
# full Arch objects
for freecadtype,ifctypes in typesmap.items():
if ptype in ifctypes:
if clone:
obj = getattr(Arch,"make"+freecadtype)(name=name)
obj.CloneOf = clone
if shape:
v = shape.Solids[0].CenterOfMass.sub(clone.Shape.Solids[0].CenterOfMass)
r = getRotation(product)
if not r.isNull():
v = v.add(clone.Shape.Solids[0].CenterOfMass)
v = v.add(r.multVec(clone.Shape.Solids[0].CenterOfMass.negative()))
obj.Placement.Rotation = r
obj.Placement.move(v)
else:
obj = getattr(Arch,"make"+freecadtype)(baseobj=baseobj,name=name)
if store:
sharedobjects[store] = obj
obj.Label = name
if FreeCAD.GuiUp and baseobj:
if hasattr(baseobj,"ViewObject"):
baseobj.ViewObject.hide()
# setting role
try:
r = ptype[3:]
tr = dict((v,k) for k, v in translationtable.iteritems())
if r in tr.keys():
r = tr[r]
# remove the "StandardCase"
if "StandardCase" in r:
r = r[:-12]
obj.Role = r
except:
pass
# setting uid
if hasattr(obj,"IfcAttributes"):
a = obj.IfcAttributes
a["IfcUID"] = str(guid)
obj.IfcAttributes = a
break
if not obj:
obj = Arch.makeComponent(baseobj,name=name)
if obj:
sols = str(obj.Shape.Solids) if hasattr(obj,"Shape") else ""
if DEBUG: print sols
objects[pid] = obj
elif (MERGE_MODE_ARCH == 1 and archobj) or (MERGE_MODE_STRUCT == 0 and not archobj):
# non-parametric Arch objects
if ptype in ["IfcSite","IfcBuilding","IfcBuildingStorey"]:
for freecadtype,ifctypes in typesmap.items():
if ptype in ifctypes:
obj = getattr(Arch,"make"+freecadtype)(baseobj=None,name=name)
elif baseobj:
obj = Arch.makeComponent(baseobj,name=name,delete=True)
elif (MERGE_MODE_ARCH == 2 and archobj) or (MERGE_MODE_STRUCT == 1 and not archobj):
# Part shapes
if ptype in ["IfcSite","IfcBuilding","IfcBuildingStorey"]:
for freecadtype,ifctypes in typesmap.items():
if ptype in ifctypes:
obj = getattr(Arch,"make"+freecadtype)(baseobj=None,name=name)
elif baseobj:
obj = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
obj.Shape = shape
if obj:
obj.Label = name
objects[pid] = obj
# properties
if pid in properties:
if hasattr(obj,"IfcAttributes"):
a = obj.IfcAttributes
for p in properties[pid]:
o = ifcfile[p]
if o.is_a("IfcPropertySingleValue"):
a[o.Name.decode("unicode_escape").encode("utf8")] = str(o.NominalValue)
obj.IfcAttributes = a
# color
if FreeCAD.GuiUp and (pid in colors) and hasattr(obj.ViewObject,"ShapeColor"):
if DEBUG: print " setting color: ",int(colors[pid][0]*255),"/",int(colors[pid][1]*255),"/",int(colors[pid][2]*255)
obj.ViewObject.ShapeColor = colors[pid]
# if DEBUG is on, recompute after each shape
if DEBUG: FreeCAD.ActiveDocument.recompute()
count += 1
progressbar.next()
progressbar.stop()
FreeCAD.ActiveDocument.recompute()
if MERGE_MODE_STRUCT == 2:
if DEBUG: print "Joining Structural shapes..."
for host,children in groups.items(): # Structural
if ifcfile[host].is_a("IfcStructuralAnalysisModel"):
compound = []
for c in children:
if c in structshapes.keys():
compound.append(structshapes[c])
del structshapes[c]
if compound:
name = ifcfile[host].Name or "AnalysisModel"
if PREFIX_NUMBERS: name = "ID" + str(host) + " " + name
obj = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
obj.Label = name
obj.Shape = Part.makeCompound(compound)
if structshapes: # remaining Structural shapes
obj = FreeCAD.ActiveDocument.addObject("Part::Feature","UnclaimedStruct")
obj.Shape = Part.makeCompound(structshapes.values())
else:
if DEBUG: print "Processing Struct relationships..."
# groups
for host,children in groups.items():
if ifcfile[host].is_a("IfcStructuralAnalysisModel"):
# print host, ' --> ', children
obj = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroup","AnalysisModel")
objects[host] = obj
if host in objects.keys():
cobs = [objects[child] for child in children if child in objects.keys()]
if cobs:
if DEBUG: print "adding ",len(cobs), " object(s) to ", objects[host].Label
Arch.addComponents(cobs,objects[host])
if DEBUG: FreeCAD.ActiveDocument.recompute()
if MERGE_MODE_ARCH > 2: # if ArchObj is compound or ArchObj not imported
FreeCAD.ActiveDocument.recompute()
# cleaning bad shapes
for obj in objects.values():
if obj.isDerivedFrom("Part::Feature"):
if obj.Shape.isNull():
Arch.rebuildArchShape(obj)
if MERGE_MODE_ARCH == 3:
if DEBUG: print "Joining Arch shapes..."
for host,children in additions.items(): # Arch
if ifcfile[host].is_a("IfcBuildingStorey"):
compound = []
for c in children:
if c in shapes.keys():
compound.append(shapes[c])
del shapes[c]
if c in additions.keys():
for c2 in additions[c]:
if c2 in shapes.keys():
compound.append(shapes[c2])
del shapes[c2]
if compound:
name = ifcfile[host].Name or "Floor"
if PREFIX_NUMBERS: name = "ID" + str(host) + " " + name
obj = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
obj.Label = name
obj.Shape = Part.makeCompound(compound)
if shapes: # remaining Arch shapes
obj = FreeCAD.ActiveDocument.addObject("Part::Feature","UnclaimedArch")
obj.Shape = Part.makeCompound(shapes.values())
else:
if DEBUG: print "Processing Arch relationships..."
# subtractions
if SEPARATE_OPENINGS:
for subtraction in subtractions:
if (subtraction[0] in objects.keys()) and (subtraction[1] in objects.keys()):
if DEBUG: print "subtracting ",objects[subtraction[0]].Label, " from ", objects[subtraction[1]].Label
Arch.removeComponents(objects[subtraction[0]],objects[subtraction[1]])
if DEBUG: FreeCAD.ActiveDocument.recompute()
# additions
for host,children in additions.items():
if host in objects.keys():
cobs = [objects[child] for child in children if child in objects.keys()]
if cobs:
if DEBUG and (len(cobs) > 10) and ( not(Draft.getType(objects[host]) in ["Site","Building","Floor"])):
# avoid huge fusions
print "more than 10 shapes to add: skipping."
else:
if DEBUG: print "adding ",len(cobs), " object(s) to ", objects[host].Label
Arch.addComponents(cobs,objects[host])
if DEBUG: FreeCAD.ActiveDocument.recompute()
FreeCAD.ActiveDocument.recompute()
# cleaning bad shapes
for obj in objects.values():
if obj.isDerivedFrom("Part::Feature"):
if obj.Shape.isNull():
Arch.rebuildArchShape(obj)
FreeCAD.ActiveDocument.recompute()
# 2D elements
if DEBUG and annotations: print "Creating 2D geometry..."
for annotation in annotations:
aid = annotation.id()
if aid in skip: continue # user given id skip list
if "IfcAnnotation" in SKIP: continue # preferences-set type skip list
name = "Annotation"
if annotation.Name:
name = annotation.Name.decode("unicode_escape").encode("utf8")
if PREFIX_NUMBERS: name = "ID" + str(aid) + " " + name
shapes2d = []
for repres in annotation.Representation.Representations:
shapes2d.extend(setRepresentation(repres))
if shapes2d:
sh = Part.makeCompound(shapes2d)
pc = str(int((float(count)/(len(products)+len(annotations))*100)))+"% "
if DEBUG: print pc,"creating object ",aid," : Annotation with shape: ",sh
o = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
o.Shape = sh
count += 1
FreeCAD.ActiveDocument.recompute()
# Materials
if DEBUG and materials: print "Creating materials..."
fcmats = {}
for material in materials:
name = "Material"
if material.Name:
name = material.Name.decode("unicode_escape").encode("utf8")
if MERGE_MATERIALS and (name in fcmats.keys()):
mat = fcmats[name]
else:
mat = Arch.makeMaterial(name=name)
mdict = {}
if material.id() in colors:
mdict["Color"] = str(colors[material.id()])
if mdict:
mat.Material = mdict
fcmats[name] = mat
for o,m in mattable.items():
if m == material.id():
if o in objects:
if hasattr(objects[o],"BaseMaterial"):
objects[o].BaseMaterial = mat
FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
import FreeCADGui
FreeCADGui.SendMsgToActiveView("ViewFit")
print "Finished importing."
return doc
def export(exportList,filename):
"exports FreeCAD contents to an IFC file"
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
FORCEBREP = p.GetBool("ifcExportAsBrep",False)
DEBUG = p.GetBool("ifcDebug",False)
try:
global ifcopenshell
import ifcopenshell
except:
FreeCAD.Console.PrintError("IfcOpenShell was not found on this system. IFC support is disabled\n")
return
version = FreeCAD.Version()
owner = FreeCAD.ActiveDocument.CreatedBy
email = ''
if ("@" in owner) and ("<" in owner):
s = owner.split("<")
owner = s[0]
email = s[1].strip(">")
global template
template = ifctemplate.replace("$version",version[0]+"."+version[1]+" build "+version[2])
template = template.replace("$owner",owner)
template = template.replace("$company",FreeCAD.ActiveDocument.Company)
template = template.replace("$email",email)
template = template.replace("$now",str(int(time.time())))
template = template.replace("$projectid",FreeCAD.ActiveDocument.Uid[:22].replace("-","_"))
template = template.replace("$project",FreeCAD.ActiveDocument.Name)
template = template.replace("$filename",filename)
template = template.replace("$timestamp",str(time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime())))
templatefile = tempfile.mkstemp(suffix=".ifc")[1]
of = pyopen(templatefile,"wb")
of.write(template.encode("utf8"))
of.close()
global ifcfile, surfstyles, clones, sharedobjects
ifcfile = ifcopenshell.open(templatefile)
history = ifcfile.by_type("IfcOwnerHistory")[0]
context = ifcfile.by_type("IfcGeometricRepresentationContext")[0]
project = ifcfile.by_type("IfcProject")[0]
objectslist = Draft.getGroupContents(exportList,walls=True,addgroups=True)
objectslist = Arch.pruneIncluded(objectslist)
products = {} # { Name: IfcEntity, ... }
surfstyles = {} # { (r,g,b): IfcEntity, ... }
clones = {} # { Basename:[Clonename1,Clonename2,...] }
sharedobjects = {} # { BaseName: IfcRepresentationMap }
count = 1
# build clones table
for o in objectslist:
b = Draft.getCloneBase(o,strict=True)
if b:
clones.setdefault(b.Name,[]).append(o.Name)
print "clones table: ",clones
print objectslist
# products
for obj in objectslist:
# getting generic data
name = str(obj.Label.encode("utf8"))
description = str(obj.Description) if hasattr(obj,"Description") else ""
# getting uid
uid = None
if hasattr(obj,"IfcAttributes"):
if "IfcUID" in obj.IfcAttributes.keys():
uid = str(obj.IfcAttributes["IfcUID"])
if not uid:
uid = ifcopenshell.guid.compress(uuid.uuid1().hex)
# setting the IFC type + name conversions
if hasattr(obj,"Role"):
ifctype = obj.Role.replace(" ","")
else:
ifctype = Draft.getType(obj)
if ifctype in translationtable.keys():
ifctype = translationtable[ifctype]
ifctype = "Ifc" + ifctype
if ifctype == "IfcGroup":
continue
ifctypes = []
for v in typesmap.values():
ifctypes.extend(v)
if not ifctype in ifctypes:
ifctype = "IfcBuildingElementProxy"
# getting the "Force BREP" flag
brepflag = False
if hasattr(obj,"IfcAttributes"):
if "FlagForceBrep" in obj.IfcAttributes.keys():
if obj.IfcAttributes["FlagForceBrep"] == "True":
brepflag = True
# getting the representation
representation,placement,shapetype = getRepresentation(ifcfile,context,obj,forcebrep=(brepflag or FORCEBREP))
if DEBUG: print str(count).ljust(3)," : ", ifctype, " (",shapetype,") : ",name
# setting the arguments
args = [uid,history,name,description,None,placement,representation,None]
if ifctype in ["IfcSlab","IfcFooting","IfcRoof"]:
args = args + ["NOTDEFINED"]
elif ifctype in ["IfcWindow","IfcDoor"]:
args = args + [obj.Width.Value/1000.0, obj.Height.Value/1000.0]
elif ifctype == "IfcSpace":
args = args + ["ELEMENT","INTERNAL",obj.Shape.BoundBox.ZMin/1000.0]
elif ifctype == "IfcBuildingElementProxy":
args = args + ["ELEMENT"]
elif ifctype == "IfcSite":
latitude = None
longitude = None
elevation = None
landtitlenumber = None
address = None
args = args + ["ELEMENT",latitude,longitude,elevation,landtitlenumber,address]
elif ifctype == "IfcBuilding":
args = args + ["ELEMENT",None,None,None]
elif ifctype == "IfcBuildingStorey":
args = args + ["ELEMENT",obj.Placement.Base.z]
# creating the product
product = getattr(ifcfile,"create"+ifctype)(*args)
products[obj.Name] = product
# additions
if hasattr(obj,"Additions") and (shapetype == "extrusion"):
for o in obj.Additions:
r2,p2,c2 = getRepresentation(ifcfile,context,o,forcebrep=True)
if DEBUG: print " adding ",c2," : ",str(o.Label)
prod2 = ifcfile.createIfcBuildingElementProxy(ifcopenshell.guid.compress(uuid.uuid1().hex),history,str(o.Label),None,None,p2,r2,None,"ELEMENT")
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'Addition','',product,[prod2])
# subtractions
if hasattr(obj,"Subtractions") and (shapetype == "extrusion"):
for o in obj.Subtractions:
r2,p2,c2 = getRepresentation(ifcfile,context,o,forcebrep=True,subtraction=True)
if DEBUG: print " subtracting ",c2," : ",str(o.Label)
prod2 = ifcfile.createIfcOpeningElement(ifcopenshell.guid.compress(uuid.uuid1().hex),history,str(o.Label),None,None,p2,r2,None)
ifcfile.createIfcRelVoidsElement(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'Subtraction','',product,prod2)
# properties
if hasattr(obj,"IfcAttributes"):
props = []
for key in obj.IfcAttributes:
if not (key in ["IfcUID","FlagForceBrep"]):
r = obj.IfcAttributes[key].strip(")").split("(")
if len(r) == 1:
tp = "IfcText"
val = r[0]
else:
tp = r[0]
val = "(".join(r[1:])
val = val.strip("'")
val = val.strip('"')
if DEBUG: print " property ",key," : ",str(val), " (", str(tp), ")"
if tp in ["IfcLabel","IfcText","IfcIdentifier"]:
val = str(val)
elif tp == "IfcBoolean":
if val == ".T.":
val = True
else:
val = False
elif tp == "IfcInteger":
val = int(val)
else:
val = float(val)
props.append(ifcfile.createIfcPropertySingleValue(str(key),None,ifcfile.create_entity(str(tp),val),None))
if props:
pset = ifcfile.createIfcPropertySet(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'PropertySet',None,props)
ifcfile.createIfcRelDefinesByProperties(ifcopenshell.guid.compress(uuid.uuid1().hex),history,None,None,[product],pset)
count += 1
# relationships
sites = []
buildings = []
floors = []
for site in Draft.getObjectsOfType(objectslist,"Site"):
for building in Draft.getObjectsOfType(site.Group,"Building"):
for floor in Draft.getObjectsOfType(building.Group,"Floor"):
children = Draft.getGroupContents(floor,walls=True)
children = Arch.pruneIncluded(children)
children = [products[c.Name] for c in children if c.Name in products.keys()]
floor = products[floor.Name]
ifcfile.createIfcRelContainedInSpatialStructure(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'StoreyLink','',children,floor)
floors.append(floor)
building = products[building.Name]
if floors:
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'BuildingLink','',building,floors)
buildings.append(building)
site = products[site.Name]
if buildings:
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'SiteLink','',site,buildings)
sites.append(site)
if not sites:
if DEBUG: print "adding default site"
sites = [ifcfile.createIfcSite(ifcopenshell.guid.compress(uuid.uuid1().hex),history,"Default Site",'',None,None,None,None,"ELEMENT",None,None,None,None,None)]
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'ProjectLink','',project,sites)
if not buildings:
if DEBUG: print "adding default building"
buildings = [ifcfile.createIfcBuilding(ifcopenshell.guid.compress(uuid.uuid1().hex),history,"Default Building",'',None,None,None,None,"ELEMENT",None,None,None)]
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'SiteLink','',sites[0],buildings)
ifcfile.createIfcRelContainedInSpatialStructure(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'BuildingLink','',products.values(),buildings[0])
# materials
materials = {}
for m in Arch.getDocumentMaterials():
mat = ifcfile.createIfcMaterial(m.Label.encode("utf8"))
materials[m.Label] = mat
if "Color" in m.Material:
rgb = tuple([float(f) for f in m.Material['Color'].strip("()").split(",")])
col = ifcfile.createIfcColourRgb(None,rgb[0],rgb[1],rgb[2])
ssr = ifcfile.createIfcSurfaceStyleRendering(col,None,None,None,None,None,None,None,"FLAT")
iss = ifcfile.createIfcSurfaceStyle(None,"BOTH",[ssr])
psa = ifcfile.createIfcPresentationStyleAssignment([iss])
isi = ifcfile.createIfcStyledItem(None,[psa],None)
isr = ifcfile.createIfcStyledRepresentation(context,"Style","Material",[isi])
imd = ifcfile.createIfcMaterialDefinitionRepresentation(None,None,[isr],mat)
relobjs = []
for o in m.InList:
if hasattr(o,"BaseMaterial"):
if o.BaseMaterial:
if o.BaseMaterial.Name == m.Name:
if o.Name in products:
relobjs.append(products[o.Name])
if relobjs:
ifcfile.createIfcRelAssociatesMaterial(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'MaterialLink','',relobjs,mat)
if DEBUG: print "writing ",filename,"..."
filename = decode(filename)
ifcfile.write(filename)
def getRepresentation(ifcfile,context,obj,forcebrep=False,subtraction=False,tessellation=1):
"""returns an IfcShapeRepresentation object or None"""
import Part,math,DraftGeomUtils,DraftVecUtils
shapes = []
placement = None
productdef = None
shapetype = "no shape"
tostore = False
# check for clones
for k,v in clones.items():
if (obj.Name == k ) or (obj.Name in v):
if k in sharedobjects:
# base shape already exists
repmap = sharedobjects[k]
pla = obj.Placement
axis1 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(1,0,0))))
axis2 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,1,0))))
axis3 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,0,1))))
origin = ifcfile.createIfcCartesianPoint(tuple(FreeCAD.Vector(pla.Base).multiply(0.001)))
transf = ifcfile.createIfcCartesianTransformationOperator3D(axis1,axis2,origin,1.0,axis3)
mapitem = ifcfile.createIfcMappedItem(repmap,transf)
shapes = [mapitem]
solidType = "MappedRepresentation"
shapetype = "clone"
else:
# base shape not yet created
tostore = k
if (not shapes) and (not forcebrep):
profile = None
if hasattr(obj,"Proxy"):
if hasattr(obj.Proxy,"getProfiles"):
p = obj.Proxy.getProfiles(obj,noplacement=True)
extrusionv = obj.Proxy.getExtrusionVector(obj,noplacement=True)
if not DraftVecUtils.isNull(extrusionv):
extrusionv.multiply(0.001) # to meters
if (len(p) == 1) and extrusionv:
p = p[0]
p.scale(0.001) # to meters
r = obj.Proxy.getPlacement(obj)
r.Base = r.Base.multiply(0.001) # to meters
if len(p.Edges) == 1:
pxvc = ifcfile.createIfcDirection((1.0,0.0))
povc = ifcfile.createIfcCartesianPoint((0.0,0.0))
pt = ifcfile.createIfcAxis2Placement2D(povc,pxvc)
# extruded circle
if isinstance(p.Edges[0].Curve,Part.Circle):
profile = ifcfile.createIfcCircleProfileDef("AREA",None,pt, p.Edges[0].Curve.Radius)
# extruded ellipse
elif isinstance(p.Edges[0].Curve,Part.Ellipse):
profile = ifcfile.createIfcEllipseProfileDef("AREA",None,pt, p.Edges[0].Curve.MajorRadius, p.Edges[0].Curve.MinorRadius)
else:
curves = False
for e in p.Edges:
if isinstance(e.Curve,Part.Circle):
curves = True
# extruded polyline
if not curves:
w = Part.Wire(DraftGeomUtils.sortEdges(p.Edges))
pts = [ifcfile.createIfcCartesianPoint(tuple(v.Point)[:2]) for v in w.Vertexes+[w.Vertexes[0]]]
pol = ifcfile.createIfcPolyline(pts)
# extruded composite curve
else:
segments = []
last = None
edges = DraftGeomUtils.sortEdges(p.Edges)
for e in edges:
if isinstance(e.Curve,Part.Circle):
follow = True
if last:
if not DraftVecUtils.equals(last,e.Vertexes[0].Point):
follow = False
last = e.Vertexes[0].Point
else:
last = e.Vertexes[-1].Point
else:
last = e.Vertexes[-1].Point
p1 = math.degrees(-DraftVecUtils.angle(e.Vertexes[0].Point.sub(e.Curve.Center)))
p2 = math.degrees(-DraftVecUtils.angle(e.Vertexes[-1].Point.sub(e.Curve.Center)))
da = DraftVecUtils.angle(e.valueAt(e.FirstParameter+0.1).sub(e.Curve.Center),e.Vertexes[0].Point.sub(e.Curve.Center))
if p1 < 0:
p1 = 360 + p1
if p2 < 0:
p2 = 360 + p2
if da > 0:
follow = not(follow)
xvc = ifcfile.createIfcDirection((1.0,0.0))
ovc = ifcfile.createIfcCartesianPoint(tuple(e.Curve.Center)[:2])
plc = ifcfile.createIfcAxis2Placement2D(ovc,xvc)
cir = ifcfile.createIfcCircle(plc,e.Curve.Radius)
curve = ifcfile.createIfcTrimmedCurve(cir,[ifcfile.createIfcParameterValue(p1)],[ifcfile.createIfcParameterValue(p2)],follow,"PARAMETER")
else:
verts = [vertex.Point for vertex in e.Vertexes]
if last:
if not DraftVecUtils.equals(last,verts[0]):
verts.reverse()
last = e.Vertexes[0].Point
else:
last = e.Vertexes[-1].Point
else:
last = e.Vertexes[-1].Point
pts = [ifcfile.createIfcCartesianPoint(tuple(v)[:2]) for v in verts]
curve = ifcfile.createIfcPolyline(pts)
segment = ifcfile.createIfcCompositeCurveSegment("CONTINUOUS",True,curve)
segments.append(segment)
pol = ifcfile.createIfcCompositeCurve(segments,False)
profile = ifcfile.createIfcArbitraryClosedProfileDef("AREA",None,pol)
if profile:
xvc = ifcfile.createIfcDirection(tuple(r.Rotation.multVec(FreeCAD.Vector(1,0,0))))
zvc = ifcfile.createIfcDirection(tuple(r.Rotation.multVec(FreeCAD.Vector(0,0,1))))
ovc = ifcfile.createIfcCartesianPoint(tuple(r.Base))
lpl = ifcfile.createIfcAxis2Placement3D(ovc,zvc,xvc)
edir = ifcfile.createIfcDirection(tuple(FreeCAD.Vector(extrusionv).normalize()))
shape = ifcfile.createIfcExtrudedAreaSolid(profile,lpl,edir,extrusionv.Length)
shapes.append(shape)
solidType = "SweptSolid"
shapetype = "extrusion"
if not shapes:
# brep representation
fcshape = None
solidType = "Brep"
if subtraction:
if hasattr(obj,"Proxy"):
if hasattr(obj.Proxy,"getSubVolume"):
fcshape = obj.Proxy.getSubVolume(obj)
if not fcshape:
if hasattr(obj,"Shape"):
if obj.Shape:
if not obj.Shape.isNull():
fcshape = obj.Shape
elif hasattr(obj,"Terrain"):
if obj.Terrain:
if hasattr(obj.Terrain,"Shape"):
if obj.Terrain.Shape:
if not obj.Terrain.Shape.isNull():
fcshape = obj.Terrain.Shape
if fcshape:
solids = []
if fcshape.Solids:
dataset = fcshape.Solids
else:
dataset = fcshape.Shells
print "Warning! object contains no solids"
for fcsolid in dataset:
fcsolid.scale(0.001) # to meters
faces = []
curves = False
for fcface in fcsolid.Faces:
for e in fcface.Edges:
if not isinstance(e.Curve,Part.Line):
if e.curvatureAt(e.FirstParameter+(e.LastParameter-e.FirstParameter)/2) > 0.0001:
curves = True
break
if curves:
#shapetype = "triangulated"
#tris = fcsolid.tessellate(tessellation)
#for tri in tris[1]:
# pts = [ifcfile.createIfcCartesianPoint(tuple(tris[0][i])) for i in tri]
# loop = ifcfile.createIfcPolyLoop(pts)
# bound = ifcfile.createIfcFaceOuterBound(loop,True)
# face = ifcfile.createIfcFace([bound])
# faces.append(face)
fcsolid = Arch.removeCurves(fcsolid)
shapetype = "brep"
for fcface in fcsolid.Faces:
loops = []
verts = [v.Point for v in Part.Wire(DraftGeomUtils.sortEdges(fcface.OuterWire.Edges)).Vertexes]
c = fcface.CenterOfMass
v1 = verts[0].sub(c)
v2 = verts[1].sub(c)
n = fcface.normalAt(0,0)
if DraftVecUtils.angle(v2,v1,n) >= 0:
                        verts.reverse() # inverting verts order if the direction is counterclockwise
pts = [ifcfile.createIfcCartesianPoint(tuple(v)) for v in verts]
loop = ifcfile.createIfcPolyLoop(pts)
bound = ifcfile.createIfcFaceOuterBound(loop,True)
loops.append(bound)
for wire in fcface.Wires:
if wire.hashCode() != fcface.OuterWire.hashCode():
verts = [v.Point for v in Part.Wire(DraftGeomUtils.sortEdges(wire.Edges)).Vertexes]
v1 = verts[0].sub(c)
v2 = verts[1].sub(c)
if DraftVecUtils.angle(v2,v1,DraftVecUtils.neg(n)) >= 0:
verts.reverse()
pts = [ifcfile.createIfcCartesianPoint(tuple(v)) for v in verts]
loop = ifcfile.createIfcPolyLoop(pts)
bound = ifcfile.createIfcFaceBound(loop,True)
loops.append(bound)
face = ifcfile.createIfcFace(loops)
faces.append(face)
shell = ifcfile.createIfcClosedShell(faces)
shape = ifcfile.createIfcFacetedBrep(shell)
shapes.append(shape)
if shapes:
if tostore:
subrep = ifcfile.createIfcShapeRepresentation(context,'Body',solidType,shapes)
xvc = ifcfile.createIfcDirection((1.0,0.0,0.0))
zvc = ifcfile.createIfcDirection((0.0,0.0,1.0))
ovc = ifcfile.createIfcCartesianPoint((0.0,0.0,0.0))
gpl = ifcfile.createIfcAxis2Placement3D(ovc,zvc,xvc)
repmap = ifcfile.createIfcRepresentationMap(gpl,subrep)
pla = FreeCAD.ActiveDocument.getObject(k).Placement
axis1 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(1,0,0))))
axis2 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,1,0))))
origin = ifcfile.createIfcCartesianPoint(tuple(FreeCAD.Vector(pla.Base).multiply(0.001)))
axis3 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,0,1))))
transf = ifcfile.createIfcCartesianTransformationOperator3D(axis1,axis2,origin,1.0,axis3)
mapitem = ifcfile.createIfcMappedItem(repmap,transf)
shapes = [mapitem]
sharedobjects[k] = repmap
solidType = "MappedRepresentation"
# set surface style
if FreeCAD.GuiUp and (not subtraction) and hasattr(obj.ViewObject,"ShapeColor"):
# only set a surface style if the object has no material.
# apparently not needed, no harm in having both.
#m = False
#if hasattr(obj,"BaseMaterial"):
# if obj.BaseMaterial:
# if "Color" in obj.BaseMaterial.Material:
# m = True
#if not m:
rgb = obj.ViewObject.ShapeColor[:3]
if rgb in surfstyles:
psa = surfstyles[rgb]
else:
col = ifcfile.createIfcColourRgb(None,rgb[0],rgb[1],rgb[2])
ssr = ifcfile.createIfcSurfaceStyleRendering(col,None,None,None,None,None,None,None,"FLAT")
iss = ifcfile.createIfcSurfaceStyle(None,"BOTH",[ssr])
psa = ifcfile.createIfcPresentationStyleAssignment([iss])
surfstyles[rgb] = psa
for shape in shapes:
isi = ifcfile.createIfcStyledItem(shape,[psa],None)
xvc = ifcfile.createIfcDirection((1.0,0.0,0.0))
zvc = ifcfile.createIfcDirection((0.0,0.0,1.0))
ovc = ifcfile.createIfcCartesianPoint((0.0,0.0,0.0))
gpl = ifcfile.createIfcAxis2Placement3D(ovc,zvc,xvc)
placement = ifcfile.createIfcLocalPlacement(None,gpl)
representation = ifcfile.createIfcShapeRepresentation(context,'Body',solidType,shapes)
productdef = ifcfile.createIfcProductDefinitionShape(None,None,[representation])
return productdef,placement,shapetype
def setRepresentation(representation):
"""Returns a shape from a 2D IfcShapeRepresentation"""
def getPolyline(ent):
pts = []
for p in ent.Points:
c = p.Coordinates
pts.append(FreeCAD.Vector(c[0],c[1],c[2] if len(c) > 2 else 0))
return Part.makePolygon(pts)
def getCircle(ent):
c = ent.Position.Location.Coordinates
c = FreeCAD.Vector(c[0],c[1],c[2] if len(c) > 2 else 0)
r = ent.Radius
return Part.makeCircle(r,c)
result = []
if representation.is_a("IfcShapeRepresentation"):
for item in representation.Items:
if item.is_a("IfcGeometricCurveSet"):
for el in item.Elements:
if el.is_a("IfcPolyline"):
result.append(getPolyline(el))
elif el.is_a("IfcCircle"):
result.append(getCircle(el))
elif el.is_a("IfcTrimmedCurve"):
base = el.BasisCurve
t1 = el.Trim1[0].wrappedValue
t2 = el.Trim2[0].wrappedValue
if not el.SenseAgreement:
t1,t2 = t2,t1
if base.is_a("IfcPolyline"):
bc = getPolyline(base)
result.append(bc)
elif base.is_a("IfcCircle"):
bc = getCircle(base)
e = Part.ArcOfCircle(bc.Curve,math.radians(t1),math.radians(t2)).toShape()
d = base.Position.RefDirection.DirectionRatios
v = FreeCAD.Vector(d[0],d[1],d[2] if len(d) > 2 else 0)
a = -DraftVecUtils.angle(v)
e.rotate(bc.Curve.Center,FreeCAD.Vector(0,0,1),math.degrees(a))
result.append(e)
return result
def getRotation(entity):
"returns a FreeCAD rotation from an IfcProduct with a IfcMappedItem representation"
try:
rmap = entity.Representation.Representations[0].Items[0].MappingTarget
u = FreeCAD.Vector(rmap.Axis1.DirectionRatios)
v = FreeCAD.Vector(rmap.Axis2.DirectionRatios)
w = FreeCAD.Vector(rmap.Axis3.DirectionRatios)
except AttributeError:
return FreeCAD.Rotation()
import WorkingPlane
p = WorkingPlane.plane(u=u,v=v,w=w)
return p.getRotation().Rotation
| lgpl-2.1 | 3,035,360,781,126,952,400 | 45.694091 | 181 | 0.530194 | false |
erangre/Dioptas | dioptas/tests/unit_tests/test_OverlayModel.py | 1 | 2318 | # -*- coding: utf-8 -*-
# Dioptas - GUI program for fast processing of 2D X-ray diffraction data
# Principal author: Clemens Prescher ([email protected])
# Copyright (C) 2014-2019 GSECARS, University of Chicago, USA
# Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany
# Copyright (C) 2019 DESY, Hamburg, Germany
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import os
import numpy as np
from numpy.testing import assert_array_almost_equal
from ...model.util import Pattern
from ...model.OverlayModel import OverlayModel
unittest_path = os.path.dirname(__file__)
data_path = os.path.join(unittest_path, '../data')
class OverlayModelTest(unittest.TestCase):
def setUp(self):
self.x = np.linspace(0.1, 15, 100)
self.y = np.sin(self.x)
self.pattern = Pattern(self.x, self.y)
self.overlay_model = OverlayModel()
def test_add_overlay(self):
x_overlay = np.linspace(0, 10)
y_overlay = np.linspace(0, 100)
self.overlay_model.add_overlay(x_overlay, y_overlay, "dummy")
self.assertEqual(len(self.overlay_model.overlays), 1)
new_overlay = self.overlay_model.get_overlay(0)
self.assertEqual(new_overlay.name, "dummy")
assert_array_almost_equal(new_overlay.x, x_overlay)
assert_array_almost_equal(new_overlay.y, y_overlay)
def test_add_overlay_from_file(self):
filename = os.path.join(data_path, 'pattern_001.xy')
self.overlay_model.add_overlay_file(filename)
self.assertEqual(len(self.overlay_model.overlays), 1)
self.assertEqual(self.overlay_model.get_overlay(0).name, ''.join(os.path.basename(filename).split('.')[0:-1])) | gpl-3.0 | 5,521,971,999,401,059,000 | 40.410714 | 118 | 0.710095 | false |
greenlin/universal-portfolios | universal/result.py | 1 | 10866 | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pickle
from universal import tools
class PickleMixin(object):
def save(self, filename):
""" Save object as a pickle """
with open(filename, 'wb') as f:
pickle.dump(self, f, -1)
@classmethod
def load(cls, filename):
""" Load pickled object. """
with open(filename, 'rb') as f:
return pickle.load(f)
class AlgoResult(PickleMixin):
""" Results returned by algo's run method. The class containts useful
metrics such as sharpe ratio, mean return, drawdowns, ... and also
many visualizations.
You can specify transactions by setting AlgoResult.fee. Fee is
expressed in a percentages as a one-round fee.
"""
def __init__(self, X, B):
"""
:param X: Price relatives.
:param B: Weights.
"""
# set initial values
self._fee = 0.
self._B = B
self.rf_rate = 0.
self._X = X
# update logarithms, fees, etc.
self._recalculate()
@property
def X(self):
return self._X
@X.setter
def X(self, _X):
self._X = _X
self._recalculate()
@property
def B(self):
return self._B
@B.setter
def B(self, _B):
self._B = _B
self._recalculate()
@property
def fee(self):
return self._fee
@fee.setter
def fee(self, value):
""" Set transaction costs. Fees can be either float or Series
of floats for individual assets with proper indices. """
if isinstance(value, dict):
value = pd.Series(value)
if isinstance(value, pd.Series):
missing = set(self.X.columns) - set(value.index)
assert len(missing) == 0, 'Missing fees for {}'.format(missing)
self._fee = value
self._recalculate()
def _recalculate(self):
# calculate return for individual stocks
r = (self.X - 1) * self.B
self.asset_r = r + 1
self.r = r.sum(axis=1) + 1
# stock went bankrupt
self.r[self.r < 0] = 0.
# add fees
if not isinstance(self._fee, float) or self._fee != 0:
fees = (self.B.shift(-1).mul(self.r, axis=0) - self.B * self.X).abs()
fees.iloc[0] = self.B.ix[0]
fees.iloc[-1] = 0.
fees *= self._fee
self.asset_r -= fees
self.r -= fees.sum(axis=1)
self.r_log = np.log(self.r)
@property
def weights(self):
return self.B
@property
def equity(self):
return self.r.cumprod()
@property
def equity_decomposed(self):
""" Return equity decomposed to individual assets. """
return self.asset_r.cumprod()
@property
def asset_equity(self):
return self.X.cumprod()
@property
def total_wealth(self):
return self.r.prod()
@property
def profit_factor(self):
x = self.r_log
up = x[x > 0].sum()
down = -x[x < 0].sum()
return up / down if down != 0 else np.inf
@property
def sharpe(self):
""" Compute annualized sharpe ratio from log returns. If data does
not contain datetime index, assume daily frequency with 252 trading days a year.
"""
return tools.sharpe(self.r_log, rf_rate=self.rf_rate, freq=self.freq())
@property
def information(self):
""" Information ratio benchmarked against uniform CRP portfolio. """
s = self.X.mean(axis=1)
x = self.r_log - np.log(s)
mu, sd = x.mean(), x.std()
freq = self.freq()
if sd > 1e-8:
return mu / sd * np.sqrt(freq)
elif mu > 1e-8:
return np.inf * np.sign(mu)
else:
return 0.
@property
def growth_rate(self):
return self.r_log.mean() * self.freq()
@property
def volatility(self):
return np.sqrt(self.freq()) * self.r_log.std()
@property
def annualized_return(self):
return np.exp(self.r_log.mean() * self.freq()) - 1
@property
def annualized_volatility(self):
return np.exp(self.r_log).std() * np.sqrt(self.freq())
@property
def drawdown_period(self):
        ''' Returns longest drawdown period. Stagnation is a drawdown too. '''
x = self.equity
period = [0.] * len(x)
peak = 0
for i in range(len(x)):
# new peak
if x[i] > peak:
peak = x[i]
period[i] = 0
else:
period[i] = period[i-1] + 1
return max(period) * 252. / self.freq()
@property
def max_drawdown(self):
''' Returns highest drawdown in percentage. '''
x = self.equity
return max(1. - x / x.cummax())
@property
def winning_pct(self):
x = self.r_log
win = (x > 0).sum()
all_trades = (x != 0).sum()
return float(win) / all_trades
def freq(self, x=None):
""" Number of data items per year. If data does not contain
datetime index, assume daily frequency with 252 trading days a year."""
x = x or self.r
return tools.freq(x.index)
def summary(self, name=None):
return """Summary{}:
Profit factor: {:.2f}
Sharpe ratio: {:.2f}
Information ratio (wrt UCRP): {:.2f}
Annualized return: {:.2f}%
Annualized volatility: {:.2f}%
Longest drawdown: {:.0f} days
Max drawdown: {:.2f}%
Winning days: {:.1f}%
""".format(
'' if name is None else ' for ' + name,
self.profit_factor,
self.sharpe,
self.information,
100 * self.annualized_return,
100 * self.annualized_volatility,
self.drawdown_period,
100 * self.max_drawdown,
100 * self.winning_pct
)
def plot(self, weights=True, assets=True, portfolio_label='PORTFOLIO', **kwargs):
""" Plot equity of all assets plus our strategy.
:param weights: Plot weights as a subplot.
:param assets: Plot asset prices.
:return: List of axes.
"""
res = ListResult([self], [portfolio_label])
if not weights:
ax1 = res.plot(assets=assets, **kwargs)
return [ax1]
else:
plt.figure(1)
ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)
res.plot(assets=assets, ax=ax1, **kwargs)
ax2 = plt.subplot2grid((3, 1), (2, 0), sharex=ax1)
# plot weights as lines
if self.B.values.min() < -0.01:
self.B.plot(ax=ax2, ylim=(min(0., self.B.values.min()), max(1., self.B.values.max())),
legend=False, colormap=plt.get_cmap('jet'))
else:
# fix rounding errors near zero
if self.B.values.min() < 0:
B = self.B - self.B.values.min()
else:
B = self.B
B.plot(ax=ax2, ylim=(0., max(1., B.values.max())),
legend=False, colormap=plt.get_cmap('jet'), kind='area', stacked=True)
plt.ylabel('weights')
return [ax1, ax2]
def hedge(self, result=None):
""" Hedge results with results of other strategy (subtract weights).
:param result: Other result object. Default is UCRP.
:return: New AlgoResult object.
"""
if result is None:
from algos import CRP
result = CRP().run(self.X.cumprod())
return AlgoResult(self.X, self.B - result.B)
def plot_decomposition(self, **kwargs):
""" Decompose equity into components of individual assets and plot
them. Does not take fees into account. """
ax = self.equity_decomposed.plot(**kwargs)
return ax
@property
def importance(self):
ws = self.weights.sum()
return ws / sum(ws)
class ListResult(list, PickleMixin):
""" List of AlgoResults. """
def __init__(self, results=None, names=None):
results = results if results is not None else []
names = names if names is not None else []
super(ListResult, self).__init__(results)
self.names = names
def append(self, result, name):
super(ListResult, self).append(result)
self.names.append(name)
def to_dataframe(self):
""" Calculate equities for all results and return one dataframe. """
eq = {}
for result, name in zip(self, self.names):
eq[name] = result.equity
return pd.DataFrame(eq)
def save(self, filename, **kwargs):
# do not save it with fees
#self.fee = 0.
#self.to_dataframe().to_pickle(*args, **kwargs)
with open(filename, 'wb') as f:
pickle.dump(self, f, -1)
@classmethod
def load(cls, filename):
# df = pd.read_pickle(*args, **kwargs)
# return cls([df[c] for c in df], df.columns)
with open(filename, 'rb') as f:
return pickle.load(f)
@property
def fee(self):
return {name: result.fee for result, name in zip(self, self.names)}
@fee.setter
def fee(self, value):
for result in self:
result.fee = value
def summary(self):
return '\n'.join([result.summary(name) for result, name in zip(self, self.names)])
def plot(self, ucrp=False, bah=False, assets=False, **kwargs):
""" Plot strategy equity.
:param ucrp: Add uniform CRP as a benchmark.
:param bah: Add Buy-And-Hold portfolio as a benchmark.
:param assets: Add asset prices.
:param kwargs: Additional arguments for pd.DataFrame.plot
"""
# NOTE: order of plotting is important because of coloring
# plot portfolio
d = self.to_dataframe()
portfolio = d.copy()
ax = portfolio.plot(linewidth=3., legend=False, **kwargs)
kwargs['ax'] = ax
ax.set_ylabel('Total wealth')
# plot uniform constant rebalanced portfolio
if ucrp:
from algos import CRP
crp_algo = CRP().run(self[0].X.cumprod())
crp_algo.fee = self[0].fee
d['UCRP'] = crp_algo.equity
d[['UCRP']].plot(**kwargs)
# add bah
if bah:
from algos import BAH
bah_algo = BAH().run(self[0].X.cumprod())
bah_algo.fee = self[0].fee
d['BAH'] = bah_algo.equity
d[['BAH']].plot(**kwargs)
# add individual assets
if assets:
self[0].asset_equity.plot(colormap=plt.get_cmap('jet'), **kwargs)
# plot portfolio again to highlight it
kwargs['color'] = 'blue'
portfolio.plot(linewidth=3., **kwargs)
return ax
| mit | -8,270,731,566,286,813,000 | 28.769863 | 102 | 0.544543 | false |
jsharkey13/isaac-selenium-testing | isaactest/tests/symbolic_q_text_entry_correct.py | 1 | 2725 | import time
from ..utils.log import log, INFO, ERROR, PASS
from ..utils.isaac import answer_symbolic_q_text_entry, open_accordion_section, submit_login_form, assert_logged_in
from ..utils.i_selenium import assert_tab, image_div
from ..utils.i_selenium import wait_for_xpath_element
from ..tests import TestWithDependency
from selenium.common.exceptions import TimeoutException, NoSuchElementException
__all__ = ["symbolic_q_text_entry_correct"]
#####
# Test : Symbolic Questions Text Entry Correct Answers
#####
@TestWithDependency("SYMBOLIC_Q_TEXT_ENTRY_CORRECT")
def symbolic_q_text_entry_correct(driver, Users, ISAAC_WEB, WAIT_DUR, **kwargs):
"""Test if symbolic questions can be answered correctly with text entry.
- 'driver' should be a Selenium WebDriver.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
"""
assert_tab(driver, ISAAC_WEB)
driver.get(ISAAC_WEB + "/questions/_regression_test_")
time.sleep(WAIT_DUR)
assert_tab(driver, ISAAC_WEB + "/questions/_regression_test_")
time.sleep(WAIT_DUR)
try:
open_accordion_section(driver, 4)
sym_question = driver.find_element_by_xpath("//div[@ng-switch-when='isaacSymbolicQuestion']")
except NoSuchElementException:
log(ERROR, "Can't find the symbolic question; can't continue!")
return False
log(INFO, "Attempt to enter correct answer.")
if not answer_symbolic_q_text_entry(sym_question, "(((x)))", wait_dur=WAIT_DUR):
log(ERROR, "Couldn't answer symbolic Question; can't continue!")
return False
time.sleep(WAIT_DUR)
try:
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacSymbolicQuestion']//h1[text()='Correct!']")
log(INFO, "A 'Correct!' message was displayed as expected.")
wait_for_xpath_element(driver, "(//div[@ng-switch-when='isaacSymbolicQuestion']//p[text()='This is a correct choice. It requires an exact match!'])[2]")
log(INFO, "The editor entered explanation text was correctly shown.")
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacSymbolicQuestion']//strong[text()='Well done!']")
log(INFO, "The 'Well done!' message was correctly shown.")
log(INFO, "Avoid rate limiting: wait 1 minute.")
time.sleep(WAIT_DUR)
log(PASS, "Symbolic Question 'correct value, correct unit' behavior as expected.")
return True
except TimeoutException:
image_div(driver, "ERROR_symbolic_q_correct")
log(ERROR, "The messages shown for a correct answer were not all displayed; see 'ERROR_symbolic_q_correct.png'!")
return False
| mit | 8,669,670,329,497,179,000 | 48.545455 | 160 | 0.686972 | false |
EricssonResearch/calvin-base | calvin/actor/actor.py | 1 | 36802 | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wrapt
import functools
import time
import copy
from calvin.utilities import calvinuuid
from calvin.actor import actorport
from calvin.utilities.calvinlogger import get_logger
from calvin.utilities.utils import enum
from calvin.runtime.north.calvin_token import Token, ExceptionToken
# from calvin.runtime.north import calvincontrol
from calvin.runtime.north.replicationmanager import ReplicationId
import calvin.requests.calvinresponse as response
from calvin.runtime.south.async import async
from calvin.runtime.north.plugins.authorization_checks import check_authorization_plugin_list
from calvin.utilities.calvin_callback import CalvinCB
from calvin.csparser.port_property_syntax import get_port_property_capabilities, get_port_property_runtime
from calvin.runtime.north.calvinsys import get_calvinsys
from calvin.runtime.north.calvinlib import get_calvinlib
_log = get_logger(__name__)
# Tests in test_manage_decorator.py
def manage(include=None, exclude=None):
"""
Decorator for Actor::init() providing automatic management of state variables.
Usage:
@manage() # Manage every instance variable known upon completion of __init__
@manage(include = []) # Manage nothing
@manage(include = [foo, bar]) # Manage self.foo and self.bar only. Equivalent to @manage([foo, bar])
@manage(exclude = [foo, bar]) # Manage everything except self.foo and self.bar
@manage(exclude = []) # Same as @manage()
@manage(<list>) # Same as @manage(include = <list>)
N.B. If include and exclude are both present, exclude will be disregarded.
"""
if include and type(include) is not list or exclude and type(exclude) is not list:
raise Exception("@manage decorator: Must use list as argument")
include_set = set(include) if include else set()
exclude_set = set(exclude) if exclude else set()
# Using wrapt since we need to preserve the signature of the wrapped signature.
# See http://wrapt.readthedocs.org/en/latest/index.html
# FIXME: Since we use wrapt here, we might as well use it in guard and condition too.
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
# Exclude the instance variables added by superclasses
exclude_set.update(instance.__dict__)
x = wrapped(*args, **kwargs)
if include is None:
# include set not given, so construct the implicit include set
include_set.update(instance.__dict__)
include_set.remove('_managed')
include_set.difference_update(exclude_set)
instance._managed.update(include_set)
return x
return wrapper
def condition(action_input=[], action_output=[]):
"""
Decorator condition specifies the required input data and output space.
Both parameters are lists of port names
Return value is a tuple (did_fire, output_available, exhaust_list)
"""
tokens_produced = len(action_output)
tokens_consumed = len(action_input)
def wrap(action_method):
@functools.wraps(action_method)
def condition_wrapper(self):
#
# Check if input ports have enough tokens. Note that all([]) evaluates to True
#
input_ok = all(self.inports[portname].tokens_available(1) for portname in action_input)
#
# Check if output port have enough free token slots
#
output_ok = all(self.outports[portname].tokens_available(1) for portname in action_output)
if not input_ok or not output_ok:
return (False, output_ok, ())
#
# Build the arguments for the action from the input port(s)
#
exhausted_ports = set()
exception = False
args = []
for portname in action_input:
port = self.inports[portname]
token, exhaust = port.read()
is_exception_token = isinstance(token, ExceptionToken)
exception = exception or is_exception_token
args.append(token if is_exception_token else token.value )
if exhaust:
exhausted_ports.add(port)
#
# Check for exceptional conditions
#
if exception:
# FIXME: Simplify exception handling
production = self.exception_handler(action_method, args) or ()
else:
#
# Perform the action (N.B. the method may be wrapped in a decorator)
# Action methods not returning a production (i.e. no output ports) returns None
# => replace with empty_production constant
#
production = action_method(self, *args) or ()
valid_production = (tokens_produced == len(production))
if not valid_production:
#
# Error condition
#
action = "%s.%s" % (self._type, action_method.__name__)
raise Exception("%s invalid production %s, expected %s" % (action, str(production), str(tuple(action_output))))
#
# Write the results from the action to the output port(s)
#
for portname, retval in zip(action_output, production):
port = self.outports[portname]
port.write_token(retval if isinstance(retval, Token) else Token(retval))
return (True, True, exhausted_ports)
return condition_wrapper
return wrap
def stateguard(action_guard):
"""
Decorator guard refines the criteria for picking an action to run by stating a function
with THE SAME signature as the guarded action returning a boolean (True if action allowed).
    If the specified function is unbound or a lambda expression, you must account for 'self',
e.g. 'lambda self, a, b: a>0'
"""
def wrap(action_method):
@functools.wraps(action_method)
def guard_wrapper(self, *args):
if not action_guard(self):
return (False, True, ())
return action_method(self, *args)
return guard_wrapper
return wrap
def verify_status(valid_status_list, raise_=False):
"""
Decorator to help with debugging of state transitions
    If a decorated method is called when the actor's status is not in valid_status_list,
    the attempt is logged (or an exception is raised if raise_ is True).
"""
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
# Exclude the instance variables added by superclasses
if not instance.fsm.disable_state_checks and instance.fsm.state() not in valid_status_list:
msg = "Invalid status %s for operation %s" % (instance.fsm, wrapped.__name__)
if raise_:
raise Exception(msg)
else:
_log.info(msg)
x = wrapped(*args, **kwargs)
return x
return wrapper
def _implements_state(obj):
"""Helper method to check if foreign object supports setting/getting state."""
return hasattr(obj, 'state') and callable(getattr(obj, 'state')) and \
hasattr(obj, 'set_state') and callable(getattr(obj, 'set_state'))
class calvinsys(object):
"""
Calvinsys interface exposed to actors
"""
@staticmethod
def open(actor, name, **kwargs):
return get_calvinsys().open(name, actor, **kwargs)
@staticmethod
def can_write(ref):
return get_calvinsys().can_write(ref)
@staticmethod
def write(ref, data):
return get_calvinsys().write(ref, data)
@staticmethod
def can_read(ref):
return get_calvinsys().can_read(ref)
@staticmethod
def read(ref):
return get_calvinsys().read(ref)
@staticmethod
def close(ref):
return get_calvinsys().close(ref)
class calvinlib(object):
"""
CalvinLib interface exposed to actors
"""
@staticmethod
def use(name, **kwargs):
return get_calvinlib().use(name, **kwargs)
class Actor(object):
"""
    Base class for all actors.
    A name needs to be supplied.
    Subclasses need to declare the parameter
    calvinsys if they want access to the system
    interface on the node; this parameter
    will be supplied by the node and not by the user.
"""
# Class variable controls action priority order
action_priority = tuple()
# These are the security variables that will always be serialized, see serialize()/deserialize() below
_security_state_keys = ('_subject_attributes')
# These are the instance variables that will always be serialized, see serialize()/deserialize() below
_private_state_keys = ('_id', '_name', '_has_started', '_deployment_requirements',
'_signature', '_migration_info', "_port_property_capabilities", "_replication_id")
# Internal state (status)
class FSM(object):
def __init__(self, states, initial, transitions, hooks=None, allow_invalid_transitions=True,
disable_transition_checks=False, disable_state_checks=False):
self.states = states
self._state = initial
self.transitions = transitions
self.hooks = hooks or {}
self.allow_invalid_transitions = allow_invalid_transitions
self.disable_transition_checks = disable_transition_checks
# disable_state_checks is used in the verify_status decorator
self.disable_state_checks = disable_state_checks
def state(self):
return self._state
def transition_to(self, new_state):
if new_state in self.transitions[self._state] or self.disable_transition_checks:
hook = self.hooks.get((self._state, new_state), None)
if hook:
hook()
self._state = new_state
else:
msg = "Invalid transition %s -> %s" % (self, self.printable(new_state))
if self.allow_invalid_transitions:
_log.warning("ALLOWING " + msg)
self._state = new_state
else:
raise Exception(msg)
def printable(self, state):
return self.states.reverse_mapping[state]
def __str__(self):
return self.printable(self._state)
STATUS = enum('LOADED', 'READY', 'PENDING', 'ENABLED', 'DENIED', 'MIGRATABLE')
VALID_TRANSITIONS = {
STATUS.LOADED : [STATUS.READY],
STATUS.READY : [STATUS.PENDING, STATUS.ENABLED, STATUS.DENIED],
STATUS.PENDING : [STATUS.READY, STATUS.PENDING, STATUS.ENABLED],
STATUS.ENABLED : [STATUS.READY, STATUS.PENDING, STATUS.DENIED],
STATUS.DENIED : [STATUS.ENABLED, STATUS.MIGRATABLE, STATUS.PENDING],
STATUS.MIGRATABLE: [STATUS.READY, STATUS.DENIED]
}
test_args = ()
test_kwargs = {}
@property
def id(self):
return self._id
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def migration_info(self):
return self._migration_info
# What are the arguments, really?
def __init__(self, actor_type, name='', allow_invalid_transitions=True, disable_transition_checks=False,
disable_state_checks=False, actor_id=None, security=None):
"""Should _not_ be overridden in subclasses."""
super(Actor, self).__init__()
self._type = actor_type
self._name = name # optional: human_readable_name
self._id = actor_id or calvinuuid.uuid("ACTOR")
_log.debug("New actor id: %s, supplied actor id %s" % (self._id, actor_id))
self._deployment_requirements = []
self._port_property_capabilities = None
self._signature = None
self._component_members = set([self._id]) # We are only part of component if this is extended
self._managed = set()
self._has_started = False
# self.control = calvincontrol.get_calvincontrol()
self._migration_info = None
self._migrating_to = None # During migration while on the previous node set to the next node id
        self._migration_connected = True # False while setting up the migrated actor, to prevent further migrations
self._last_time_warning = 0.0
self.sec = security
self._subject_attributes = self.sec.get_subject_attributes() if self.sec is not None else None
self.authorization_checks = None
self._replication_id = ReplicationId()
self._exhaust_cb = None
self._pressure_event = 0 # Time of last pressure event time (not in state only local)
self.inports = {p: actorport.InPort(p, self, pp) for p, pp in self.inport_properties.items()}
self.outports = {p: actorport.OutPort(p, self, pp) for p, pp in self.outport_properties.items()}
hooks = {
(Actor.STATUS.PENDING, Actor.STATUS.ENABLED): self._will_start,
(Actor.STATUS.ENABLED, Actor.STATUS.PENDING): self.will_stop,
}
self.fsm = Actor.FSM(Actor.STATUS, Actor.STATUS.LOADED, Actor.VALID_TRANSITIONS, hooks,
allow_invalid_transitions=allow_invalid_transitions,
disable_transition_checks=disable_transition_checks,
disable_state_checks=disable_state_checks)
def set_authorization_checks(self, authorization_checks):
self.authorization_checks = authorization_checks
@verify_status([STATUS.LOADED])
def setup_complete(self):
self.fsm.transition_to(Actor.STATUS.READY)
def init(self):
raise Exception("Implementing 'init()' is mandatory.")
def _will_start(self):
"""Ensure will_start() is only called once"""
if not self._has_started:
self.will_start()
self._has_started = True
def will_start(self):
"""Override in actor subclass if actions need to be taken before starting."""
pass
def will_stop(self):
"""Override in actor subclass if actions need to be taken before stopping."""
pass
def will_migrate(self):
"""Override in actor subclass if actions need to be taken before migrating."""
pass
def did_migrate(self):
"""Override in actor subclass if actions need to be taken after migrating."""
pass
def _will_end(self):
if hasattr(self, "will_end") and callable(self.will_end):
self.will_end()
get_calvinsys().close_all(self)
def did_replicate(self, index):
"""Override in actor subclass if actions need to be taken after replication."""
pass
def __str__(self):
ip = ""
for p in self.inports.values():
ip = ip + str(p)
op = ""
for p in self.outports.values():
op = op + str(p)
s = "Actor: '%s' class '%s'\nstatus: %s\ninports: %s\noutports:%s" % (
self._name, self._type, self.fsm, ip, op)
return s
@verify_status([STATUS.READY, STATUS.PENDING, STATUS.ENABLED])
def did_connect(self, port):
"""Called when a port is connected, checks actor is fully connected."""
if self.fsm.state() == Actor.STATUS.ENABLED:
            # We were already enabled; that's fine now with dynamic port connections
return
_log.debug("actor.did_connect BEGIN %s %s " % (self._name, self._id))
# If we happen to be in READY, go to PENDING
if self.fsm.state() == Actor.STATUS.READY:
self.fsm.transition_to(Actor.STATUS.PENDING)
        # Three non-pathological options:
# have inports, have outports, or have in- and outports
if self.inports:
for p in self.inports.values():
if not p.is_connected():
return
if self.outports:
for p in self.outports.values():
if not p.is_connected():
return
# If we made it here, all ports are connected
self.fsm.transition_to(Actor.STATUS.ENABLED)
_log.debug("actor.did_connect ENABLED %s %s " % (self._name, self._id))
@verify_status([STATUS.ENABLED, STATUS.PENDING, STATUS.DENIED, STATUS.MIGRATABLE])
def did_disconnect(self, port):
"""Called when a port is disconnected, checks actor is fully disconnected."""
# If the actor is MIGRATABLE, return since it will be migrated soon.
_log.debug("Actor %s did_disconnect %s" % (self._id, Actor.STATUS.reverse_mapping[self.fsm.state()]))
if self.fsm.state() == Actor.STATUS.MIGRATABLE:
return
# If we happen to be in ENABLED/DENIED, go to PENDING
if self.fsm.state() != Actor.STATUS.PENDING:
self.fsm.transition_to(Actor.STATUS.PENDING)
        # Three non-pathological options:
# have inports, have outports, or have in- and outports
if self.inports:
for p in self.inports.values():
if p.is_connected():
return
if self.outports:
for p in self.outports.values():
if p.is_connected():
return
# If we made it here, all ports are disconnected
self.fsm.transition_to(Actor.STATUS.READY)
def exhaust(self, callback):
self._exhaust_cb = callback
def get_pressure(self):
_log.debug("get_pressure %s" % self._replication_id.measure_pressure())
if not self._replication_id.measure_pressure():
return None
t = time.time()
pressure = {}
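        # Collect each endpoint's circular buffer of queue-full events,
        # keyed by "<port id>,<peer id>", in chronological order.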
for port in self.inports.values():
for e in port.endpoints:
PRESSURE_LENGTH = len(e.pressure)
pressure[port.id + "," + e.peer_id] = {'last': e.pressure_last, 'count': e.pressure_count,
'pressure': [e.pressure[i % PRESSURE_LENGTH] for i in range(
max(0, e.pressure_count - PRESSURE_LENGTH), e.pressure_count)]}
pressure_event = False
for p in pressure.values():
if len(p['pressure']) < 2:
continue
if ((p['pressure'][-1][1] - p['pressure'][-2][1]) < 10 and
p['pressure'][-1][1] > self._pressure_event):
# Less than 10 sec between queue full and not reported, maybe scale out
self._pressure_event = max(p['pressure'][-1][1], self._pressure_event)
pressure_event = True
break
if (p['pressure'][-1][1] < (t - 30) and
p['last'] > p['pressure'][-1][0] + 3 and
p['pressure'][-1][1] > self._pressure_event):
# More than 30 sec since queue full, received at least 3 tokens and not reported, maybe scale in
self._pressure_event = max(p['pressure'][-1][1], self._pressure_event)
pressure_event = True
break
pressure['time'] = t
_log.debug("get_pressure pressure_event:%s, pressure: %s" % (pressure_event, pressure))
return pressure if pressure_event else None
#
# FIXME: The following methods (_authorized, _warn_slow_actor, _handle_exhaustion) were
# extracted from fire() to make the logic easier to follow
# FIXME: Responsibility of scheduler, not actor class
#
def _authorized(self):
authorized = self.check_authorization_decision()
if not authorized:
_log.info("Access denied for actor %s(%s)" % ( self._type, self._id))
# The authorization decision is not valid anymore.
# Change actor status to DENIED.
self.fsm.transition_to(Actor.STATUS.DENIED)
# Try to migrate actor.
self.sec.authorization_runtime_search(self._id, self._signature, callback=CalvinCB(self.set_migration_info))
return authorized
def _warn_slow_actor(self, time_spent, start_time):
time_since_warning = start_time - self._last_time_warning
if time_since_warning < 120.0:
return
self._last_time_warning = start_time
_log.warning("%s (%s) actor blocked for %f sec" % (self._name, self._type, time_spent))
def _handle_exhaustion(self, exhausted_ports, output_ok):
_log.debug("actor_fire %s test exhaust %s, %s, %s" % (self._id, self._exhaust_cb is not None, exhausted_ports, output_ok))
for port in exhausted_ports:
# Might result in actor changing to PENDING
try:
port.finished_exhaustion()
except:
_log.exception("FINSIHED EXHAUSTION FAILED")
if (output_ok and self._exhaust_cb is not None and
not any([p.any_outstanding_exhaustion_tokens() for p in self.inports.values()])):
_log.debug("actor %s exhausted" % self._id)
# We are in exhaustion, got all exhaustion tokens from peer ports
# but stopped firing while outport token slots available, i.e. exhausted inports or deadlock
# FIXME handle exhaustion deadlock
# Initiate disconnect of outports and destroy the actor
async.DelayedCall(0, self._exhaust_cb, status=response.CalvinResponse(True))
self._exhaust_cb = None
@verify_status([STATUS.ENABLED])
def fire(self):
"""
Fire an actor.
Returns tuple (did_fire, output_ok, exhausted)
"""
#
# Go over the action priority list once
#
for action_method in self.__class__.action_priority:
did_fire, output_ok, exhausted = action_method(self)
# Action firing should fire the first action that can fire
if did_fire:
break
return did_fire, output_ok, exhausted
def enabled(self):
# We want to run even if not fully connected during exhaustion
r = self.fsm.state() == Actor.STATUS.ENABLED or self._exhaust_cb is not None
if not r:
_log.debug("Actor %s %s not enabled" % (self._name, self._id))
return r
def denied(self):
return self.fsm.state() == Actor.STATUS.DENIED
def migratable(self):
return self.fsm.state() == Actor.STATUS.MIGRATABLE
@verify_status([STATUS.DENIED])
def enable_or_migrate(self):
"""Enable actor if access is permitted. Try to migrate if access still denied."""
if self.check_authorization_decision():
self.fsm.transition_to(Actor.STATUS.ENABLED)
else:
# Try to migrate actor.
self.sec.authorization_runtime_search(self._id, self._signature, callback=CalvinCB(self.set_migration_info))
# DEPRECATED: Only here for backwards compatibility
@verify_status([STATUS.ENABLED])
def enable(self):
self.fsm.transition_to(Actor.STATUS.ENABLED)
@verify_status([STATUS.READY, STATUS.PENDING, STATUS.LOADED])
# DEPRECATED: Only here for backwards compatibility
def disable(self):
self.fsm.transition_to(Actor.STATUS.PENDING)
# TODO verify status should only allow reading connections when and after being fully connected (enabled)
@verify_status([STATUS.ENABLED, STATUS.READY, STATUS.PENDING, STATUS.MIGRATABLE])
def connections(self, node_id):
c = {'actor_id': self._id, 'actor_name': self._name}
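        # Replace 'local' peer references with this runtime's node id for both in- and outports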
inports = {}
for port in self.inports.values():
peers = [
(node_id, p[1]) if p[0] == 'local' else p for p in port.get_peers()]
inports[port.id] = peers
c['inports'] = inports
outports = {}
for port in self.outports.values():
peers = [
(node_id, p[1]) if p[0] == 'local' else p for p in port.get_peers()]
outports[port.id] = peers
c['outports'] = outports
return c
def state(self):
"""Serialize custom state, implement in subclass if necessary"""
return {}
def set_state(self, state):
"""Deserialize and set custom state, implement in subclass if necessary"""
pass
def _private_state(self):
"""Serialize state common to all actors"""
state = {}
state['inports'] = {
port: self.inports[port]._state() for port in self.inports}
state['outports'] = {
port: self.outports[port]._state() for port in self.outports}
state['_component_members'] = list(self._component_members)
# Place requires in state, in the event we become a ShadowActor
state['_requires'] = self.requires if hasattr(self, 'requires') else []
# FIXME: The objects in _private_state_keys are well known, they are private after all,
# and we shouldn't need this generic handler.
for key in self._private_state_keys:
obj = self.__dict__[key]
if _implements_state(obj):
state[key] = obj.state()
else:
state[key] = obj
state["_calvinsys"] = get_calvinsys().serialize(actor=self)
return state
def _set_private_state(self, state):
"""Deserialize and apply state common to all actors"""
if "_calvinsys" in state:
get_calvinsys().deserialize(actor=self, csobjects=state["_calvinsys"])
for port in state['inports']:
# Uses setdefault to support shadow actor
self.inports.setdefault(port, actorport.InPort(port, self))._set_state(state['inports'][port])
for port in state['outports']:
# Uses setdefault to support shadow actor
self.outports.setdefault(port, actorport.OutPort(port, self))._set_state(state['outports'][port])
        self._component_members = set(state['_component_members'])
# FIXME: The objects in _private_state_keys are well known, they are private after all,
# and we shouldn't need this generic handler.
for key in self._private_state_keys:
if key not in self.__dict__:
self.__dict__[key] = state.get(key, None)
else:
obj = self.__dict__[key]
if _implements_state(obj):
obj.set_state(state.get(key))
else:
self.__dict__[key] = state.get(key, None)
def _replication_state(self):
return None
def _set_replication_state(self, state):
"""Deserialize and apply state related to a replicating actor """
pass
def _security_state(self):
"""
Serialize security state.
Security state can only contain objects that can be JSON-serialized.
"""
return {'_subject_attributes':self._subject_attributes}
def _set_security_state(self, state):
"""
Deserialize and apply security state.
Security state can only contain objects that can be JSON-serialized.
"""
pass
def _managed_state(self):
"""
Serialize managed state.
Managed state can only contain objects that can be JSON-serialized.
"""
state = {key: self.__dict__[key] for key in self._managed}
return state
def _set_managed_state(self, state):
"""
Deserialize and apply managed state.
Managed state can only contain objects that can be JSON-serialized.
"""
self._managed.update(set(state.keys()))
for key, val in state.iteritems():
self.__dict__[key] = val
def serialize(self):
"""Returns the serialized state of an actor."""
state = {}
state['private'] = self._private_state()
rstate = self._replication_state()
if rstate is not None:
state['replication'] = rstate
state['managed'] = self._managed_state()
        state['security'] = self._security_state()
state['custom'] = self.state()
return state
def deserialize(self, state):
"""Restore an actor's state from the serialized state."""
self._set_private_state(state['private'])
self._set_replication_state(state.get('replication', None))
self._set_security_state(state['security'])
self._set_managed_state(state['managed'])
self.set_state(state['custom'])
def exception_handler(self, action, args):
"""Defult handler when encountering ExceptionTokens"""
_log.error("ExceptionToken encountered\n name: %s\n type: %s\n action: %s\n args: %s\n" %
(self._name, self._type, action.__name__, args))
raise Exception("ExceptionToken NOT HANDLED")
def events(self):
return []
def component_add(self, actor_ids):
if not isinstance(actor_ids, (set, list, tuple)):
actor_ids = [actor_ids]
self._component_members.update(actor_ids)
def component_remove(self, actor_ids):
if not isinstance(actor_ids, (set, list, tuple)):
actor_ids = [actor_ids]
self._component_members -= set(actor_ids)
def part_of_component(self):
return len(self._component_members - set([self._id]))>0
def component_members(self):
return self._component_members
def requirements_add(self, deploy_reqs, extend=False):
if extend:
self._deployment_requirements.extend(deploy_reqs)
else:
self._deployment_requirements = deploy_reqs
def requirements_get(self):
if self._port_property_capabilities is None:
self._port_property_capabilities = self._derive_port_property_capabilities()
capability_port = [{
'op': 'port_property_match',
'kwargs': {'port_property': self._port_property_capabilities},
'type': '+'
}]
if hasattr(self, 'requires') and self.requires:
capability_require = [{
'op': 'actor_reqs_match',
'kwargs': {'requires': self.requires},
'type': '+'
}]
else:
capability_require = []
return (self._deployment_requirements + capability_require +
capability_port + self._replication_id._placement_req)
def _derive_port_property_capabilities(self):
port_property_capabilities = set([])
for port in self.inports.values():
port_property_capabilities.update(get_port_property_capabilities(port.properties))
for port in self.outports.values():
port_property_capabilities.update(get_port_property_capabilities(port.properties))
_log.debug("derive_port_property_capabilities:" + str(port_property_capabilities))
return get_port_property_runtime(port_property_capabilities)
def signature_set(self, signature):
if self._signature is None:
self._signature = signature
def check_authorization_decision(self):
"""Check if authorization decision is still valid"""
if self.authorization_checks:
if any(isinstance(elem, list) for elem in self.authorization_checks):
# If list of lists, True must be found in each list.
for plugin_list in self.authorization_checks:
if not check_authorization_plugin_list(plugin_list):
return False
return True
else:
return check_authorization_plugin_list(self.authorization_checks)
return True
@verify_status([STATUS.DENIED])
def set_migration_info(self, reply):
if reply and reply.status == 200 and reply.data["node_id"]:
self._migration_info = reply.data
self.fsm.transition_to(Actor.STATUS.MIGRATABLE)
_log.info("Migrate actor %s to node %s" % (self._name, self._migration_info["node_id"]))
# Inform the scheduler that the actor is ready to migrate.
get_calvinsys().scheduler_maintenance_wakeup()
else:
_log.info("No possible migration destination found for actor %s" % self._name)
# Try to enable/migrate actor again after a delay.
get_calvinsys().scheduler_maintenance_wakeup(delay=True)
@verify_status([STATUS.MIGRATABLE, STATUS.READY])
def remove_migration_info(self, status):
if status.status != 200:
self._migration_info = None
# FIXME: destroy() in actormanager.py was called before trying to migrate.
# Need to make the actor runnable again before transition to DENIED.
#self.fsm.transition_to(Actor.STATUS.DENIED)
def is_shadow(self):
return False
class ShadowActor(Actor):
"""A shadow actor try to behave as another actor but don't have any implementation"""
def __init__(self, actor_type, name='', allow_invalid_transitions=True, disable_transition_checks=False,
disable_state_checks=False, actor_id=None, security=None):
self.inport_properties = {}
self.outport_properties = {}
self.calvinsys_state = {}
self.requires = None
self._replication_state_data = None
super(ShadowActor, self).__init__(actor_type, name, allow_invalid_transitions=allow_invalid_transitions,
disable_transition_checks=disable_transition_checks,
disable_state_checks=disable_state_checks, actor_id=actor_id,
security=security)
@manage(['_shadow_args'])
def init(self, **args):
self._shadow_args = args
def is_shadow(self):
return True
def create_shadow_port(self, port_name, port_dir, port_id=None):
# TODO check if we should create port against meta info
if port_dir == "in":
self.inport_properties[port_name] = {}
port = actorport.InPort(port_name, self)
self.inports[port_name] = port
else:
self.outport_properties[port_name] = {}
port = actorport.OutPort(port_name, self)
self.outports[port_name] = port
return port
def enabled(self):
return False
def did_connect(self, port):
# Do nothing
return
def did_disconnect(self, port):
# Do nothing
return
def requirements_get(self):
# Get standard actor requirements first
reqs = super(ShadowActor, self).requirements_get()
if self._signature and hasattr(self, '_shadow_args') and self.requires is None:
# Fresh ShadowActor, needs to find placement based on signature
# Since actor requires is not known locally
reqs += [{'op': 'shadow_actor_reqs_match',
'kwargs': {'signature': self._signature,
'shadow_params': self._shadow_args.keys()},
'type': '+'}]
return reqs
def _set_private_state(self, state):
"""Pop _calvinsys state, set requires and call super class"""
self.calvinsys_state = state.pop("_calvinsys")
# Done only in ShadowActor since requires is normally part of the real Actor sub-class
self.requires = state['_requires']
super(ShadowActor, self)._set_private_state(state)
def _private_state(self):
"""Call super class and add stored calvinsys state"""
state = super(ShadowActor, self)._private_state()
state["_calvinsys"] = self.calvinsys_state
return state
def _set_replication_state(self, state):
""" Save the replication state, besides ports since they are already handled on the shadow instance """
super(ShadowActor, self)._set_replication_state(state)
        # Need a copy since the ports are removed from it, which is needed for connect
self._replication_state_data = copy.copy(state)
if state is None:
return
def _replication_state(self):
return self._replication_state_data
| apache-2.0 | 8,187,153,479,713,603,000 | 39.308872 | 130 | 0.606081 | false |
hiuwo/acq4 | acq4/pyqtgraph/opengl/items/GLGridItem.py | 1 | 1650 | from OpenGL.GL import *
from .. GLGraphicsItem import GLGraphicsItem
from ... import QtGui
__all__ = ['GLGridItem']
class GLGridItem(GLGraphicsItem):
"""
**Bases:** :class:`GLGraphicsItem <pyqtgraph.opengl.GLGraphicsItem>`
    Displays a wire-frame grid.
"""
def __init__(self, size=None, color=None, antialias=True, glOptions='translucent'):
GLGraphicsItem.__init__(self)
self.setGLOptions(glOptions)
self.antialias = antialias
if size is None:
size = QtGui.QVector3D(1,1,1)
self.setSize(size=size)
def setSize(self, x=None, y=None, z=None, size=None):
"""
Set the size of the axes (in its local coordinate system; this does not affect the transform)
Arguments can be x,y,z or size=QVector3D().
"""
if size is not None:
x = size.x()
y = size.y()
z = size.z()
self.__size = [x,y,z]
self.update()
def size(self):
return self.__size[:]
def paint(self):
self.setupGLState()
if self.antialias:
glEnable(GL_LINE_SMOOTH)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
glBegin( GL_LINES )
x,y,z = self.size()
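        # Note: the grid below spans a fixed -10..10 range; the size values retrieved
        # above are not used by these drawing loops.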
glColor4f(1, 1, 1, .3)
for x in range(-10, 11):
glVertex3f(x, -10, 0)
glVertex3f(x, 10, 0)
for y in range(-10, 11):
glVertex3f(-10, y, 0)
glVertex3f( 10, y, 0)
glEnd()
| mit | -7,482,402,917,673,212,000 | 27.448276 | 101 | 0.526061 | false |
konrad/kufpybio | kufpybio/restapi.py | 1 | 1251 | import os
import urllib.request
class RESTAPI(object):
"""A general class that handles the local file access or the
retrival of tha file.
"""
def _get_data(self, path_template, url_template, entity_id):
file_path = self._get_file_path(
path_template, url_template, entity_id)
return open(file_path).read()
def _get_file_path(self, path_template, url_template, entity_id):
file_path = self._file_path(path_template, entity_id)
if not os.path.exists(file_path):
self._retrive_data(url_template, entity_id, file_path)
return file_path
def _create_download_folder(self):
if not os.path.exists(self._download_folder):
os.makedirs(self._download_folder)
def _retrive_data(self, url_template, entity_id, file_path):
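        # Fetch the entity from the REST API and cache the raw response at file_path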
data = urllib.request.urlopen(
self._base_url + url_template % (entity_id)).read()
data_fh = open(file_path, "wb")
data_fh.write(data)
data_fh.close()
def _file_path(self, path_template, entity_id):
return path_template % (self._download_folder, entity_id)
def _rest_url(self, url_template, entity_id):
return url_template % (self._base_url, entity_id)
| isc | 1,140,365,508,240,056,600 | 32.810811 | 69 | 0.629097 | false |
davy39/eric | Debugger/VariablesFilterDialog.py | 1 | 3253 | # -*- coding: utf-8 -*-
# Copyright (c) 2002 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing the variables filter dialog.
"""
from __future__ import unicode_literals
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from Debugger.Config import ConfigVarTypeDispStrings
import Preferences
from .Ui_VariablesFilterDialog import Ui_VariablesFilterDialog
class VariablesFilterDialog(QDialog, Ui_VariablesFilterDialog):
"""
Class implementing the variables filter dialog.
It opens a dialog window for the configuration of the variables type
filter to be applied during a debugging session.
"""
def __init__(self, parent=None, name=None, modal=False):
"""
Constructor
@param parent parent widget of this dialog (QWidget)
@param name name of this dialog (string)
@param modal flag to indicate a modal dialog (boolean)
"""
super(VariablesFilterDialog, self).__init__(parent)
if name:
self.setObjectName(name)
self.setModal(modal)
self.setupUi(self)
self.defaultButton = self.buttonBox.addButton(
self.tr("Save Default"), QDialogButtonBox.ActionRole)
lDefaultFilter, gDefaultFilter = Preferences.getVarFilters()
        # populate the listboxes and set the default selection
for lb in self.localsList, self.globalsList:
for ts in ConfigVarTypeDispStrings:
lb.addItem(self.tr(ts))
for filterIndex in lDefaultFilter:
itm = self.localsList.item(filterIndex)
itm.setSelected(True)
for filterIndex in gDefaultFilter:
itm = self.globalsList.item(filterIndex)
itm.setSelected(True)
def getSelection(self):
"""
Public slot to retrieve the current selections.
@return A tuple of lists of integer values. The first list is the
locals variables filter, the second the globals variables filter.
"""
lList = []
gList = []
for i in range(self.localsList.count()):
itm = self.localsList.item(i)
if itm.isSelected():
lList.append(i)
for i in range(self.globalsList.count()):
itm = self.globalsList.item(i)
if itm.isSelected():
gList.append(i)
return (lList, gList)
def setSelection(self, lList, gList):
"""
Public slot to set the current selection.
@param lList local variables filter (list of int)
@param gList global variables filter (list of int)
"""
for filterIndex in lList:
itm = self.localsList.item(filterIndex)
itm.setSelected(True)
for filterIndex in gList:
itm = self.globalsList.item(filterIndex)
itm.setSelected(True)
def on_buttonBox_clicked(self, button):
"""
Private slot called by a button of the button box clicked.
@param button button that was clicked (QAbstractButton)
"""
if button == self.defaultButton:
Preferences.setVarFilters(self.getSelection())
| gpl-3.0 | 3,282,031,345,774,328,300 | 32.193878 | 77 | 0.618813 | false |
tqchen/tvm | tests/python/relay/test_op_qnn_dense.py | 1 | 8757 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import te
import numpy as np
from tvm import relay
from tvm.contrib import graph_runtime
from tvm.relay.testing.temp_op_attr import TempOpAttr
# We use llvm target for testing functionality. `llvm` points to an older Intel
# generation machine, that legalizes to a simple lowering. Therefore, the
# legalization is overwritten such that it can be skipped and we use the
# QNNCanonicalizeOps lowering for the testing.
def legalize_qnn_dense(attrs, inputs, types):
return None
def make_requantize_params(input_scale, output_scale, output_zero_point, out_dtype):
config = {
"input_scale": input_scale,
"output_scale": output_scale,
"output_zero_point": output_zero_point,
"out_dtype": out_dtype,
}
return config
def make_configuration(
quantized_data,
quantized_kernel,
dtype,
input_shape,
kernel_shape,
input_zero_point,
kernel_zero_point,
input_scale,
kernel_scale,
units,
output,
out_dtype="int32",
bias=None,
requantize=None,
):
if requantize is not None:
assert bias is not None
config = {
"quantized_data": quantized_data,
"quantized_kernel": quantized_kernel,
"dtype": dtype,
"input_shape": input_shape,
"kernel_shape": kernel_shape,
"input_zero_point": input_zero_point,
"kernel_zero_point": kernel_zero_point,
"input_scale": input_scale,
"kernel_scale": kernel_scale,
"units": units,
"output": output,
"out_dtype": out_dtype,
"bias": bias,
"requantize": requantize,
}
return config
def make_int_configuration(use_bias=False, requantize_output=False, per_channel=False):
input_shape, kernel_shape, output_shape = (2, 10), (3, 10), (2, 3)
input_zero_point, kernel_zero_point = -1, -1
in_dtype = "int8"
out_dtype = "int32" if not requantize_output else "int8"
units = 3
quantized_data_np = (
np.array([1, 3, 5, 7, 9, 11, 13, 15, -19, -21, 1, 3, 5, 7, 9, 11, 13, -17, 17, -21])
.astype(in_dtype)
.reshape(input_shape)
)
quantized_kernel_np = (
np.array(
[
1,
3,
5,
7,
9,
11,
13,
15,
17,
19,
1,
3,
5,
7,
9,
11,
13,
15,
17,
19,
1,
3,
5,
7,
9,
11,
13,
15,
17,
19,
]
)
.astype(in_dtype)
.reshape(kernel_shape)
)
input_scale = 0.5
kernel_scale = 0.5
output_scale = 1.0
bias = np.array([4, 8, 12]).astype(out_dtype).reshape((units,)) if use_bias else None
if per_channel:
assert use_bias and requantize_output
kernel_scale = np.array([0.5, 0.3, 0.4], dtype=np.float32)
output = np.array([23, 14, 20, 57, 34, 47])
elif requantize_output:
assert use_bias
output = np.array([23, 24, 25, 57, 58, 59])
elif use_bias:
output = np.array([96, 100, 104, 232, 236, 240])
else:
output = np.array([92, 92, 92, 228, 228, 228])
requant_params = (
make_requantize_params(input_scale * kernel_scale, output_scale, -1, "int8")
if requantize_output
else None
)
output = output.astype(out_dtype).reshape(output_shape)
return make_configuration(
quantized_data=quantized_data_np,
quantized_kernel=quantized_kernel_np,
dtype=in_dtype,
input_shape=input_shape,
kernel_shape=kernel_shape,
input_zero_point=input_zero_point,
kernel_zero_point=kernel_zero_point,
input_scale=input_scale,
kernel_scale=kernel_scale,
units=units,
output=output,
bias=bias,
requantize=requant_params,
)
def qnn_dense_driver(test_configuration):
in_dtype = test_configuration["dtype"]
out_dtype = test_configuration["out_dtype"]
quantized_data_name = "quantized_data"
quantized_kernel_name = "quantized_kernel"
expected_out_dtype = test_configuration["out_dtype"]
bias_name = "bias"
quantized_data = relay.var(
quantized_data_name, shape=test_configuration["input_shape"], dtype=in_dtype
)
quantized_kernel = relay.var(
quantized_kernel_name, shape=test_configuration["kernel_shape"], dtype=in_dtype
)
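    # Build the QNN dense op using the zero points, scales and units from the test configuration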
mod = relay.qnn.op.dense(
quantized_data,
quantized_kernel,
relay.const(test_configuration["input_zero_point"], "int32"),
relay.const(test_configuration["kernel_zero_point"], "int32"),
relay.const(test_configuration["input_scale"], "float32"),
relay.const(test_configuration["kernel_scale"], "float32"),
test_configuration["units"],
)
if test_configuration[bias_name] is not None:
bias = relay.var(bias_name, shape=test_configuration["bias"].shape, dtype=out_dtype)
mod = relay.nn.bias_add(mod, bias)
if test_configuration["requantize"] is not None:
requantize_config = test_configuration["requantize"]
mod = relay.qnn.op.requantize(
mod,
input_scale=relay.const(requantize_config["input_scale"], "float32"),
input_zero_point=relay.const(0, "int32"),
output_scale=relay.const(requantize_config["output_scale"], "float32"),
output_zero_point=relay.const(requantize_config["output_zero_point"], "int32"),
out_dtype=requantize_config["out_dtype"],
)
expected_out_dtype = requantize_config["out_dtype"]
mod = relay.Function(relay.analysis.free_vars(mod), mod)
mod = tvm.IRModule.from_expr(mod)
mod = relay.qnn.transform.CanonicalizeOps()(mod)
with tvm.transform.PassContext(opt_level=2):
graph, lib, params = relay.build(mod, "llvm", params=None)
mod = graph_runtime.create(graph, lib, ctx=tvm.cpu(0))
mod.set_input(quantized_data_name, test_configuration[quantized_data_name])
mod.set_input(quantized_kernel_name, test_configuration[quantized_kernel_name])
if test_configuration[bias_name] is not None:
mod.set_input(bias_name, test_configuration[bias_name])
mod.set_input(**params)
mod.run()
res = mod.get_output(0).asnumpy()
np.testing.assert_equal(res, test_configuration["output"])
assert res.dtype == expected_out_dtype
def test_qnn_dense_without_bias():
with TempOpAttr("qnn.dense", "FTVMQnnLegalize", legalize_qnn_dense):
int32_output_without_bias_params = make_int_configuration(use_bias=False)
qnn_dense_driver(int32_output_without_bias_params)
def test_qnn_dense_with_bias():
with TempOpAttr("qnn.dense", "FTVMQnnLegalize", legalize_qnn_dense):
int32_output_with_bias_params = make_int_configuration(use_bias=True)
qnn_dense_driver(int32_output_with_bias_params)
def test_qnn_dense_with_requantized_output():
with TempOpAttr("qnn.dense", "FTVMQnnLegalize", legalize_qnn_dense):
int8_requantized_output_with_bias_params = make_int_configuration(
use_bias=True, requantize_output=True
)
qnn_dense_driver(int8_requantized_output_with_bias_params)
def test_per_channel_weight_scale():
with TempOpAttr("qnn.dense", "FTVMQnnLegalize", legalize_qnn_dense):
config = make_int_configuration(use_bias=True, requantize_output=True, per_channel=True)
qnn_dense_driver(config)
if __name__ == "__main__":
test_qnn_dense_without_bias()
test_qnn_dense_with_bias()
test_qnn_dense_with_requantized_output()
test_per_channel_weight_scale()
| apache-2.0 | -1,486,182,902,509,419,800 | 32.94186 | 96 | 0.613338 | false |
s-tar/just-a-chat | kernel/server.py | 1 | 3625 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'mr.S'
import bottle
from gevent import monkey
import datetime
from kernel.widget import get as loadWidget
from kernel.helpers import is_ajax
from bottle import default_app, Bottle, route, static_file, ServerAdapter, Jinja2Template, request, error, redirect, jinja2_template as template
from kernel.session import Session
from beaker.middleware import SessionMiddleware
from kernel.user import User
import time
import kernel.db
import sys
import os
monkey.patch_all()
bottle.debug(True)
app = application = default_app()
reload(sys)
sys.setdefaultencoding('UTF8')
template_path = './templates/default/'
bottle.TEMPLATE_PATH.insert(0, template_path)
def run(run=False):
global app
import kernel.module
# redistogo_url = os.getenv('REDISTOGO_URL', None)
# if redistogo_url == None:
# redis_url = '127.0.0.1:6379'
# else:
# redis_url = redistogo_url
# redis_url = redis_url.split('redis://redistogo:')[1]
# redis_url = redis_url.split('/')[0]
# REDIS_PWD, REDIS_HOST = redis_url.split('@', 1)
# redis_url = "%s?password=%s" % (REDIS_HOST, REDIS_PWD)
# session_opts = {
# 'session.type': 'redis',
# 'session.url': redis_url,
# 'session.key': 'just_a_chat',
# 'session.auto': True, }
session_opts = {
'session.type': 'file',
'session.data_dir': './temp/sessions',
'session.cookie_expires': 7*24*60*60,
'session.auto': True}
class BeforeRequestMiddleware(object):
def __init__(self, app):
self.app = app
def __call__(self, e, h):
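            # Strip trailing slashes so '/path/' and '/path' resolve to the same route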
e['PATH_INFO'] = e['PATH_INFO'].rstrip('/')
return self.app(e, h)
Jinja2Template.defaults = {
'widget': loadWidget,
'is_ajax': is_ajax,
'modules': kernel.module.modules,
'datetime': datetime
}
Jinja2Template.settings = {
'filters': {
'nl2br': lambda value: value.replace('\n', '<br>\n')
}
}
@app.route('/static/<path:path>')
def static(path):
return static_file(path, './templates/default/static/')
@app.route('/file/<path:path>')
def file(path):
return static_file(path, './files/')
@app.post('/widget/<name:path>')
def widget(name):
try:
data = request.json['data'] if request.json is not None and 'data' in request.json else {}
return loadWidget(name, data, wrap=False)
except ValueError:
bottle.response.status = 404
@app.error(404)
def error404(error):
return template("404")
@app.hook('before_request')
def before_request():
request.session = Session(request.environ)
request.db = kernel.db.Database()
request.user = User(request.session, request.db)
Jinja2Template.defaults['user'] = request.user
@app.hook('after_request')
def after_request():
if 'db' in request:
request.db().close()
app = BeforeRequestMiddleware(app)
app = SessionMiddleware(app, session_opts)
#bottle.run(app, host='192.168.1.2', port=3000)
if run:
import kernel.socket
from socketio.server import SocketIOServer
SocketIOServer(('192.168.1.2', 3000), app).serve_forever()
def get_environment():
if request.environ['PATH_INFO'].startswith('/admin/') or request.environ['PATH_INFO'] == '/admin':
return 'admin'
else:
return 'site'
files_dir = os.path.abspath("./files/")
from modules import *
__all__ = ["app", "session", "files_dir"] | mit | 8,543,817,912,420,767,000 | 27.551181 | 144 | 0.608828 | false |
joelouismarino/iterative_inference | cfg/mnist/single_level/iterative/config.py | 1 | 1579 | # training set-up
train_config = {
'dataset': 'MNIST',
'output_distribution': 'bernoulli',
'batch_size': 64,
'n_samples': 10,
'n_iterations': 5,
'encoder_optimizer': 'adam',
'decoder_optimizer': 'adam',
'encoder_learning_rate': 0.0002,
'decoder_learning_rate': 0.0002,
'average_gradient': True,
'encoder_decoder_train_multiple': 1,
'kl_min': 0,
'kl_warm_up': False,
'cuda_device': 1,
'display_iter': 50,
'eval_iter': 500,
'resume_experiment': None
}
# model architecture
arch = {
'model_form': 'dense', # 'dense', 'conv'
'encoder_type': 'inference_model', # 'em', 'inference_model'
'inference_model_type': 'feedforward', # 'feedforward', 'recurrent'
'encoding_form': ['posterior', 'log_gradient', 'sign_gradient', 'mean', 'log_var'],
'variable_update_form': 'highway',
'concat_variables': False,
'posterior_form': 'gaussian',
'whiten_input': False,
'constant_prior_variances': True,
'single_output_variance': False,
'learn_top_prior': False,
'top_size': 1,
'n_latent': [64],
'n_det_enc': [0],
'n_det_dec': [0],
'n_layers_enc': [2, 0],
'n_layers_dec': [2, 1],
'n_units_enc': [512, 0],
'n_units_dec': [512, 1],
'non_linearity_enc': 'elu',
'non_linearity_dec': 'elu',
'connection_type_enc': 'highway',
'connection_type_dec': 'sequential',
'batch_norm_enc': False,
'batch_norm_dec': False,
'weight_norm_enc': False,
'weight_norm_dec': False,
'dropout_enc': 0.0,
'dropout_dec': 0.0
}
| mit | 1,561,352,637,299,236,400 | 23.292308 | 87 | 0.575047 | false |
apojomovsky/cuatro_en_linea | tests/test_tournament.py | 1 | 2368 | #!/usr/bin/env python
import unittest
from game.strategy import Strategy
from game.tournament import NotEnoughStrategies
from game.tournament import Tournament
import time
import itertools
class DummyStrategy1(Strategy):
def return_column(self, board):
return board.retrieve_first_non_full_column()
class DummyStrategy2(Strategy):
def return_column(self, board):
return board.retrieve_first_non_full_column()
def prepare(self, color, workers_pool):
time.sleep(0.06)
class DummyStrategy3(Strategy):
def return_column(self, board):
time.sleep(0.06)
return board.retrieve_first_non_full_column()
class DummyStrategy4(DummyStrategy1):
pass
class TestTournament(unittest.TestCase):
def test_generate_matches_with_no_strategies(self):
with self.assertRaises(NotEnoughStrategies):
strategies = []
Tournament(strategies)
def test_generate_matches_with_only_one_strategy(self):
with self.assertRaises(NotEnoughStrategies):
strategies = [DummyStrategy1]
Tournament(strategies)
def test_generate_matches_with_two_strategies(self):
with self.assertRaises(NotEnoughStrategies):
strategies = [DummyStrategy1, DummyStrategy2]
Tournament(strategies)
def test_run_with_strategy_that_timeouts_when_playing(self):
"""Ensures that the strategy who violates the play time limits,
won't win even a single match on the tournament
"""
strategies = [DummyStrategy1, DummyStrategy4, DummyStrategy3]
tournament = Tournament(strategies, play_time_limit=0.05)
tournament.run()
results = tournament.get_results_table()
winners = [result[2] for result in results]
self.assertFalse('DummyStrategy3' in winners)
def test_run_with_strategy_that_timeouts_when_preparing(self):
"""Ensures that the strategy who violates the prepare time limits,
won't win even a single match on the tournament
"""
strategies = [DummyStrategy1, DummyStrategy4, DummyStrategy2]
tournament = Tournament(strategies, prepare_time_limit=0.05)
tournament.run()
results = tournament.get_results_table()
winners = [result[2] for result in results]
self.assertFalse('DummyStrategy2' in winners)
| apache-2.0 | 9,052,826,954,571,969,000 | 36 | 74 | 0.691723 | false |
ivuk/pylxd | pylxd/tests/test_container.py | 1 | 8727 | # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
from ddt import data
from ddt import ddt
import json
import mock
import tempfile
from pylxd import connection
from pylxd.tests import annotated_data
from pylxd.tests import fake_api
from pylxd.tests import LXDAPITestBase
@ddt
@mock.patch.object(connection.LXDConnection, 'get_object',
return_value=('200', fake_api.fake_operation()))
class LXDAPIContainerTestObject(LXDAPITestBase):
def test_list_containers(self, ms):
ms.return_value = ('200', fake_api.fake_container_list())
self.assertEqual(
['trusty-1'],
self.lxd.container_list())
ms.assert_called_once_with('GET',
'/1.0/containers')
@annotated_data(
('STOPPED', False),
('STOPPING', False),
('ABORTING', False),
('RUNNING', True),
('STARTING', True),
('FREEZING', True),
('FROZEN', True),
('THAWED', True),
)
def test_container_running(self, status, running, ms):
with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
ms.return_value = ('200', fake_api.fake_container_state(status))
self.assertEqual(running, self.lxd.container_running('trusty-1'))
ms.assert_called_once_with('GET',
'/1.0/containers/trusty-1/state')
def test_container_init(self, ms):
self.assertEqual(ms.return_value, self.lxd.container_init('fake'))
ms.assert_called_once_with('POST',
'/1.0/containers',
'"fake"')
def test_container_update(self, ms):
self.assertEqual(ms.return_value,
self.lxd.container_update('trusty-1',
'fake'))
ms.assert_called_once_with('PUT',
'/1.0/containers/trusty-1',
'"fake"')
def test_container_state(self, ms):
ms.return_value = ('200', fake_api.fake_container_state('RUNNING'))
self.assertEqual('RUNNING', self.lxd.container_state('trusty-1'))
ms.assert_called_with('GET',
'/1.0/containers/trusty-1/state')
@annotated_data(
('start', 'start'),
('stop', 'stop'),
('suspend', 'freeze'),
('resume', 'unfreeze'),
('reboot', 'restart'),
)
def test_container_actions(self, method, action, ms):
self.assertEqual(
ms.return_value,
getattr(self.lxd, 'container_' + method)('trusty-1', 30))
ms.assert_called_once_with('PUT',
'/1.0/containers/trusty-1/state',
json.dumps({'action': action,
'timeout': 30}))
def test_container_destroy(self, ms):
self.assertEqual(
ms.return_value, self.lxd.container_destroy('trusty-1'))
ms.assert_called_once_with('DELETE',
'/1.0/containers/trusty-1')
def test_container_log(self, ms):
ms.return_value = ('200', fake_api.fake_container_log())
self.assertEqual(
'fake log', self.lxd.get_container_log('trusty-1'))
ms.assert_called_once_with('GET',
'/1.0/containers/trusty-1?log=true')
def test_container_config(self, ms):
ms.return_value = ('200', fake_api.fake_container_state('fake'))
self.assertEqual(
{'status': 'fake'}, self.lxd.get_container_config('trusty-1'))
ms.assert_called_once_with('GET',
'/1.0/containers/trusty-1?log=false')
def test_container_info(self, ms):
ms.return_value = ('200', fake_api.fake_container_state('fake'))
self.assertEqual(
{'status': 'fake'}, self.lxd.container_info('trusty-1'))
ms.assert_called_once_with('GET',
'/1.0/containers/trusty-1/state')
def test_container_migrate(self, ms):
ms.return_value = ('200', fake_api.fake_container_migrate())
self.assertEqual(
{'control': 'fake_control',
'criu': 'fake_criu',
'fs': 'fake_fs'},
self.lxd.container_migrate('trusty-1'))
ms.assert_called_once_with('POST',
'/1.0/containers/trusty-1',
'{"migration": true}')
def test_container_publish(self, ms):
ms.return_value = ('200', fake_api.fake_operation())
self.assertEqual(
ms.return_value, self.lxd.container_publish('trusty-1'))
ms.assert_called_once_with('POST',
'/1.0/images',
'"trusty-1"')
def test_container_put_file(self, ms):
temp_file = tempfile.NamedTemporaryFile()
ms.return_value = ('200', fake_api.fake_standard_return())
self.assertEqual(
            ms.return_value, self.lxd.put_container_file('trusty-1', temp_file.name, 'dst_file'))
ms.assert_called_once_with('POST',
'/1.0/containers/trusty-1/files?path=dst_file',
body='',
headers={'X-LXD-gid': 0, 'X-LXD-mode': 0644, 'X-LXD-uid': 0})
def test_list_snapshots(self, ms):
ms.return_value = ('200', fake_api.fake_snapshots_list())
self.assertEqual(
['/1.0/containers/trusty-1/snapshots/first'],
self.lxd.container_snapshot_list('trusty-1'))
ms.assert_called_once_with('GET',
'/1.0/containers/trusty-1/snapshots')
@annotated_data(
('create', 'POST', '', ('fake config',), ('"fake config"',)),
('info', 'GET', '/first', ('first',), ()),
('rename', 'POST', '/first',
('first', 'fake config'), ('"fake config"',)),
('delete', 'DELETE', '/first', ('first',), ()),
)
def test_snapshot_operations(self, method, http, path,
args, call_args, ms):
self.assertEqual(
ms.return_value,
getattr(self.lxd,
'container_snapshot_' + method)('trusty-1', *args))
ms.assert_called_once_with(http,
'/1.0/containers/trusty-1/snapshots' +
path,
*call_args)
def test_container_run_command(self, ms):
data = OrderedDict((
('command', ['/fake/command']),
('interactive', False),
('wait-for-websocket', False),
('environment', {'FAKE_ENV': 'fake'})
))
self.assertEqual(
ms.return_value,
self.lxd.container_run_command('trusty-1', *data.values()))
self.assertEqual(1, ms.call_count)
self.assertEqual(
ms.call_args[0][:2],
('POST', '/1.0/containers/trusty-1/exec'))
self.assertEqual(
json.loads(ms.call_args[0][2]),
dict(data)
)
@ddt
@mock.patch.object(connection.LXDConnection, 'get_status')
class LXDAPIContainerTestStatus(LXDAPITestBase):
@data(True, False)
def test_container_defined(self, defined, ms):
ms.return_value = defined
self.assertEqual(defined, self.lxd.container_defined('trusty-1'))
ms.assert_called_once_with('GET',
'/1.0/containers/trusty-1/state')
@ddt
@mock.patch.object(connection.LXDConnection, 'get_raw',
return_value='fake contents')
class LXDAPIContainerTestRaw(LXDAPITestBase):
def test_container_file(self, ms):
self.assertEqual(
'fake contents', self.lxd.get_container_file('trusty-1',
'/file/name'))
ms.assert_called_once_with(
'GET', '/1.0/containers/trusty-1/files?path=/file/name')
| apache-2.0 | -2,730,246,936,904,055,000 | 38.668182 | 96 | 0.532944 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_06_01/aio/operations/_connection_monitors_operations.py | 1 | 45799 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ConnectionMonitorsOperations:
"""ConnectionMonitorsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
parameters: "_models.ConnectionMonitor",
**kwargs
) -> "_models.ConnectionMonitorResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectionMonitor')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
parameters: "_models.ConnectionMonitor",
**kwargs
) -> AsyncLROPoller["_models.ConnectionMonitorResult"]:
"""Create or update a connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param parameters: Parameters that define the operation to create a connection monitor.
:type parameters: ~azure.mgmt.network.v2019_06_01.models.ConnectionMonitor
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ConnectionMonitorResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def get(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> "_models.ConnectionMonitorResult":
"""Gets a connection monitor by name.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionMonitorResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
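    # Illustrative usage (not part of the generated client; client and resource names
    # below are assumptions): begin_delete returns an AsyncLROPoller that can be
    # awaited to completion.
    #
    #     poller = await client.connection_monitors.begin_delete(
    #         "my-rg", "my-watcher", "my-monitor")
    #     await poller.result()  # returns None once the LRO reaches a terminal state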
async def update_tags(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.ConnectionMonitorResult":
"""Update tags of the specified connection monitor.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param parameters: Parameters supplied to update connection monitor tags.
:type parameters: ~azure.mgmt.network.v2019_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionMonitorResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
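    # Illustrative usage (assumption, not generated code): update_tags is a plain
    # async call, so the deserialized ConnectionMonitorResult is returned directly.
    #
    #     from azure.mgmt.network.v2019_06_01 import models
    #     result = await client.connection_monitors.update_tags(
    #         "my-rg", "my-watcher", "my-monitor",
    #         models.TagsObject(tags={"env": "test"}))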
async def _stop_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'} # type: ignore
async def begin_stop(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Stops the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'} # type: ignore
async def _start_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'} # type: ignore
async def begin_start(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Starts the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'} # type: ignore
async def _query_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> "_models.ConnectionMonitorQueryResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorQueryResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._query_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'} # type: ignore
async def begin_query(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller["_models.ConnectionMonitorQueryResult"]:
"""Query a snapshot of the most recent connection states.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name given to the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ConnectionMonitorQueryResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorQueryResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorQueryResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._query_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_query.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'} # type: ignore
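    # Illustrative usage (assumption): begin_query polls until a
    # ConnectionMonitorQueryResult is available from poller.result().
    #
    #     poller = await client.connection_monitors.begin_query(
    #         "my-rg", "my-watcher", "my-monitor")
    #     query_result = await poller.result()
    #     for state in (query_result.states or []):
    #         print(state.connection_state)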
def list(
self,
resource_group_name: str,
network_watcher_name: str,
**kwargs
) -> AsyncIterable["_models.ConnectionMonitorListResult"]:
"""Lists all connection monitors for the specified Network Watcher.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ConnectionMonitorListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors'} # type: ignore
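    # Illustrative usage (assumption): list() returns an AsyncItemPaged consumed
    # with `async for`; paging is handled transparently via the next link.
    #
    #     async for monitor in client.connection_monitors.list("my-rg", "my-watcher"):
    #         print(monitor.name)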
| mit | -1,777,509,022,329,166,800 | 52.069525 | 242 | 0.658551 | false |
keepkey/python-keepkey | keepkeylib/client.py | 1 | 50402 | # This file is part of the TREZOR project.
#
# Copyright (C) 2012-2016 Marek Palatinus <[email protected]>
# Copyright (C) 2012-2016 Pavol Rusnak <[email protected]>
# Copyright (C) 2016 Jochen Hoenicke <[email protected]>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# The script has been modified for KeepKey Device.
from __future__ import print_function, absolute_import
import os
import sys
import time
import binascii
import hashlib
import unicodedata
import json
import getpass
import copy
from mnemonic import Mnemonic
from . import tools
from . import mapping
from . import messages_pb2 as proto
from . import messages_eos_pb2 as eos_proto
from . import messages_nano_pb2 as nano_proto
from . import messages_cosmos_pb2 as cosmos_proto
from . import messages_ripple_pb2 as ripple_proto
from . import messages_tendermint_pb2 as tendermint_proto
from . import messages_thorchain_pb2 as thorchain_proto
from . import types_pb2 as types
from . import eos
from . import nano
from .debuglink import DebugLink
# try:
# from PIL import Image
# SCREENSHOT = True
# except:
# SCREENSHOT = False
SCREENSHOT = False
DEFAULT_CURVE = 'secp256k1'
# monkeypatching: text formatting of protobuf messages
tools.monkeypatch_google_protobuf_text_format()
def get_buttonrequest_value(code):
# Converts integer code to its string representation of ButtonRequestType
return [ k for k, v in types.ButtonRequestType.items() if v == code][0]
def pprint(msg):
msg_class = msg.__class__.__name__
msg_size = msg.ByteSize()
"""
msg_ser = msg.SerializeToString()
msg_id = mapping.get_type(msg)
msg_json = json.dumps(protobuf_json.pb2json(msg))
"""
if isinstance(msg, proto.FirmwareUpload):
return "<%s> (%d bytes):\n" % (msg_class, msg_size)
else:
return "<%s> (%d bytes):\n%s" % (msg_class, msg_size, msg)
def log(msg):
sys.stderr.write(msg + '\n')
sys.stderr.flush()
def log_cr(msg):
sys.stdout.write('\r' + msg)
sys.stdout.flush()
def format_mnemonic(word_pos, character_pos):
return "WORD %d: %s" % (word_pos, character_pos * '*')
def getch():
try:
import termios
except ImportError:
# Non-POSIX. Return msvcrt's (Windows') getch.
import msvcrt
return msvcrt.getch()
# POSIX system. Create and return a getch that manipulates the tty.
import sys, tty
def _getch():
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(fd)
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
return _getch()
class CallException(Exception):
def __init__(self, code, message):
super(CallException, self).__init__()
self.args = [code, message]
class PinException(CallException):
pass
class field(object):
# Decorator extracts single value from
# protobuf object. If the field is not
# present, raises an exception.
def __init__(self, field):
self.field = field
def __call__(self, f):
def wrapped_f(*args, **kwargs):
ret = f(*args, **kwargs)
            if not ret.HasField(self.field):
                raise CallException(types.Failure_Other,
                                    "Field '%s' is missing in the response" % self.field)
return getattr(ret, self.field)
return wrapped_f
class expect(object):
# Decorator checks if the method
# returned one of expected protobuf messages
# or raises an exception
def __init__(self, *expected):
self.expected = expected
def __call__(self, f):
def wrapped_f(*args, **kwargs):
ret = f(*args, **kwargs)
if not isinstance(ret, self.expected):
raise Exception("Got %s, expected %s" % (ret.__class__, self.expected))
return ret
return wrapped_f
def session(f):
# Decorator wraps a BaseClient method
# with session activation / deactivation
def wrapped_f(*args, **kwargs):
client = args[0]
try:
client.transport.session_begin()
return f(*args, **kwargs)
finally:
client.transport.session_end()
return wrapped_f
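# Illustrative composition (hypothetical example): the field/expect/session
# decorators above are typically stacked on ProtocolMixin methods, e.g.
#
#     @field('address')          # unwrap a single field from the reply
#     @expect(proto.Address)     # assert the reply message type
#     def get_address(self, ...): ...
#
# and @session keeps the transport session open across the whole exchange.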
def normalize_nfc(txt):
if sys.version_info[0] < 3:
if isinstance(txt, unicode):
return unicodedata.normalize('NFC', txt)
if isinstance(txt, str):
return unicodedata.normalize('NFC', txt.decode('utf-8'))
else:
if isinstance(txt, bytes):
return unicodedata.normalize('NFC', txt.decode('utf-8'))
if isinstance(txt, str):
return unicodedata.normalize('NFC', txt)
raise Exception('unicode/str or bytes/str expected')
class BaseClient(object):
# Implements very basic layer of sending raw protobuf
# messages to device and getting its response back.
def __init__(self, transport, **kwargs):
self.transport = transport
self.verbose = False
super(BaseClient, self).__init__() # *args, **kwargs)
def cancel(self):
self.transport.write(proto.Cancel())
@session
def call_raw(self, msg):
self.transport.write(msg)
return self.transport.read_blocking()
@session
def call(self, msg):
resp = self.call_raw(msg)
handler_name = "callback_%s" % resp.__class__.__name__
handler = getattr(self, handler_name, None)
if handler != None:
msg = handler(resp)
if msg == None:
raise Exception("Callback %s must return protobuf message, not None" % handler)
resp = self.call(msg)
return resp
def callback_Failure(self, msg):
if msg.code in (types.Failure_PinInvalid,
types.Failure_PinCancelled, types.Failure_PinExpected):
raise PinException(msg.code, msg.message)
raise CallException(msg.code, msg.message)
def close(self):
self.transport.close()
class DebugWireMixin(object):
def call_raw(self, msg):
log("SENDING " + pprint(msg))
resp = super(DebugWireMixin, self).call_raw(msg)
log("RECEIVED " + pprint(resp))
return resp
class TextUIMixin(object):
# This class demonstrates easy test-based UI
# integration between the device and wallet.
# You can implement similar functionality
# by implementing your own GuiMixin with
# graphical widgets for every type of these callbacks.
def __init__(self, *args, **kwargs):
super(TextUIMixin, self).__init__(*args, **kwargs)
        self.character_request_first_pass = True
        self.recovery_matrix_first_pass = True
def callback_ButtonRequest(self, msg):
# log("Sending ButtonAck for %s " % get_buttonrequest_value(msg.code))
return proto.ButtonAck()
def callback_RecoveryMatrix(self, msg):
if self.recovery_matrix_first_pass:
self.recovery_matrix_first_pass = False
log("Use the numeric keypad to describe positions. For the word list use only left and right keys. The layout is:")
log(" 7 8 9 7 | 9")
log(" 4 5 6 4 | 6")
log(" 1 2 3 1 | 3")
while True:
character = getch()
if character in ('\x03', '\x04'):
return proto.Cancel()
if character in ('\x08', '\x7f'):
return proto.WordAck(word='\x08')
# ignore middle column if only 6 keys requested.
if (msg.type == types.WordRequestType_Matrix6 and
character in ('2', '5', '8')):
continue
if (ord(character) >= ord('1') and ord(character) <= ord('9')):
return proto.WordAck(word=character)
def callback_PinMatrixRequest(self, msg):
if msg.type == 1:
desc = 'current PIN'
elif msg.type == 2:
desc = 'new PIN'
elif msg.type == 3:
desc = 'new PIN again'
else:
desc = 'PIN'
log("Use the numeric keypad to describe number positions. The layout is:")
log(" 7 8 9")
log(" 4 5 6")
log(" 1 2 3")
log("Please enter %s: " % desc)
pin = getpass.getpass('')
return proto.PinMatrixAck(pin=pin)
def callback_PassphraseRequest(self, msg):
log("Passphrase required: ")
passphrase = getpass.getpass('')
log("Confirm your Passphrase: ")
if passphrase == getpass.getpass(''):
passphrase = normalize_nfc(passphrase)
return proto.PassphraseAck(passphrase=passphrase)
else:
log("Passphrase did not match! ")
exit()
def callback_CharacterRequest(self, msg):
if self.character_request_first_pass:
self.character_request_first_pass = False
log("Use recovery cipher on device to input mnemonic. Words are autocompleted at 3 or 4 characters.")
log("(use spacebar to progress to next word after match, use backspace to correct bad character or word entries)")
# format mnemonic for console
formatted_console = format_mnemonic(msg.word_pos + 1, msg.character_pos)
# clear the runway before we display formatted mnemonic
log_cr(' ' * 14)
log_cr(formatted_console)
while True:
character = getch().lower()
# capture escape
if character in ('\x03', '\x04'):
return proto.Cancel()
character_ascii = ord(character)
if character_ascii >= 97 and character_ascii <= 122 \
and msg.character_pos != 4:
# capture characters a-z
return proto.CharacterAck(character=character)
elif character_ascii == 32 and msg.word_pos < 23 \
and msg.character_pos >= 3:
# capture spaces
return proto.CharacterAck(character=' ')
            elif (character_ascii == 8 or character_ascii == 127) \
                    and (msg.word_pos > 0 or msg.character_pos > 0):
# capture backspaces
return proto.CharacterAck(delete=True)
elif character_ascii == 13 and msg.word_pos in (11, 17, 23):
# capture returns
log("")
return proto.CharacterAck(done=True)
class DebugLinkMixin(object):
# This class implements automatic responses
# and other functionality for unit tests
# for various callbacks, created in order
# to automatically pass unit tests.
#
# This mixing should be used only for purposes
# of unit testing, because it will fail to work
# without special DebugLink interface provided
# by the device.
def __init__(self, *args, **kwargs):
super(DebugLinkMixin, self).__init__(*args, **kwargs)
self.debug = None
self.in_with_statement = 0
self.button_wait = 0
self.screenshot_id = 0
# Always press Yes and provide correct pin
self.setup_debuglink(True, True)
self.auto_button = True
# Do not expect any specific response from device
self.expected_responses = None
# Use blank passphrase
self.set_passphrase('')
def close(self):
super(DebugLinkMixin, self).close()
if self.debug:
self.debug.close()
def set_debuglink(self, debug_transport):
self.debug = DebugLink(debug_transport)
def set_buttonwait(self, secs):
self.button_wait = secs
def __enter__(self):
# For usage in with/expected_responses
self.in_with_statement += 1
return self
def __exit__(self, _type, value, traceback):
self.in_with_statement -= 1
if _type != None:
# Another exception raised
return False
# return isinstance(value, TypeError)
# Evaluate missed responses in 'with' statement
if self.expected_responses != None and len(self.expected_responses):
raise Exception("Some of expected responses didn't come from device: %s" % \
[ pprint(x) for x in self.expected_responses ])
# Cleanup
self.expected_responses = None
return False
def set_expected_responses(self, expected):
if not self.in_with_statement:
raise Exception("Must be called inside 'with' statement")
self.expected_responses = expected
def setup_debuglink(self, button, pin_correct):
self.button = button # True -> YES button, False -> NO button
self.pin_correct = pin_correct
def set_passphrase(self, passphrase):
self.passphrase = normalize_nfc(passphrase)
def set_mnemonic(self, mnemonic):
self.mnemonic = normalize_nfc(mnemonic).split(' ')
def call_raw(self, msg):
if SCREENSHOT and self.debug:
layout = self.debug.read_layout()
im = Image.new("RGB", (128, 64))
pix = im.load()
for x in range(128):
for y in range(64):
rx, ry = 127 - x, 63 - y
if (ord(layout[rx + (ry / 8) * 128]) & (1 << (ry % 8))) > 0:
pix[x, y] = (255, 255, 255)
im.save('scr%05d.png' % self.screenshot_id)
self.screenshot_id += 1
resp = super(DebugLinkMixin, self).call_raw(msg)
self._check_request(resp)
return resp
def _check_request(self, msg):
if self.expected_responses != None:
try:
expected = self.expected_responses.pop(0)
except IndexError:
raise CallException(types.Failure_Other,
"Got %s, but no message has been expected" % pprint(msg))
if msg.__class__ != expected.__class__:
raise CallException(types.Failure_Other,
"Expected %s, got %s" % (pprint(expected), pprint(msg)))
fields = expected.ListFields() # only filled (including extensions)
for field, value in fields:
if not msg.HasField(field.name) or getattr(msg, field.name) != value:
raise CallException(types.Failure_Other,
"Expected %s, got %s" % (pprint(expected), pprint(msg)))
def callback_ButtonRequest(self, msg):
if self.verbose:
log("ButtonRequest code: " + get_buttonrequest_value(msg.code))
if self.auto_button:
if self.verbose:
log("Pressing button " + str(self.button))
if self.button_wait:
if self.verbose:
log("Waiting %d seconds " % self.button_wait)
time.sleep(self.button_wait)
self.debug.press_button(self.button)
return proto.ButtonAck()
def callback_PinMatrixRequest(self, msg):
if self.pin_correct:
pin = self.debug.read_pin_encoded()
else:
pin = '444222'
return proto.PinMatrixAck(pin=pin)
def callback_PassphraseRequest(self, msg):
if self.verbose:
log("Provided passphrase: '%s'" % self.passphrase)
return proto.PassphraseAck(passphrase=self.passphrase)
class ProtocolMixin(object):
PRIME_DERIVATION_FLAG = 0x80000000
VENDORS = ('keepkey.com',)
def __init__(self, *args, **kwargs):
super(ProtocolMixin, self).__init__(*args, **kwargs)
self.init_device()
self.tx_api = None
def set_tx_api(self, tx_api):
self.tx_api = tx_api
def get_tx_api(self):
return self.tx_api
def init_device(self):
self.features = expect(proto.Features)(self.call)(proto.Initialize())
if str(self.features.vendor) not in self.VENDORS:
raise Exception("Unsupported device")
def _get_local_entropy(self):
return os.urandom(32)
def _convert_prime(self, n):
# Convert minus signs to uint32 with flag
return [ int(abs(x) | self.PRIME_DERIVATION_FLAG) if x < 0 else x for x in n ]
@staticmethod
def expand_path(n):
# Convert string of bip32 path to list of uint32 integers with prime flags
# 0/-1/1' -> [0, 0x80000001, 0x80000001]
if not n:
return []
n = n.split('/')
# m/a/b/c => a/b/c
if n[0] == 'm':
n = n[1:]
# coin_name/a/b/c => 44'/SLIP44_constant'/a/b/c
# https://github.com/satoshilabs/slips/blob/master/slip-0044.md
coins = {
"Bitcoin": 0,
"Testnet": 1,
"Litecoin": 2,
"Dogecoin": 3,
"Dash": 5,
"Namecoin": 7,
"Bitsend": 91,
"Groestlcoin": 17,
"Zcash": 133,
"BitcoinCash": 145,
"Bitcore": 160,
"Megacoin": 217,
"Bitcloud": 218,
"Axe": 4242,
}
if n[0] in coins:
n = ["44'", "%d'" % coins[n[0]] ] + n[1:]
path = []
for x in n:
prime = False
if x.endswith("'"):
x = x.replace('\'', '')
prime = True
if x.startswith('-'):
prime = True
x = abs(int(x))
if prime:
x |= ProtocolMixin.PRIME_DERIVATION_FLAG
path.append(x)
return path
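    # Illustrative results (derived from the parsing rules above; paths are examples):
    #
    #     ProtocolMixin.expand_path("m/44'/0'/0'/0/0")
    #     # -> [0x8000002C, 0x80000000, 0x80000000, 0, 0]
    #     ProtocolMixin.expand_path("Bitcoin/0'/0/0")
    #     # -> same list: the coin name expands to the 44'/0' prefix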
@expect(proto.PublicKey)
def get_public_node(self, n, ecdsa_curve_name=DEFAULT_CURVE, show_display=False, coin_name=None, script_type=types.SPENDADDRESS):
n = self._convert_prime(n)
if not ecdsa_curve_name:
ecdsa_curve_name=DEFAULT_CURVE
return self.call(proto.GetPublicKey(address_n=n, ecdsa_curve_name=ecdsa_curve_name, show_display=show_display, coin_name=coin_name, script_type=script_type))
@field('address')
@expect(proto.Address)
def get_address(self, coin_name, n, show_display=False, multisig=None, script_type=types.SPENDADDRESS):
n = self._convert_prime(n)
if multisig:
return self.call(proto.GetAddress(address_n=n, coin_name=coin_name, show_display=show_display, multisig=multisig, script_type=script_type))
else:
return self.call(proto.GetAddress(address_n=n, coin_name=coin_name, show_display=show_display, script_type=script_type))
@field('address')
@expect(proto.EthereumAddress)
def ethereum_get_address(self, n, show_display=False, multisig=None):
n = self._convert_prime(n)
return self.call(proto.EthereumGetAddress(address_n=n, show_display=show_display))
@session
def ethereum_sign_tx(self, n, nonce, gas_price, gas_limit, value, to=None, to_n=None, address_type=None, exchange_type=None, data=None, chain_id=None):
from keepkeylib.tools import int_to_big_endian
n = self._convert_prime(n)
if address_type == types.TRANSFER: #Ethereum transfer transaction
msg = proto.EthereumSignTx(
address_n=n,
nonce=int_to_big_endian(nonce),
gas_price=int_to_big_endian(gas_price),
gas_limit=int_to_big_endian(gas_limit),
value=int_to_big_endian(value),
to_address_n=to_n,
address_type=address_type
)
elif address_type == types.EXCHANGE: #Ethereum exchange transaction
msg = proto.EthereumSignTx(
address_n=n,
nonce=int_to_big_endian(nonce),
gas_price=int_to_big_endian(gas_price),
gas_limit=int_to_big_endian(gas_limit),
value=int_to_big_endian(value),
to_address_n=to_n,
exchange_type=exchange_type,
address_type=address_type
)
else:
msg = proto.EthereumSignTx(
address_n=n,
nonce=int_to_big_endian(nonce),
gas_price=int_to_big_endian(gas_price),
gas_limit=int_to_big_endian(gas_limit),
value=int_to_big_endian(value)
)
if to:
msg.to = to
if data:
msg.data_length = len(data)
data, chunk = data[1024:], data[:1024]
msg.data_initial_chunk = chunk
if chain_id:
msg.chain_id = chain_id
response = self.call(msg)
while response.HasField('data_length'):
data_length = response.data_length
data, chunk = data[data_length:], data[:data_length]
response = self.call(proto.EthereumTxAck(data_chunk=chunk))
if address_type:
return response.signature_v, response.signature_r, response.signature_s, response.hash, response.signature_der
else:
return response.signature_v, response.signature_r, response.signature_s
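    # Illustrative usage (values and address are hypothetical): a plain transfer
    # returns the (v, r, s) signature triple; integers are converted to big-endian
    # bytes inside the method.
    #
    #     v, r, s = client.ethereum_sign_tx(
    #         n=client.expand_path("m/44'/60'/0'/0/0"),
    #         nonce=0,
    #         gas_price=20 * 10**9,
    #         gas_limit=21000,
    #         value=10**18,  # 1 ETH in wei
    #         to=binascii.unhexlify('1d1479c185d32eb90533a08b36b3cfa5f84a0e6b'),
    #         chain_id=1)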
@expect(eos_proto.EosPublicKey)
def eos_get_public_key(self, address_n, show_display=True, legacy=True):
msg = eos_proto.EosGetPublicKey(
address_n=address_n,
show_display=show_display,
            kind=eos_proto.EOS if legacy else eos_proto.EOS_K1
)
return self.call(msg)
@session
def eos_sign_tx_raw(self, msg, actions):
response = self.call(msg)
for common, action in actions:
if isinstance(action, eos_proto.EosActionTransfer):
msg = eos_proto.EosTxActionAck(common=common, transfer=action)
elif isinstance(action, eos_proto.EosActionDelegate):
msg = eos_proto.EosTxActionAck(common=common, delegate=action)
elif isinstance(action, eos_proto.EosActionUndelegate):
msg = eos_proto.EosTxActionAck(common=common, undelegate=action)
elif isinstance(action, eos_proto.EosActionRefund):
msg = eos_proto.EosTxActionAck(common=common, refund=action)
elif isinstance(action, eos_proto.EosActionBuyRam):
msg = eos_proto.EosTxActionAck(common=common, buy_ram=action)
elif isinstance(action, eos_proto.EosActionBuyRamBytes):
msg = eos_proto.EosTxActionAck(common=common, buy_ram_bytes=action)
elif isinstance(action, eos_proto.EosActionSellRam):
msg = eos_proto.EosTxActionAck(common=common, sell_ram=action)
elif isinstance(action, eos_proto.EosActionVoteProducer):
msg = eos_proto.EosTxActionAck(common=common, vote_producer=action)
elif isinstance(action, eos_proto.EosActionUpdateAuth):
msg = eos_proto.EosTxActionAck(common=common, update_auth=action)
elif isinstance(action, eos_proto.EosActionDeleteAuth):
msg = eos_proto.EosTxActionAck(common=common, delete_auth=action)
elif isinstance(action, eos_proto.EosActionUnlinkAuth):
msg = eos_proto.EosTxActionAck(common=common, unlink_auth=action)
elif isinstance(action, eos_proto.EosActionLinkAuth):
msg = eos_proto.EosTxActionAck(common=common, link_auth=action)
elif isinstance(action, eos_proto.EosActionNewAccount):
msg = eos_proto.EosTxActionAck(common=common, new_account=action)
elif isinstance(action, eos_proto.EosActionUnknown):
msg = eos_proto.EosTxActionAck(common=common, unknown=action)
else:
raise Exception("Unknown EOS Action")
response = self.call(msg)
if not isinstance(response, eos_proto.EosSignedTx):
raise Exception("Unexpected EOS signing response")
return response
@session
def eos_sign_tx(self, n, transaction):
tx = eos.parse_transaction_json(copy.deepcopy(transaction))
header = eos_proto.EosTxHeader(
expiration=tx.expiration,
ref_block_num=tx.ref_block_num,
ref_block_prefix=tx.ref_block_prefix,
max_net_usage_words=tx.net_usage_words,
max_cpu_usage_ms=tx.max_cpu_usage_ms,
delay_sec=tx.delay_sec)
msg = eos_proto.EosSignTx(
address_n=n,
chain_id=tx.chain_id,
header=header,
num_actions=tx.num_actions)
response = self.call(msg)
try:
while isinstance(response, eos_proto.EosTxActionRequest):
a = eos.parse_action(tx.actions.pop(0))
if isinstance(a, list):
while len(a) and isinstance(response, eos_proto.EosTxActionRequest):
response = self.call(a.pop(0))
else:
response = self.call(a)
except IndexError:
# pop from empty list
raise Exception("Unexpected EOS signing response")
if not isinstance(response, eos_proto.EosSignedTx):
raise Exception("Unexpected EOS signing response")
return response
@expect(nano_proto.NanoAddress)
def nano_get_address(self, coin_name, address_n, show_display=False):
msg = nano_proto.NanoGetAddress(
coin_name=coin_name,
address_n=address_n,
show_display=show_display)
return self.call(msg)
@expect(nano_proto.NanoSignedTx)
def nano_sign_tx(
self, coin_name, address_n,
grandparent_hash=None,
parent_link=None,
parent_representative=None,
parent_balance=None,
link_hash=None,
link_recipient=None,
link_recipient_n=None,
representative=None,
balance=None,
):
parent_block = None
if (grandparent_hash is not None or
parent_link is not None or
parent_representative is not None or
parent_balance is not None):
parent_block = nano_proto.NanoSignTx.ParentBlock(
parent_hash=grandparent_hash,
link=parent_link,
representative=parent_representative,
balance=nano.encode_balance(parent_balance),
)
msg = nano_proto.NanoSignTx(
coin_name=coin_name,
address_n=address_n,
parent_block=parent_block,
link_hash=link_hash,
link_recipient=link_recipient,
link_recipient_n=link_recipient_n,
representative=representative,
balance=nano.encode_balance(balance),
)
return self.call(msg)
@field('address')
@expect(cosmos_proto.CosmosAddress)
def cosmos_get_address(self, address_n, show_display=False):
return self.call(
cosmos_proto.CosmosGetAddress(address_n=address_n, show_display=show_display)
)
@session
def cosmos_sign_tx(
self,
address_n,
account_number,
chain_id,
fee,
gas,
msgs,
memo,
sequence,
exchange_types=None
):
resp = self.call(cosmos_proto.CosmosSignTx(
address_n=address_n,
account_number=account_number,
chain_id=chain_id,
fee_amount=fee,
gas=gas,
memo=memo,
sequence=sequence,
msg_count=len(msgs)
))
for (msg, exchange_type) in zip(msgs, exchange_types or [None] * len(msgs)):
if not isinstance(resp, cosmos_proto.CosmosMsgRequest):
raise CallException(
"Cosmos.ExpectedMsgRequest",
"Message request expected but not received.",
)
if msg['type'] == "cosmos-sdk/MsgSend":
if len(msg['value']['amount']) != 1:
raise CallException("Cosmos.MsgSend", "Multiple amounts per msg not supported")
denom = msg['value']['amount'][0]['denom']
if denom != 'uatom':
raise CallException("Cosmos.MsgSend", "Unsupported denomination: " + denom)
resp = self.call(cosmos_proto.CosmosMsgAck(
send=cosmos_proto.CosmosMsgSend(
from_address=msg['value']['from_address'],
to_address=msg['value']['to_address'],
amount=int(msg['value']['amount'][0]['amount']),
address_type=types.EXCHANGE if exchange_type is not None else types.SPEND,
exchange_type=exchange_type
)
))
else:
raise CallException(
"Cosmos.UnknownMsg",
"Cosmos message %s is not yet supported" % (msg['type'],)
)
if not isinstance(resp, cosmos_proto.CosmosSignedTx):
raise CallException(
"Cosmos.UnexpectedEndOfOperations",
"Reached end of operations without a signature.",
)
return resp
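    # Illustrative msg shape (assumption: matches the cosmos-sdk/MsgSend branch above;
    # addresses and amounts are hypothetical placeholders):
    #
    #     msgs = [{
    #         'type': 'cosmos-sdk/MsgSend',
    #         'value': {
    #             'from_address': 'cosmos1...',
    #             'to_address': 'cosmos1...',
    #             'amount': [{'denom': 'uatom', 'amount': '100000'}],
    #         },
    #     }]
    #     signed = client.cosmos_sign_tx(address_n, account_number=0,
    #                                    chain_id='cosmoshub-4', fee=5000, gas=200000,
    #                                    msgs=msgs, memo='', sequence=0)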
@field('address')
@expect(thorchain_proto.ThorchainAddress)
def thorchain_get_address(self, address_n, show_display=False, testnet=False):
return self.call(
thorchain_proto.ThorchainGetAddress(address_n=address_n, show_display=show_display, testnet=testnet)
)
@session
def thorchain_sign_tx(
self,
address_n,
account_number,
chain_id,
fee,
gas,
msgs,
memo,
sequence,
exchange_types=None,
testnet=None
):
resp = self.call(thorchain_proto.ThorchainSignTx(
address_n=address_n,
account_number=account_number,
chain_id=chain_id,
fee_amount=fee,
gas=gas,
memo=memo,
sequence=sequence,
msg_count=len(msgs),
testnet=testnet
))
for (msg, exchange_type) in zip(msgs, exchange_types or [None] * len(msgs)):
if not isinstance(resp, thorchain_proto.ThorchainMsgRequest):
raise CallException(
"Thorchain.ExpectedMsgRequest",
"Message request expected but not received.",
)
if msg['type'] == "thorchain/MsgSend":
if len(msg['value']['amount']) != 1:
raise CallException("Thorchain.MsgSend", "Multiple amounts per send msg not supported")
denom = msg['value']['amount'][0]['denom']
if denom != 'rune':
raise CallException("Thorchain.MsgSend", "Unsupported denomination: " + denom)
resp = self.call(thorchain_proto.ThorchainMsgAck(
send=thorchain_proto.ThorchainMsgSend(
from_address=msg['value']['from_address'],
to_address=msg['value']['to_address'],
amount=int(msg['value']['amount'][0]['amount']),
address_type=types.EXCHANGE if exchange_type is not None else types.SPEND,
exchange_type=exchange_type
)
))
elif msg['type'] == "thorchain/MsgDeposit":
if len(msg['value']['coins']) != 1:
raise CallException("Thorchain.MsgDeposit", "Multiple coins per deposit msg not supported")
asset = msg['value']['coins'][0]['asset']
if asset != 'THOR.RUNE':
raise CallException("Thorchain.MsgDeposit", "Unsupported asset: " + asset)
resp = self.call(thorchain_proto.ThorchainMsgAck(
deposit=thorchain_proto.ThorchainMsgDeposit(
asset=asset,
amount=int(msg['value']['coins'][0]['amount']),
memo=msg['value']['memo'],
signer=msg['value']['signer']
)
))
else:
raise CallException(
"Thorchain.UnknownMsg",
"Thorchain message %s is not yet supported" % (msg['type'],)
)
if not isinstance(resp, thorchain_proto.ThorchainSignedTx):
raise CallException(
"Thorchain.UnexpectedEndOfOperations",
"Reached end of operations without a signature.",
)
return resp
@field('address')
@expect(ripple_proto.RippleAddress)
def ripple_get_address(self, address_n, show_display=False):
return self.call(
ripple_proto.RippleGetAddress(address_n=address_n, show_display=show_display)
)
@session
@expect(ripple_proto.RippleSignedTx)
def ripple_sign_tx(self, address_n, msg):
msg.address_n = address_n
return self.call(msg)
@field('entropy')
@expect(proto.Entropy)
def get_entropy(self, size):
return self.call(proto.GetEntropy(size=size))
@field('message')
@expect(proto.Success)
def ping(self, msg, button_protection=False, pin_protection=False, passphrase_protection=False):
msg = proto.Ping(message=msg,
button_protection=button_protection,
pin_protection=pin_protection,
passphrase_protection=passphrase_protection)
return self.call(msg)
def get_device_id(self):
return self.features.device_id
@field('message')
@expect(proto.Success)
def apply_settings(self, label=None, language=None, use_passphrase=None, homescreen=None):
settings = proto.ApplySettings()
if label != None:
settings.label = label
if language:
settings.language = language
if use_passphrase != None:
settings.use_passphrase = use_passphrase
out = self.call(settings)
self.init_device() # Reload Features
return out
@field('message')
@expect(proto.Success)
def apply_policy(self, policy_name, enabled):
policy = types.PolicyType(policy_name=policy_name, enabled=enabled)
apply_policies = proto.ApplyPolicies(policy=[policy])
out = self.call(apply_policies)
self.init_device() # Reload Features
return out
@field('message')
@expect(proto.Success)
def clear_session(self):
return self.call(proto.ClearSession())
@field('message')
@expect(proto.Success)
def change_pin(self, remove=False):
ret = self.call(proto.ChangePin(remove=remove))
self.init_device() # Re-read features
return ret
@expect(proto.MessageSignature)
def sign_message(self, coin_name, n, message, script_type=types.SPENDADDRESS):
n = self._convert_prime(n)
# Convert message to UTF8 NFC (seems to be a bitcoin-qt standard)
message = normalize_nfc(message).encode("utf-8")
return self.call(proto.SignMessage(coin_name=coin_name, address_n=n, message=message, script_type=script_type))
@expect(proto.SignedIdentity)
def sign_identity(self, identity, challenge_hidden, challenge_visual, ecdsa_curve_name=DEFAULT_CURVE):
return self.call(proto.SignIdentity(identity=identity, challenge_hidden=challenge_hidden, challenge_visual=challenge_visual, ecdsa_curve_name=ecdsa_curve_name))
def verify_message(self, coin_name, address, signature, message):
# Convert message to UTF8 NFC (seems to be a bitcoin-qt standard)
message = normalize_nfc(message).encode("utf-8")
try:
resp = self.call(proto.VerifyMessage(address=address, signature=signature, message=message, coin_name=coin_name))
except CallException as e:
resp = e
if isinstance(resp, proto.Success):
return True
return False
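    # Illustrative round trip (path and message text are hypothetical):
    #
    #     sig = client.sign_message('Bitcoin', client.expand_path("m/44'/0'/0'/0/0"), 'hello')
    #     assert client.verify_message('Bitcoin', sig.address, sig.signature, 'hello')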
@field('value')
@expect(proto.CipheredKeyValue)
def encrypt_keyvalue(self, n, key, value, ask_on_encrypt=True, ask_on_decrypt=True, iv=b''):
n = self._convert_prime(n)
return self.call(proto.CipherKeyValue(address_n=n,
key=key,
value=value,
encrypt=True,
ask_on_encrypt=ask_on_encrypt,
ask_on_decrypt=ask_on_decrypt,
iv=iv))
@field('value')
@expect(proto.CipheredKeyValue)
def decrypt_keyvalue(self, n, key, value, ask_on_encrypt=True, ask_on_decrypt=True, iv=b''):
n = self._convert_prime(n)
return self.call(proto.CipherKeyValue(address_n=n,
key=key,
value=value,
encrypt=False,
ask_on_encrypt=ask_on_encrypt,
ask_on_decrypt=ask_on_decrypt,
iv=iv))
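    # Illustrative round trip (assumption: key/value are hypothetical; the device
    # expects the value length to be a multiple of 16 bytes):
    #
    #     path = client.expand_path("m/10016'/0")
    #     enc = client.encrypt_keyvalue(path, 'my label', b'0123456789abcdef')
    #     dec = client.decrypt_keyvalue(path, 'my label', enc)  # == b'0123456789abcdef'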
def _prepare_sign_tx(self, coin_name, inputs, outputs):
tx = types.TransactionType()
tx.inputs.extend(inputs)
tx.outputs.extend(outputs)
txes = {None: tx}
txes[b''] = tx
force_bip143 = ['BitcoinGold', 'BitcoinCash', 'BitcoinSV']
if coin_name in force_bip143:
return txes
known_hashes = []
for inp in inputs:
if inp.prev_hash in txes:
continue
if inp.script_type in (types.SPENDP2SHWITNESS,
types.SPENDWITNESS):
continue
if not self.tx_api:
raise Exception('TX_API not defined')
prev_tx = self.tx_api.get_tx(binascii.hexlify(inp.prev_hash).decode('utf-8'))
txes[inp.prev_hash] = prev_tx
return txes
@session
def sign_tx(self, coin_name, inputs, outputs, version=None, lock_time=None, debug_processor=None):
start = time.time()
txes = self._prepare_sign_tx(coin_name, inputs, outputs)
# Prepare and send initial message
tx = proto.SignTx()
tx.inputs_count = len(inputs)
tx.outputs_count = len(outputs)
tx.coin_name = coin_name
if version is not None:
tx.version = version
if lock_time is not None:
tx.lock_time = lock_time
res = self.call(tx)
# Prepare structure for signatures
signatures = [None] * len(inputs)
serialized_tx = b''
counter = 0
while True:
counter += 1
if isinstance(res, proto.Failure):
raise CallException("Signing failed")
if not isinstance(res, proto.TxRequest):
raise CallException("Unexpected message")
# If there's some part of signed transaction, let's add it
if res.HasField('serialized') and res.serialized.HasField('serialized_tx'):
if self.verbose:
log("RECEIVED PART OF SERIALIZED TX (%d BYTES)" % len(res.serialized.serialized_tx))
serialized_tx += res.serialized.serialized_tx
if res.HasField('serialized') and res.serialized.HasField('signature_index'):
if signatures[res.serialized.signature_index] != None:
raise Exception("Signature for index %d already filled" % res.serialized.signature_index)
signatures[res.serialized.signature_index] = res.serialized.signature
if res.request_type == types.TXFINISHED:
# Device didn't ask for more information, finish workflow
break
# Device asked for one more information, let's process it.
if not res.details.tx_hash:
current_tx = txes[None]
else:
current_tx = txes[bytes(res.details.tx_hash)]
if res.request_type == types.TXMETA:
msg = types.TransactionType()
msg.version = current_tx.version
msg.lock_time = current_tx.lock_time
msg.inputs_cnt = len(current_tx.inputs)
if res.details.tx_hash:
msg.outputs_cnt = len(current_tx.bin_outputs)
else:
msg.outputs_cnt = len(current_tx.outputs)
msg.extra_data_len = len(current_tx.extra_data) if current_tx.extra_data else 0
res = self.call(proto.TxAck(tx=msg))
continue
elif res.request_type == types.TXINPUT:
msg = types.TransactionType()
msg.inputs.extend([current_tx.inputs[res.details.request_index], ])
if debug_processor is not None:
# msg needs to be deep copied so when it's modified
# the other messages stay intact
from copy import deepcopy
msg = deepcopy(msg)
# If debug_processor function is provided,
# pass thru it the request and prepared response.
# This is useful for tests, see test_msg_signtx
msg = debug_processor(res, msg)
res = self.call(proto.TxAck(tx=msg))
continue
elif res.request_type == types.TXOUTPUT:
msg = types.TransactionType()
if res.details.tx_hash:
msg.bin_outputs.extend([current_tx.bin_outputs[res.details.request_index], ])
else:
msg.outputs.extend([current_tx.outputs[res.details.request_index], ])
if debug_processor != None:
# msg needs to be deep copied so when it's modified
# the other messages stay intact
from copy import deepcopy
msg = deepcopy(msg)
# If debug_processor function is provided,
# pass thru it the request and prepared response.
# This is useful for tests, see test_msg_signtx
msg = debug_processor(res, msg)
res = self.call(proto.TxAck(tx=msg))
continue
elif res.request_type == types.TXEXTRADATA:
o, l = res.details.extra_data_offset, res.details.extra_data_len
msg = types.TransactionType()
msg.extra_data = current_tx.extra_data[o:o + l]
res = self.call(proto.TxAck(tx=msg))
continue
if None in signatures:
raise Exception("Some signatures are missing!")
if self.verbose:
log("SIGNED IN %.03f SECONDS, CALLED %d MESSAGES, %d BYTES" % \
(time.time() - start, counter, len(serialized_tx)))
return (signatures, serialized_tx)
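# Usage sketch (illustrative only; 'client' and the input/output lists are
# assumed to be built elsewhere with this library):
#     signatures, raw_tx = client.sign_tx('Bitcoin', inputs, outputs)
#     print(binascii.hexlify(raw_tx))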
@field('message')
@expect(proto.Success)
def wipe_device(self):
ret = self.call(proto.WipeDevice())
self.init_device()
return ret
@field('message')
@expect(proto.Success)
def recovery_device(self, use_trezor_method, word_count, passphrase_protection, pin_protection, label, language):
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
if use_trezor_method:
raise Exception("Trezor-style recovery is no longer supported")
elif word_count not in (12, 18, 24):
raise Exception("Invalid word count. Use 12/18/24")
res = self.call(proto.RecoveryDevice(word_count=int(word_count),
passphrase_protection=bool(passphrase_protection),
pin_protection=bool(pin_protection),
label=label,
language=language,
enforce_wordlist=True,
use_character_cipher=True))
self.init_device()
return res
@field('message')
@expect(proto.Success)
def test_recovery_seed(self, word_count, language):
if not self.features.initialized:
raise Exception("Device must already be initialized in order to perform test recovery")
elif word_count not in (12, 18, 24):
raise Exception("Invalid word count. Use 12/18/24")
res = self.call(proto.RecoveryDevice(word_count=int(word_count),
language=language,
enforce_wordlist=True,
use_character_cipher=True,
dry_run=True))
self.init_device()
return res
@field('message')
@expect(proto.Success)
@session
def reset_device(self, display_random, strength, passphrase_protection, pin_protection, label, language):
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
# Begin with device reset workflow
msg = proto.ResetDevice(display_random=display_random,
strength=strength,
language=language,
passphrase_protection=bool(passphrase_protection),
pin_protection=bool(pin_protection),
label=label)
resp = self.call(msg)
if not isinstance(resp, proto.EntropyRequest):
raise Exception("Invalid response, expected EntropyRequest")
external_entropy = self._get_local_entropy()
if self.verbose:
log("Computer generated entropy: " + binascii.hexlify(external_entropy).decode('ascii'))
ret = self.call(proto.EntropyAck(entropy=external_entropy))
self.init_device()
return ret
@field('message')
@expect(proto.Success)
def load_device_by_mnemonic(self, mnemonic, pin, passphrase_protection, label, language, skip_checksum=False):
m = Mnemonic('english')
if not skip_checksum and not m.check(mnemonic):
raise Exception("Invalid mnemonic checksum")
# Convert mnemonic to UTF-8 NFKD
mnemonic = Mnemonic.normalize_string(mnemonic)
# Convert mnemonic to ASCII stream
mnemonic = normalize_nfc(mnemonic)
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
resp = self.call(proto.LoadDevice(mnemonic=mnemonic, pin=pin,
passphrase_protection=passphrase_protection,
language=language,
label=label,
skip_checksum=skip_checksum))
self.init_device()
return resp
@field('message')
@expect(proto.Success)
def load_device_by_xprv(self, xprv, pin, passphrase_protection, label, language):
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
if xprv[0:4] not in ('xprv', 'tprv'):
raise Exception("Unknown type of xprv")
if len(xprv) < 100 or len(xprv) > 112:
raise Exception("Invalid length of xprv")
node = types.HDNodeType()
data = binascii.hexlify(tools.b58decode(xprv, None))
if data[90:92] != b'00':
raise Exception("Contains invalid private key")
checksum = binascii.hexlify(hashlib.sha256(hashlib.sha256(binascii.unhexlify(data[:156])).digest()).digest()[:4])
if checksum != data[156:]:
raise Exception("Checksum doesn't match")
# version 0488ade4
# depth 00
# fingerprint 00000000
# child_num 00000000
# chaincode 873dff81c02f525623fd1fe5167eac3a55a049de3d314bb42ee227ffed37d508
# privkey 00e8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35
# checksum e77e9d71
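# The slice offsets below are in hex characters (two per byte) into the
# hex-encoded, base58-decoded extended key laid out above.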
node.depth = int(data[8:10], 16)
node.fingerprint = int(data[10:18], 16)
node.child_num = int(data[18:26], 16)
node.chain_code = binascii.unhexlify(data[26:90])
node.private_key = binascii.unhexlify(data[92:156]) # skip 0x00 indicating privkey
resp = self.call(proto.LoadDevice(node=node,
pin=pin,
passphrase_protection=passphrase_protection,
language=language,
label=label))
self.init_device()
return resp
def firmware_update(self, fp):
if self.features.bootloader_mode == False:
raise Exception("Device must be in bootloader mode")
resp = self.call(proto.FirmwareErase())
if isinstance(resp, proto.Failure) and resp.code == types.Failure_FirmwareError:
return False
data = fp.read()
data_hash = hashlib.sha256(data).digest()
resp = self.call(proto.FirmwareUpload(payload_hash=data_hash, payload=data))
if isinstance(resp, proto.Success):
return True
elif isinstance(resp, proto.Failure) and resp.code == types.Failure_FirmwareError:
return False
raise Exception("Unexpected result %s" % resp)
class KeepKeyClient(ProtocolMixin, TextUIMixin, BaseClient):
pass
class KeepKeyClientVerbose(ProtocolMixin, TextUIMixin, DebugWireMixin, BaseClient):
pass
class KeepKeyDebuglinkClient(ProtocolMixin, DebugLinkMixin, BaseClient):
pass
class KeepKeyDebuglinkClientVerbose(ProtocolMixin, DebugLinkMixin, DebugWireMixin, BaseClient):
pass
| lgpl-3.0 | -2,209,451,958,689,590,800 | 36.169617 | 168 | 0.574699 | false |
Kalle0x12/Test2 | csr_test.py | 1 | 2910 | from __future__ import print_function
import lis_wrapper
import numpy as np
import scipy.sparse
# Define a symmetric 8 x 8 dense upper triangular matrix first.
# This matrix is part of the examples which come with Intel's MKL library
# and is used here for historical reasons.
# A:
# 7.0, 1.0, 2.0, 7.0,
# -4.0, 8.0, 2.0,
# 1.0, 5.0,
# 7.0, 9.0,
# 5.0, 1.0, 5.0,
# -1.0, 5.0,
# 11.0,
# 5.0
A = np.zeros((8, 8), dtype=np.float64)
A[0, 0] = 7.0
A[0, 2] = 1.0
A[0, 5] = 2.0
A[0, 6] = 7.0
A[1, 1] = -4.0
A[1, 2] = 8.0
A[1, 4] = 2.0
A[2, 2] = 1.0
A[2, 7] = 5.0
A[3, 3] = 7.0
A[3, 6] = 9.0
A[4, 4] = 5.0
A[4, 5] = 1.0
A[4, 6] = 5.0
A[5, 5] = -1.0
A[5, 7] = 5.0
A[6, 6] = 11.0
A[7, 7] = 5.0
# print "Dense matrix:"
print(A)
# Dense matrix to sparse matrix in CSR format
Acsr = scipy.sparse.csr_matrix(A)
print("Sparse upper triangular CSR matrix:")
print("values: ", Acsr.data)
# Indices are 0 based
print("index: ", Acsr.indices)
print("pointer: ", Acsr.indptr)
# LIS Manual: Appendix File Formats
# "Note that both the upper and lower triangular entries need to be stored
# irrespective of whether the matrix is symmetric or not."
# Convert the upper triangular CSR matrix Acsr to 'full' CSR matrix Acsr_full
Acsr_full = Acsr + Acsr.T - scipy.sparse.diags(Acsr.diagonal())
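# Optional sanity check (not part of the original example): the 'full'
# matrix should now be symmetric up to round-off.
assert np.allclose(Acsr_full.toarray(), Acsr_full.toarray().T)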
print()
print("Sparse 'full' CSR matrix:")
print("values: ", Acsr_full.data)
# Indices are 0 based
print("index: ", Acsr_full.indices)
print("pointer: ", Acsr_full.indptr)
# initial guess for solution x
x = np.zeros(8)
# right hand side
b = np.ones(8)
info = 1 # make LIS more verbose
tol = 1e-6 # convergence tolerance
max_iter = 10000 # maximum number of iterations
logfname = "residuals.log" # log
# in lis_cmd following parameters are set:
# -i cg : conjugate gradient solver
# -p ssor : SSOR preconditioner
# -tol : convergence tolerance
# -maxiter : maximum number of iterations
# -p ssor : SSOR preconditioner
# -ssor_w 1.0 : relaxation coefficient w (0 < w < 2)
# -initx_zeros 0 : don't set initial values for x to 0. The initial guess is passed by x to LIS
# -print mem : Save the residual history to logfile
lis_cmd = "-i cg -tol %e -maxiter %d -p ssor -ssor_w 1.0 -initx_zeros 0 -print mem" % (tol, max_iter)
lis_wrapper.lis(Acsr_full.data, Acsr_full.indices, Acsr_full.indptr, x, b, info, lis_cmd, logfname)
# check solution x with original dense matrix A first
# convert upper triangular matrix AA to 'full' matrix
y = (A + A.T - np.eye(A.shape[0]) * A.diagonal()).dot(x)
assert (np.allclose(b, y))
# check solution with sparse matrix Acsr_full
y = Acsr_full.dot(x)
assert (np.allclose(b, y))
print("Solution x: ", x)
print()
print("A * x:", y)
print("b :", b)
| gpl-3.0 | -5,039,573,383,625,121,000 | 26.714286 | 101 | 0.604467 | false |
ThunderGemios10/The-Super-Duper-Script-Editor | script_map.py | 1 | 74366 | ################################################################################
### Copyright © 2012-2013 BlackDragonHunt
###
### This file is part of the Super Duper Script Editor.
###
### The Super Duper Script Editor is free software: you can redistribute it
### and/or modify it under the terms of the GNU General Public License as
### published by the Free Software Foundation, either version 3 of the License,
### or (at your option) any later version.
###
### The Super Duper Script Editor is distributed in the hope that it will be
### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with the Super Duper Script Editor.
### If not, see <http://www.gnu.org/licenses/>.
################################################################################
SCRIPT_MAP = [
(
"Main Script",
[
(
"Prologue",
[
"e00_001_000.lin",
"e00_002_000.lin",
"e00_003_001.lin",
"e00_004_003.lin",
"e00_004_001.lin",
"e00_005_004.lin",
"e00_006_007.lin",
"script_pak_e00_0000.lin",
],
),
(
"Chapter 1",
[
(
"(n)everyday life",
[
"e01_001_007.lin",
"e01_001_103.lin",
"e01_002_101.lin",
"e01_002_146.lin",
"e01_003_135.lin",
"e01_004_135.lin",
"e01_005_103.lin",
"e01_006_101.lin",
"e01_006_103.lin",
"e01_006_135.lin",
"e01_006_136.lin",
"e01_006_137.lin",
"e01_006_139.lin",
"e01_006_146.lin",
"e01_007_001.lin",
"e01_007_003.lin",
"e01_007_004.lin",
"e01_007_005.lin",
"e01_007_010.lin",
"e01_008_001.lin",
"e01_008_003.lin",
"e01_008_004.lin",
"e01_008_005.lin",
"e01_008_006.lin",
"e01_008_009.lin",
"e01_008_010.lin",
"e01_008_014.lin",
"e01_008_015.lin",
"e01_008_016.lin",
"e01_008_101.lin",
"e01_008_103.lin",
"e01_008_135.lin",
"e01_008_136.lin",
"e01_008_137.lin",
"e01_008_139.lin",
"e01_008_146.lin",
(
"Free Time 00",
[
"e08_001_000.lin",
"e08_002_000.lin",
"e08_003_000.lin",
"e08_004_000.lin",
"e08_005_000.lin",
"e08_006_000.lin",
"e08_007_000.lin",
"e08_008_000.lin",
"e08_009_000.lin",
"e08_010_000.lin",
"e08_011_000.lin",
"e08_012_000.lin",
"e08_013_000.lin",
"e08_014_000.lin",
"e08_015_000.lin",
]
),
(
"Free Time 01",
[
"e08_001_001.lin",
"e08_002_001.lin",
"e08_003_001.lin",
"e08_004_001.lin",
"e08_005_001.lin",
"e08_006_001.lin",
"e08_007_002.lin",
"e08_008_001.lin",
"e08_009_001.lin",
"e08_010_001.lin",
"e08_011_001.lin",
"e08_012_001.lin",
"e08_013_001.lin",
"e08_014_001.lin",
]
),
"e01_009_001.lin",
"e01_009_003.lin",
"e01_009_004.lin",
"e01_009_005.lin",
"e01_009_006.lin",
"e01_009_009.lin",
"e01_009_010.lin",
"e01_009_014.lin",
"e01_009_015.lin",
"e01_009_016.lin",
"e01_009_101.lin",
"e01_009_103.lin",
"e01_009_135.lin",
"e01_009_136.lin",
"e01_009_137.lin",
"e01_009_139.lin",
"e01_009_146.lin",
(
"Free Time 02",
[
"e08_001_002.lin",
"e08_002_002.lin",
"e08_003_002.lin",
"e08_004_002.lin",
"e08_005_002.lin",
"e08_006_002.lin",
"e08_007_001.lin",
"e08_008_002.lin",
"e08_009_002.lin",
"e08_010_003.lin",
"e08_011_002.lin",
"e08_012_002.lin",
"e08_013_002.lin",
"e08_014_002.lin",
]
),
(
"Free Time 03",
[
"e08_001_003.lin",
"e08_002_003.lin",
"e08_003_003.lin",
"e08_004_003.lin",
"e08_005_003.lin",
"e08_006_003.lin",
"e08_007_003.lin",
"e08_008_003.lin",
"e08_009_003.lin",
"e08_010_004.lin",
"e08_011_003.lin",
"e08_012_003.lin",
"e08_013_003.lin",
"e08_014_003.lin",
]
),
"e01_011_103.lin",
"e01_011_101.lin",
"e01_012_135.lin",
"e01_013_001.lin",
"e01_013_003.lin",
"e01_013_101.lin",
"e01_014_009.lin",
"e01_015_001.lin",
"e01_015_003.lin",
"e01_015_004.lin",
"e01_015_101.lin",
"e01_016_014.lin",
"e01_017_101.lin",
"e01_018_103.lin",
"e01_019_101.lin",
"e01_019_119.lin",
"e01_020_101.lin",
"e01_020_119.lin",
"e01_021_101.lin",
"e01_021_135.lin",
"e01_022_104.lin",
]
),
(
"neveryday life",
[
"e01_023_006.lin",
"e01_022_006.lin",
"e01_024_006.lin",
"e01_025_104.lin",
"e01_025_106.lin",
"e01_027_104.lin",
"e01_029_104.lin",
"e01_024_001.lin",
"e01_024_015.lin",
"e01_024_101.lin",
(
"Generic Text",
[
"e01_024_003.lin",
"e01_024_004.lin",
"e01_024_010.lin",
"e01_024_014.lin",
"e01_024_016.lin",
"e01_024_139.lin",
"e01_024_146.lin",
]
),
"e01_031_119.lin",
"e01_032_009.lin",
"e01_033_135.lin",
"e01_034_136.lin",
"e01_035_135.lin",
"e01_036_137.lin",
"e01_037_101.lin",
"e01_038_137.lin",
"e01_038_156.lin",
"e01_038_138.lin",
"e01_039_006.lin",
"e01_255_000.lin",
"e01_040_001.lin",
"e01_040_201.lin",
"e01_040_203.lin",
]
),
(
"Class Trial",
[
"e01_101_000.lin",
"e01_150_000.lin",
"nonstop_01_001.dat",
"e01_102_000.lin",
"e01_103_000.lin",
"nonstop_01_002.dat",
"e01_104_000.lin",
"e01_105_000.lin",
"e01_151_000.lin",
"nonstop_01_003.dat",
"e01_106_000.lin",
"e01_107_000.lin",
"e01_154_000.lin",
"e01_108_000.lin",
"anagram_11.dat",
"e01_109_000.lin",
"e01_152_000.lin",
"nonstop_01_004.dat",
"e01_110_000.lin",
"e01_111_000.lin",
"nonstop_01_005.dat",
"e01_112_000.lin",
"e01_113_000.lin",
"e01_153_000.lin",
"nonstop_01_006.dat",
"e01_114_000.lin",
"e01_115_000.lin",
"nonstop_01_007.dat",
"e01_116_000.lin",
"e01_117_000.lin",
"nonstop_01_008.dat",
"e01_118_000.lin",
"e01_119_000.lin",
"e01_156_000.lin",
"e01_120_000.lin",
"e01_121_000.lin",
"e01_155_000.lin",
"e01_122_000.lin",
"hs_mtb_s01.pak",
"e01_123_000.lin",
"e01_200_000.lin",
"e01_201_000.lin",
"e01_198_000.lin",
"e01_199_000.lin",
"e01_196_000.lin",
"e01_197_000.lin",
]
),
(
"Generic Text",
[
"e01_000_146.lin",
"script_pak_e01_0000.lin",
"script_pak_e01_0023.lin",
]
),
]
),
(
"Chapter 2",
[
(
"(n)everyday life",
[
"e02_001_103.lin",
"e02_001_105.lin",
"e02_002_135.lin",
"e02_003_006.lin",
"e02_004_006.lin",
"e02_004_004.lin",
"e02_004_001.lin",
"e02_004_021.lin",
"e02_004_031.lin",
"e02_004_032.lin",
"e02_005_022.lin",
"e02_006_026.lin",
"e02_006_027.lin",
"e02_006_030.lin",
"e02_007_135.lin",
"e02_008_103.lin",
"e02_008_101.lin",
"e02_009_135.lin",
"e02_009_001.lin",
"e02_009_021.lin",
"e02_009_026.lin",
"e02_009_101.lin",
"e02_009_136.lin",
"e02_009_140.lin",
"e02_009_141.lin",
"e02_009_146.lin",
"e02_010_022.lin",
"e02_011_103.lin",
"e02_011_001.lin",
"e02_011_021.lin",
"e02_011_022.lin",
"e02_011_101.lin",
(
"Free Time 04",
[
"e08_001_004.lin",
"e08_003_004.lin",
"e08_005_004.lin",
"e08_006_004.lin",
"e08_008_004.lin",
"e08_009_004.lin",
"e08_010_002.lin",
"e08_011_004.lin",
"e08_012_004.lin",
"e08_014_006.lin",
]
),
(
"Free Time 05",
[
"e08_001_005.lin",
"e08_003_005.lin",
"e08_005_005.lin",
"e08_006_005.lin",
"e08_008_005.lin",
"e08_009_005.lin",
"e08_010_003.lin",
"e08_011_005.lin",
"e08_012_005.lin",
"e08_014_007.lin",
]
),
"e02_013_103.lin",
"e02_013_101.lin",
"e02_013_135.lin",
(
"Free Time 06",
[
"e08_001_006.lin",
"e08_003_006.lin",
"e08_005_006.lin",
"e08_006_006.lin",
"e08_008_006.lin",
"e08_009_006.lin",
"e08_010_004.lin",
"e08_011_006.lin",
"e08_012_006.lin",
"e08_014_004.lin",
]
),
(
"Free Time 07",
[
"e08_001_007.lin",
"e08_003_002.lin",
"e08_005_002.lin",
"e08_006_007.lin",
"e08_008_003.lin",
"e08_009_002.lin",
"e08_010_002.lin",
"e08_011_002.lin",
"e08_012_002.lin",
"e08_014_005.lin",
]
),
"e02_014_103.lin",
"e02_016_101.lin",
"e02_016_103.lin",
"e02_017_135.lin",
"e02_017_144.lin",
"e02_018_103.lin",
"e02_018_135.lin",
"e02_019_103.lin",
"e02_020_022.lin",
"e02_020_021.lin",
"e02_021_103.lin",
(
"Free Time 08",
[
"e08_001_008.lin",
"e08_003_003.lin",
"e08_005_003.lin",
"e08_006_008.lin",
"e08_008_002.lin",
"e08_009_003.lin",
"e08_010_003.lin",
"e08_011_003.lin",
"e08_012_003.lin",
"e08_014_003.lin",
]
),
"e02_022_103.lin",
"e02_022_101.lin",
"e02_022_001.lin",
"e02_022_003.lin",
"e02_022_010.lin",
"e02_022_004.lin",
"e02_023_006.lin",
"e02_024_103.lin",
"e02_024_101.lin",
"e02_025_135.lin",
"e02_025_001.lin",
"e02_025_021.lin",
"e02_025_022.lin",
"e02_025_101.lin",
"e02_025_137.lin",
"e02_026_026.lin",
]
),
(
"neveryday life",
[
"e02_027_029.lin",
"e02_028_026.lin",
"e02_028_003.lin",
"e02_029_101.lin",
"e02_030_021.lin",
"e02_030_022.lin",
"e02_030_025.lin",
"e02_032_021.lin",
"e02_032_022.lin",
"e02_032_025.lin",
"e02_032_026.lin",
"e02_032_027.lin",
"e02_032_029.lin",
"e02_032_135.lin",
"e02_032_140.lin",
(
"Generic Text",
[
"e02_032_001.lin",
"e02_032_003.lin",
"e02_032_004.lin",
"e02_032_006.lin",
"e02_032_009.lin",
"e02_032_010.lin",
"e02_032_014.lin",
"e02_032_015.lin",
"e02_032_016.lin",
"e02_032_030.lin",
"e02_032_031.lin",
"e02_032_032.lin",
"e02_032_033.lin",
"e02_032_101.lin",
"e02_032_136.lin",
"e02_032_137.lin",
"e02_032_139.lin",
"e02_032_141.lin",
"e02_032_144.lin",
"e02_032_145.lin",
"e02_032_146.lin",
]
),
"e02_255_006.lin",
"e02_033_001.lin",
"e02_033_201.lin",
"e02_033_204.lin",
]
),
(
"Class Trial",
[
"e02_101_000.lin",
"nonstop_02_001.dat",
"e02_102_000.lin",
"e02_103_000.lin",
"e02_150_000.lin",
"nonstop_02_002.dat",
"e02_104_000.lin",
"e02_105_000.lin",
"e02_106_000.lin",
"anagram_21.dat",
"e02_107_000.lin",
"nonstop_02_003.dat",
"e02_108_000.lin",
"e02_109_000.lin",
"e02_151_000.lin",
"nonstop_02_004.dat",
"e02_110_000.lin",
"e02_111_000.lin",
"nonstop_02_005.dat",
"e02_112_000.lin",
"e02_113_000.lin",
"nonstop_02_006.dat",
"e02_114_000.lin",
"e02_115_000.lin",
"e02_116_000.lin",
"e02_117_000.lin",
"nonstop_02_007.dat",
"e02_118_000.lin",
"e02_119_000.lin",
"nonstop_02_008.dat",
"e02_120_000.lin",
"e02_121_000.lin",
"nonstop_02_009.dat",
"e02_122_000.lin",
"e02_123_000.lin",
"e02_124_000.lin",
"e02_125_000.lin",
"e02_152_000.lin",
"hs_mtb_s02.pak",
"e02_127_000.lin",
"e02_200_000.lin",
"e02_201_000.lin",
"e02_198_000.lin",
"e02_199_000.lin",
"e02_196_000.lin",
"e02_197_000.lin",
]
),
(
"Generic Text",
[
"e02_000_003.lin",
"e02_000_004.lin",
"e02_000_006.lin",
"e02_000_009.lin",
"e02_000_010.lin",
"e02_000_014.lin",
"e02_000_015.lin",
"e02_000_016.lin",
"e02_000_022.lin",
"e02_000_026.lin",
"e02_000_030.lin",
"e02_000_031.lin",
"e02_000_032.lin",
"e02_000_033.lin",
"e02_000_037.lin",
"e02_000_103.lin",
"e02_000_135.lin",
"e02_000_136.lin",
"e02_000_137.lin",
"e02_000_139.lin",
"e02_000_140.lin",
"e02_000_141.lin",
"e02_000_144.lin",
"e02_000_145.lin",
"e02_000_146.lin",
"script_pak_e02_0000.lin",
"script_pak_e02_0004.lin",
"script_pak_e02_0005.lin",
"script_pak_e02_0006.lin",
"script_pak_e02_0007.lin",
"script_pak_e02_0008.lin",
"script_pak_e02_0009.lin",
"script_pak_e02_0010.lin",
"script_pak_e02_0011.lin",
"script_pak_e02_0013.lin",
"script_pak_e02_0014.lin",
"script_pak_e02_0015.lin",
"script_pak_e02_0016.lin",
"script_pak_e02_0017.lin",
"script_pak_e02_0018.lin",
"script_pak_e02_0019.lin",
"script_pak_e02_0021.lin",
"script_pak_e02_0023.lin",
"script_pak_e02_0024.lin",
"script_pak_e02_0025.lin",
"script_pak_e02_0026.lin",
"script_pak_e02_0027.lin",
"script_pak_e02_0028.lin",
"script_pak_e02_0029.lin",
"script_pak_e02_0030.lin",
"script_pak_e02_0031.lin",
]
),
]
),
(
"Chapter 3",
[
(
"(n)everyday life",
[
"e03_001_123.lin",
"e03_002_135.lin",
"e03_003_021.lin",
"e03_003_027.lin",
"e03_003_041.lin",
"e03_003_051.lin",
"e03_003_052.lin",
(
"Generic Text",
[
"e03_003_001.lin",
"e03_003_003.lin",
"e03_003_101.lin",
"e03_003_135.lin",
"e03_003_136.lin",
"e03_003_141.lin",
]
),
"e03_004_042.lin",
"e03_005_044.lin",
"e03_005_046.lin",
"e03_006_047.lin",
"e03_006_048.lin",
"e03_255_003.lin",
"e03_007_135.lin",
"e03_255_004.lin",
"e03_007_141.lin",
"e03_007_142.lin",
"e03_008_101.lin",
"e03_008_141.lin",
"e03_008_135.lin",
"e03_009_006.lin",
"e03_010_103.lin",
"e03_010_101.lin",
"e03_011_135.lin",
"e03_012_103.lin",
"e03_012_001.lin",
"e03_012_021.lin",
"e03_012_041.lin",
"e03_012_101.lin",
"e03_012_136.lin",
(
"Free Time 09",
[
"e08_001_009.lin",
"e08_002_005.lin",
"e08_005_004.lin",
"e08_006_009.lin",
"e08_008_004.lin",
"e08_009_003.lin",
"e08_010_008.lin",
"e08_011_003.lin",
"e08_012_004.lin",
]
),
"e03_013_103.lin",
"e03_013_101.lin",
"e03_014_142.lin",
"e03_015_103.lin",
"e03_015_101.lin",
"e03_016_135.lin",
"e03_016_141.lin",
"e03_017_103.lin",
"e03_017_001.lin",
"e03_017_021.lin",
"e03_017_041.lin",
"e03_017_101.lin",
"e03_017_136.lin",
(
"Free Time 10",
[
"e08_001_010.lin",
"e08_002_006.lin",
"e08_005_007.lin",
"e08_006_010.lin",
"e08_008_005.lin",
"e08_009_004.lin",
"e08_010_007.lin",
"e08_011_004.lin",
"e08_012_005.lin",
]
),
(
"Free Time 11",
[
"e08_001_011.lin",
"e08_002_008.lin",
"e08_005_007.lin",
"e08_006_002.lin",
"e08_008_006.lin",
"e08_009_005.lin",
"e08_010_008.lin",
"e08_011_005.lin",
"e08_012_006.lin",
]
),
"e03_018_103.lin",
"e03_018_001.lin",
"e03_018_021.lin",
"e03_018_041.lin",
"e03_018_101.lin",
"e03_018_136.lin",
"e03_019_103.lin",
"e03_019_101.lin",
"e03_020_135.lin",
"e03_021_103.lin",
"e03_021_001.lin",
"e03_021_021.lin",
"e03_021_041.lin",
"e03_021_101.lin",
"e03_021_136.lin",
(
"Free Time 12",
[
"e08_001_012.lin",
"e08_002_007.lin",
"e08_005_006.lin",
"e08_006_003.lin",
"e08_008_002.lin",
"e08_009_006.lin",
"e08_010_009.lin",
"e08_011_006.lin",
"e08_012_002.lin",
]
),
"e03_022_103.lin",
"e03_022_001.lin",
"e03_022_021.lin",
"e03_022_041.lin",
"e03_022_101.lin",
"e03_022_136.lin",
(
"Free Time 13",
[
"e08_001_013.lin",
"e08_002_008.lin",
"e08_005_004.lin",
"e08_006_004.lin",
"e08_008_003.lin",
"e08_009_007.lin",
"e08_010_010.lin",
"e08_011_002.lin",
"e08_012_002.lin",
]
),
"e03_023_103.lin",
"e03_023_101.lin",
"e03_024_135.lin",
"e03_024_141.lin",
"e03_025_103.lin",
"e03_025_135.lin",
"e03_025_101.lin",
"e03_026_001.lin",
"e03_026_003.lin",
"e03_026_009.lin",
"e03_026_010.lin",
"e03_026_011.lin",
"e03_026_014.lin",
"e03_026_015.lin",
"e03_026_016.lin",
"e03_027_041.lin",
"e03_028_042.lin",
"e03_029_021.lin",
"e03_030_022.lin",
"e03_030_011.lin",
"e03_031_001.lin",
"e03_031_021.lin",
"e03_031_022.lin",
"e03_032_041.lin",
"e03_033_001.lin",
"e03_034_011.lin",
"e03_035_001.lin",
"e03_035_041.lin",
"e03_035_042.lin",
"e03_036_047.lin",
]
),
(
"neveryday life",
[
"e03_037_048.lin",
"e03_037_041.lin",
"e03_037_011.lin",
"e03_034_048.lin",
"e03_038_042.lin",
"e03_038_044.lin",
"e03_038_047.lin",
"e03_038_048.lin",
"e03_038_051.lin",
"e03_038_052.lin",
"e03_038_041.lin",
"e03_039_044.lin",
"e03_039_046.lin",
"e03_040_046.lin",
"e03_255_006.lin",
"e03_040_011.lin",
"e03_040_044.lin",
"e03_040_048.lin",
(
"Generic Text",
[
"e03_040_001.lin",
"e03_040_003.lin",
"e03_040_004.lin",
"e03_040_005.lin",
"e03_040_006.lin",
"e03_040_009.lin",
"e03_040_010.lin",
"e03_040_014.lin",
"e03_040_015.lin",
"e03_040_016.lin",
"e03_040_021.lin",
"e03_040_022.lin",
"e03_040_025.lin",
"e03_040_026.lin",
"e03_040_027.lin",
"e03_040_030.lin",
"e03_040_031.lin",
"e03_040_032.lin",
"e03_040_033.lin",
"e03_040_041.lin",
"e03_040_042.lin",
"e03_040_047.lin",
"e03_040_051.lin",
"e03_040_052.lin",
"e03_040_135.lin",
"e03_040_136.lin",
"e03_040_137.lin",
"e03_040_139.lin",
"e03_040_140.lin",
"e03_040_141.lin",
"e03_040_144.lin",
"e03_040_145.lin",
"e03_040_146.lin",
]
),
"e03_255_005.lin",
"e03_040_101.lin",
"e03_042_117.lin",
"e03_043_101.lin",
"e03_044_030.lin",
"e03_045_021.lin",
"e03_045_041.lin",
"e03_045_046.lin",
"e03_046_001.lin",
"e03_046_201.lin",
"e03_046_205.lin",
]
),
(
"Class Trial",
[
"e03_101_000.lin",
"nonstop_03_001.dat",
"e03_102_000.lin",
"e03_103_000.lin",
"e03_150_000.lin",
"hs_mtb_s03.pak",
"e03_105_000.lin",
"nonstop_03_002.dat",
"e03_106_000.lin",
"e03_107_000.lin",
"nonstop_03_003.dat",
"e03_108_000.lin",
"e03_109_000.lin",
"e03_110_000.lin",
"anagram_31.dat",
"e03_111_000.lin",
"nonstop_03_004.dat",
"e03_112_000.lin",
"e03_113_000.lin",
"nonstop_03_005.dat",
"e03_114_000.lin",
"e03_115_000.lin",
"nonstop_03_006.dat",
"e03_116_000.lin",
"e03_117_000.lin",
"nonstop_03_007.dat",
"e03_118_000.lin",
"e03_119_000.lin",
"nonstop_03_008.dat",
"e03_120_000.lin",
"e03_121_000.lin",
"nonstop_03_009.dat",
"e03_122_000.lin",
"e03_123_000.lin",
"nonstop_03_010.dat",
"e03_124_000.lin",
"e03_125_000.lin",
"nonstop_03_011.dat",
"e03_126_000.lin",
"e03_127_000.lin",
"e03_128_000.lin",
"e03_129_000.lin",
"e03_200_205.lin",
"e03_200_211.lin",
"e03_198_000.lin",
"e03_199_000.lin",
"e03_196_000.lin",
"e03_197_000.lin",
]
),
(
"Post-Trial",
[
"e03_201_101.lin",
"e03_201_141.lin",
"e03_202_021.lin",
"e03_202_033.lin",
"e03_202_035.lin",
"e03_202_036.lin",
"e03_203_001.lin",
"e03_203_004.lin",
"e03_203_006.lin",
]
),
(
"Generic Text",
[
"e03_000_003.lin",
"e03_000_004.lin",
"e03_000_005.lin",
"e03_000_006.lin",
"e03_000_009.lin",
"e03_000_010.lin",
"e03_000_011.lin",
"e03_000_014.lin",
"e03_000_015.lin",
"e03_000_016.lin",
"e03_000_022.lin",
"e03_000_025.lin",
"e03_000_026.lin",
"e03_000_030.lin",
"e03_000_031.lin",
"e03_000_032.lin",
"e03_000_033.lin",
"e03_000_037.lin",
"e03_000_042.lin",
"e03_000_044.lin",
"e03_000_046.lin",
"e03_000_047.lin",
"e03_000_048.lin",
"e03_000_051.lin",
"e03_000_052.lin",
"e03_000_135.lin",
"e03_000_136.lin",
"e03_000_137.lin",
"e03_000_139.lin",
"e03_000_140.lin",
"e03_000_141.lin",
"e03_000_144.lin",
"e03_000_145.lin",
"e03_000_146.lin",
"script_pak_e03_0000.lin",
"script_pak_e03_0004.lin",
"script_pak_e03_0005.lin",
"script_pak_e03_0006.lin",
"script_pak_e03_0007.lin",
"script_pak_e03_0008.lin",
"script_pak_e03_0009.lin",
"script_pak_e03_0010.lin",
"script_pak_e03_0012.lin",
"script_pak_e03_0013.lin",
"script_pak_e03_0014.lin",
"script_pak_e03_0016.lin",
"script_pak_e03_0017.lin",
"script_pak_e03_0018.lin",
"script_pak_e03_0019.lin",
"script_pak_e03_0020.lin",
"script_pak_e03_0021.lin",
"script_pak_e03_0022.lin",
"script_pak_e03_0023.lin",
"script_pak_e03_0025.lin",
"script_pak_e03_0026.lin",
"script_pak_e03_0027.lin",
"script_pak_e03_0028.lin",
"script_pak_e03_0029.lin",
"script_pak_e03_0031.lin",
"script_pak_e03_0032.lin",
"script_pak_e03_0036.lin",
"script_pak_e03_0037.lin",
"script_pak_e03_0038.lin",
"script_pak_e03_0039.lin",
"script_pak_e03_0040.lin",
"script_pak_e03_0041.lin",
"script_pak_e03_0043.lin",
"script_pak_e03_0044.lin",
"script_pak_e03_0045.lin",
]
),
]
),
(
"Chapter 4",
[
(
"(n)everyday life",
[
"e04_001_135.lin",
"e04_002_135.lin",
"e04_003_101.lin",
"e04_003_061.lin",
"e04_003_063.lin",
"e04_003_065.lin",
"e04_003_067.lin",
"e04_003_070.lin",
"e04_255_105.lin",
"e04_004_135.lin",
"e04_005_101.lin",
"e04_005_141.lin",
"e04_006_101.lin",
"e04_006_103.lin",
"e04_007_101.lin",
"e04_007_135.lin",
"e04_008_103.lin",
(
"Free Time 14",
[
"e08_002_009.lin",
"e08_006_005.lin",
"e08_008_007.lin",
"e08_009_002.lin",
"e08_010_006.lin",
"e08_011_009.lin",
]
),
(
"Free Time 15",
[
"e08_002_010.lin",
"e08_006_006.lin",
"e08_008_008.lin",
"e08_009_003.lin",
"e08_010_007.lin",
"e08_011_010.lin",
]
),
"e04_009_103.lin",
"e04_009_101.lin",
"e04_009_001.lin",
"e04_009_004.lin",
"e04_009_006.lin",
"e04_010_103.lin",
"e04_010_101.lin",
"e04_010_135.lin",
"e04_011_103.lin",
(
"Free Time 16",
[
"e08_002_011.lin",
"e08_006_007.lin",
"e08_008_007.lin",
"e08_009_008.lin",
"e08_010_005.lin",
"e08_011_007.lin",
]
),
(
"Free Time 17",
[
"e08_002_012.lin",
"e08_006_008.lin",
"e08_008_008.lin",
"e08_009_009.lin",
"e08_010_005.lin",
"e08_011_008.lin",
]
),
"e04_012_103.lin",
"e04_013_101.lin",
"e04_013_135.lin",
"e04_013_011.lin",
"e04_013_141.lin",
"e04_255_015.lin",
"e04_255_016.lin",
"e04_014_101.lin",
"e04_014_001.lin",
"e04_014_021.lin",
"e04_014_033.lin",
"e04_014_036.lin",
"e04_015_103.lin",
"e04_015_041.lin",
]
),
(
"neveryday life",
[
"e04_016_043.lin",
"e04_017_043.lin",
"e04_255_100.lin",
"e04_255_101.lin",
"e04_017_041.lin",
"e04_017_101.lin",
"e04_255_017.lin",
"e04_255_106.lin",
"e04_017_061.lin",
"e04_017_141.lin",
"e04_018_043.lin",
"e04_255_102.lin",
"e04_018_061.lin",
"e04_018_063.lin",
"e04_018_064.lin",
"e04_255_103.lin",
"e04_255_104.lin",
"e04_255_107.lin",
"e04_255_150.lin",
"e04_019_201.lin",
"e04_019_220.lin",
]
),
(
"Class Trial",
[
"e04_101_000.lin",
"nonstop_04_001.dat",
"e04_102_000.lin",
"e04_103_000.lin",
"nonstop_04_002.dat",
"e04_104_000.lin",
"e04_105_000.lin",
"nonstop_04_003.dat",
"e04_106_000.lin",
"e04_107_000.lin",
"hs_mtb_s04.pak",
"e04_109_000.lin",
"nonstop_04_004.dat",
"e04_110_000.lin",
"e04_111_000.lin",
"e04_112_000.lin",
"anagram_41.dat",
"e04_113_000.lin",
"e04_150_000.lin",
"hs_mtb_s05.pak",
"e04_115_000.lin",
"nonstop_04_005.dat",
"e04_116_000.lin",
"e04_117_000.lin",
"nonstop_04_006.dat",
"e04_118_000.lin",
"e04_119_000.lin",
"e04_120_000.lin",
"anagram_42.dat",
"e04_121_000.lin",
"nonstop_04_007.dat",
"e04_122_000.lin",
"e04_123_000.lin",
"nonstop_04_008.dat",
"e04_124_000.lin",
"e04_125_000.lin",
"hs_mtb_s06.pak",
"e04_127_000.lin",
"e04_128_000.lin",
"e04_129_000.lin",
"e04_200_206.lin",
"e04_200_213.lin",
"e04_198_000.lin",
"e04_199_000.lin",
"e04_195_000.lin",
"e04_196_000.lin",
"e04_197_000.lin",
]
),
(
"Post-Trial",
[
"e04_201_103.lin",
"e04_201_061.lin",
"e04_201_101.lin",
]
),
(
"Generic Text",
[
"e04_000_003.lin",
"e04_000_004.lin",
"e04_000_005.lin",
"e04_000_006.lin",
"e04_000_009.lin",
"e04_000_010.lin",
"e04_000_011.lin",
"e04_000_014.lin",
"e04_000_015.lin",
"e04_000_016.lin",
"e04_000_022.lin",
"e04_000_025.lin",
"e04_000_026.lin",
"e04_000_030.lin",
"e04_000_031.lin",
"e04_000_032.lin",
"e04_000_033.lin",
"e04_000_036.lin",
"e04_000_037.lin",
"e04_000_042.lin",
"e04_000_043.lin",
"e04_000_044.lin",
"e04_000_046.lin",
"e04_000_047.lin",
"e04_000_048.lin",
"e04_000_051.lin",
"e04_000_052.lin",
"e04_000_061.lin",
"e04_000_063.lin",
"e04_000_064.lin",
"e04_000_065.lin",
"e04_000_067.lin",
"e04_000_070.lin",
"e04_000_071.lin",
"e04_000_072.lin",
"e04_000_101.lin",
"e04_000_103.lin",
"e04_000_135.lin",
"e04_000_136.lin",
"e04_000_137.lin",
"e04_000_139.lin",
"e04_000_140.lin",
"e04_000_141.lin",
"e04_000_144.lin",
"e04_000_145.lin",
"e04_000_146.lin",
"e04_255_000.lin",
"e04_255_001.lin",
"e04_255_002.lin",
"e04_255_003.lin",
"e04_255_004.lin",
"e04_255_005.lin",
"e04_255_006.lin",
"e04_255_007.lin",
"e04_255_008.lin",
"e04_255_009.lin",
"e04_255_010.lin",
"e04_255_011.lin",
"e04_255_012.lin",
"e04_255_013.lin",
"e04_255_014.lin",
"e04_255_018.lin",
"e04_255_019.lin",
"e04_255_020.lin",
"e04_255_021.lin",
"e04_255_022.lin",
"e04_255_023.lin",
"e04_255_024.lin",
"e04_255_025.lin",
"e04_255_026.lin",
"e04_255_027.lin",
"e04_255_028.lin",
"e04_255_029.lin",
"e04_255_030.lin",
"e04_255_031.lin",
"e04_255_032.lin",
"e04_255_033.lin",
"e04_255_034.lin",
"e04_255_035.lin",
"e04_255_036.lin",
"e04_255_037.lin",
"e04_255_038.lin",
"e04_255_039.lin",
"e04_255_040.lin",
"e04_255_041.lin",
"e04_255_042.lin",
"script_pak_e04_0000.lin",
"script_pak_e04_0004.lin",
"script_pak_e04_0005.lin",
"script_pak_e04_0006.lin",
"script_pak_e04_0007.lin",
"script_pak_e04_0008.lin",
"script_pak_e04_0009.lin",
"script_pak_e04_0010.lin",
"script_pak_e04_0011.lin",
"script_pak_e04_0012.lin",
"script_pak_e04_0013.lin",
"script_pak_e04_0015.lin",
"script_pak_e04_0016.lin",
"script_pak_e04_0017.lin",
"script_pak_e04_0018.lin",
"script_pak_e04_0019.lin",
"script_pak_e04_0020.lin",
"script_pak_e04_0021.lin",
"script_pak_e04_0022.lin",
"script_pak_e04_0023.lin",
"script_pak_e04_0025.lin",
"script_pak_e04_0026.lin",
"script_pak_e04_0027.lin",
"script_pak_e04_0028.lin",
"script_pak_e04_0029.lin",
"script_pak_e04_0030.lin",
"script_pak_e04_0031.lin",
"script_pak_e04_0032.lin",
"script_pak_e04_0033.lin",
"script_pak_e04_0034.lin",
"script_pak_e04_0035.lin",
"script_pak_e04_0036.lin",
"script_pak_e04_0037.lin",
"script_pak_e04_0038.lin",
"script_pak_e04_0039.lin",
"script_pak_e04_0040.lin",
"script_pak_e04_0041.lin",
"script_pak_e04_0042.lin",
"script_pak_e04_0043.lin",
"script_pak_e04_0044.lin",
"script_pak_e04_0045.lin",
"script_pak_e04_0046.lin",
"script_pak_e04_0047.lin",
"script_pak_e04_0048.lin",
"script_pak_e04_0050.lin",
"script_pak_e04_0051.lin",
"script_pak_e04_0052.lin",
]
),
]
),
(
"Chapter 5",
[
(
"(n)everyday life",
[
"e05_002_135.lin",
"e05_003_101.lin",
"e05_003_001.lin",
"e05_003_021.lin",
"e05_003_061.lin",
"e05_003_081.lin",
"e05_003_083.lin",
"e05_003_085.lin",
"e05_003_086.lin",
"e05_003_087.lin",
"e05_003_091.lin",
"e05_003_092.lin",
"e05_007_135.lin",
"e05_008_103.lin",
"e05_009_141.lin",
"e05_010_101.lin",
"e05_011_103.lin",
"e05_011_101.lin",
"e05_012_135.lin",
(
"Generic Text",
[
"e05_013_001.lin",
"e05_013_021.lin",
"e05_013_061.lin",
"e05_013_101.lin",
"e05_014_001.lin",
"e05_014_021.lin",
"e05_014_061.lin",
"e05_014_101.lin",
]
),
"e05_013_103.lin",
(
"Free Time 18",
[
"e08_002_013.lin",
"e08_006_009.lin",
"e08_009_006.lin",
"e08_010_005.lin",
]
),
"e05_014_103.lin",
(
"Free Time 19",
[
"e08_002_014.lin",
"e08_006_010.lin",
"e08_009_007.lin",
"e08_010_006.lin",
]
),
"e05_015_103.lin",
"e05_016_103.lin",
"e05_016_101.lin",
"e05_017_135.lin",
"e05_017_101.lin",
"e05_017_001.lin",
"e05_017_004.lin",
"e05_018_006.lin",
"e05_019_061.lin",
]
),
(
"neveryday life",
[
"e05_020_083.lin",
"e05_020_084.lin",
"e05_022_081.lin",
"e05_022_041.lin",
"e05_022_021.lin",
"e05_022_001.lin",
"e05_022_101.lin",
"e05_022_061.lin",
"e05_023_083.lin",
"e05_023_061.lin",
"e05_024_068.lin",
"e05_025_083.lin",
"e05_025_084.lin",
"e05_025_085.lin",
"e05_255_000.lin",
"e05_026_006.lin",
(
"Generic Text",
[
"e05_026_001.lin",
"e05_026_003.lin",
"e05_026_009.lin",
"e05_026_011.lin",
"e05_026_021.lin",
"e05_026_022.lin",
"e05_026_025.lin",
"e05_026_026.lin",
"e05_026_030.lin",
"e05_026_042.lin",
"e05_026_044.lin",
"e05_026_046.lin",
"e05_026_048.lin",
"e05_026_061.lin",
"e05_026_063.lin",
"e05_026_065.lin",
"e05_026_067.lin",
"e05_026_068.lin",
"e05_026_081.lin",
"e05_026_087.lin",
"e05_026_092.lin",
]
),
"e05_027_101.lin",
"e05_027_121.lin",
"e05_028_087.lin",
"e05_029_025.lin",
"e05_030_001.lin",
"e05_030_201.lin",
"e05_030_207.lin",
]
),
(
"Class Trial",
[
"e05_101_000.lin",
"nonstop_05_001.dat",
"e05_102_000.lin",
"e05_103_000.lin",
"nonstop_05_002.dat",
"e05_104_000.lin",
"e05_105_000.lin",
"e05_106_000.lin",
"anagram_51.dat",
"e05_107_000.lin",
"e05_109_000.lin",
"nonstop_05_003.dat",
"e05_110_000.lin",
"e05_111_000.lin",
"hs_mtb_s07.pak",
"e05_113_000.lin",
"nonstop_05_004.dat",
"e05_114_000.lin",
"e05_115_000.lin",
"nonstop_05_005.dat",
"e05_116_000.lin",
"e05_117_000.lin",
"nonstop_05_006.dat",
"e05_118_000.lin",
"e05_119_000.lin",
"nonstop_05_007.dat",
"e05_120_000.lin",
"e05_121_000.lin",
"nonstop_05_008.dat",
"e05_122_000.lin",
"e05_123_000.lin",
"e05_124_000.lin",
(
"Bad End Route",
[
"e05_150_000.lin",
"nonstop_05_009.dat",
"e05_151_000.lin",
"e05_152_000.lin",
"e05_153_000.lin",
"e05_154_000.lin",
"e05_155_000.lin",
]
),
"e05_200_207.lin",
"e05_200_214.lin",
"e05_198_000.lin",
"e05_199_000.lin",
"e05_195_000.lin",
"e05_196_000.lin",
"e05_197_000.lin",
]
),
(
"Post-Trial",
[
"e05_201_216.lin",
]
),
(
"Generic Text",
[
"e05_000_001.lin",
"e05_000_003.lin",
"e05_000_004.lin",
"e05_000_005.lin",
"e05_000_006.lin",
"e05_000_009.lin",
"e05_000_010.lin",
"e05_000_011.lin",
"e05_000_014.lin",
"e05_000_015.lin",
"e05_000_016.lin",
"e05_000_021.lin",
"e05_000_022.lin",
"e05_000_025.lin",
"e05_000_026.lin",
"e05_000_030.lin",
"e05_000_031.lin",
"e05_000_032.lin",
"e05_000_033.lin",
"e05_000_037.lin",
"e05_000_042.lin",
"e05_000_044.lin",
"e05_000_046.lin",
"e05_000_047.lin",
"e05_000_048.lin",
"e05_000_051.lin",
"e05_000_052.lin",
"e05_000_061.lin",
"e05_000_063.lin",
"e05_000_065.lin",
"e05_000_066.lin",
"e05_000_067.lin",
"e05_000_068.lin",
"e05_000_070.lin",
"e05_000_071.lin",
"e05_000_072.lin",
"e05_000_083.lin",
"e05_000_084.lin",
"e05_000_085.lin",
"e05_000_086.lin",
"e05_000_087.lin",
"e05_000_089.lin",
"e05_000_090.lin",
"e05_000_091.lin",
"e05_000_092.lin",
"e05_000_103.lin",
"e05_000_135.lin",
"e05_000_136.lin",
"e05_000_137.lin",
"e05_000_139.lin",
"e05_000_140.lin",
"e05_000_141.lin",
"e05_000_144.lin",
"e05_000_145.lin",
"e05_000_146.lin",
"e05_255_100.lin",
"e05_255_101.lin",
"e05_255_102.lin",
"script_pak_e05_0000.lin",
"script_pak_e05_0003.lin",
"script_pak_e05_0004.lin",
"script_pak_e05_0005.lin",
"script_pak_e05_0006.lin",
"script_pak_e05_0007.lin",
"script_pak_e05_0008.lin",
"script_pak_e05_0009.lin",
"script_pak_e05_0010.lin",
"script_pak_e05_0011.lin",
"script_pak_e05_0012.lin",
"script_pak_e05_0013.lin",
"script_pak_e05_0014.lin",
"script_pak_e05_0015.lin",
"script_pak_e05_0016.lin",
"script_pak_e05_0017.lin",
"script_pak_e05_0018.lin",
"script_pak_e05_0019.lin",
"script_pak_e05_0020.lin",
"script_pak_e05_0021.lin",
"script_pak_e05_0022.lin",
"script_pak_e05_0024.lin",
"script_pak_e05_0025.lin",
"script_pak_e05_0026.lin",
"script_pak_e05_0027.lin",
"script_pak_e05_0028.lin",
"script_pak_e05_0029.lin",
"script_pak_e05_0030.lin",
"script_pak_e05_0031.lin",
"script_pak_e05_0032.lin",
"script_pak_e05_0033.lin",
"script_pak_e05_0034.lin",
"script_pak_e05_0035.lin",
"script_pak_e05_0036.lin",
"script_pak_e05_0037.lin",
"script_pak_e05_0038.lin",
"script_pak_e05_0039.lin",
"script_pak_e05_0041.lin",
"script_pak_e05_0042.lin",
"script_pak_e05_0043.lin",
"script_pak_e05_0044.lin",
"script_pak_e05_0045.lin",
"script_pak_e05_0046.lin",
"script_pak_e05_0047.lin",
"script_pak_e05_0048.lin",
"script_pak_e05_0049.lin",
"script_pak_e05_0051.lin",
"script_pak_e05_0052.lin",
"script_pak_e05_0053.lin",
"script_pak_e05_0054.lin",
"script_pak_e05_0055.lin",
"script_pak_e05_0056.lin",
"script_pak_e05_0057.lin",
"script_pak_e05_0058.lin",
"script_pak_e05_0059.lin",
"script_pak_e05_0060.lin",
]
),
]
),
(
"Chapter 6",
[
(
"neveryday life",
[
"e06_001_216.lin",
"e06_003_137.lin",
"e06_003_101.lin",
"e06_003_001.lin",
"e06_003_004.lin",
"e06_004_006.lin",
"e06_005_004.lin",
"e06_005_001.lin",
"e06_005_101.lin",
"e06_006_135.lin",
(
"Generic Text",
[
"e06_007_001.lin",
"e06_007_021.lin",
"e06_007_135.lin",
"e06_007_153.lin",
]
),
"e06_007_066.lin",
"e06_007_068.lin",
"e06_007_069.lin",
"e06_007_061.lin",
"e06_007_083.lin",
"e06_007_085.lin",
"e06_007_087.lin",
"e06_007_089.lin",
"e06_007_101.lin",
"e06_007_148.lin",
"e06_007_151.lin",
"e06_007_152.lin",
"e06_007_150.lin",
"e06_007_157.lin",
"e06_007_158.lin",
"e06_255_000.lin",
"e06_015_004.lin",
"e06_016_006.lin",
"e06_017_004.lin",
"e06_017_025.lin",
"e06_017_083.lin",
"e06_017_135.lin",
"e06_018_089.lin",
"e06_020_009.lin",
"e06_021_001.lin",
"e06_021_201.lin",
"e06_021_208.lin",
]
),
(
"Class Trial",
[
"e06_101_000.lin",
"nonstop_06_001.dat",
"e06_102_000.lin",
"e06_103_000.lin",
"e06_104_000.lin",
"anagram_61.dat",
"e06_105_000.lin",
"nonstop_06_002.dat",
"e06_106_000.lin",
"e06_107_000.lin",
"nonstop_06_003.dat",
"e06_108_000.lin",
"e06_109_000.lin",
"nonstop_06_004.dat",
"e06_110_000.lin",
"e06_111_000.lin",
"nonstop_06_005.dat",
"e06_112_000.lin",
"e06_113_000.lin",
"e06_114_000.lin",
"hs_mtb_s08.pak",
"e06_116_000.lin",
"e06_117_000.lin",
"nonstop_06_006.dat",
"e06_118_000.lin",
"e06_119_000.lin",
"nonstop_06_007.dat",
"e06_120_000.lin",
"e06_121_000.lin",
"e06_122_000.lin",
"anagram_62.dat",
"e06_123_000.lin",
"e06_124_000.lin",
"hs_mtb_s09.pak",
"e06_126_000.lin",
"e06_127_000.lin",
"e06_128_000.lin",
"e06_129_000.lin",
"e06_130_000.lin",
"e06_131_000.lin",
"e06_132_000.lin",
"e06_133_000.lin",
"nonstop_06_008.dat",
"e06_134_000.lin",
"e06_135_000.lin",
"e06_136_000.lin",
"nonstop_06_009.dat",
"e06_137_000.lin",
"e06_138_000.lin",
"e06_139_000.lin",
"e06_140_000.lin",
"e06_141_000.lin",
"anagram_63.dat",
"e06_142_000.lin",
"nonstop_06_010.dat",
"nonstop_06_025.dat",
"e06_143_000.lin",
"e06_144_000.lin",
"hs_mtb_s10.pak",
"e06_146_000.lin",
"e06_200_208.lin",
"e06_198_000.lin",
"e06_199_000.lin",
"e06_195_000.lin",
"e06_196_000.lin",
"e06_197_000.lin",
]
),
(
"Generic Text",
[
"e06_000_001.lin",
"e06_000_003.lin",
"e06_000_004.lin",
"e06_000_005.lin",
"e06_000_006.lin",
"e06_000_009.lin",
"e06_000_010.lin",
"e06_000_011.lin",
"e06_000_014.lin",
"e06_000_015.lin",
"e06_000_016.lin",
"e06_000_021.lin",
"e06_000_022.lin",
"e06_000_025.lin",
"e06_000_026.lin",
"e06_000_030.lin",
"e06_000_031.lin",
"e06_000_032.lin",
"e06_000_033.lin",
"e06_000_037.lin",
"e06_000_042.lin",
"e06_000_044.lin",
"e06_000_046.lin",
"e06_000_047.lin",
"e06_000_048.lin",
"e06_000_051.lin",
"e06_000_052.lin",
"e06_000_061.lin",
"e06_000_063.lin",
"e06_000_065.lin",
"e06_000_066.lin",
"e06_000_067.lin",
"e06_000_068.lin",
"e06_000_069.lin",
"e06_000_070.lin",
"e06_000_071.lin",
"e06_000_072.lin",
"e06_000_083.lin",
"e06_000_087.lin",
"e06_000_089.lin",
"e06_000_090.lin",
"e06_000_091.lin",
"e06_000_092.lin",
"e06_000_101.lin",
"e06_000_135.lin",
"e06_000_136.lin",
"e06_000_137.lin",
"e06_000_139.lin",
"e06_000_140.lin",
"e06_000_141.lin",
"e06_000_144.lin",
"e06_000_145.lin",
"e06_000_146.lin",
"e06_000_148.lin",
"e06_000_149.lin",
"e06_000_150.lin",
"e06_000_151.lin",
"e06_000_152.lin",
"e06_000_153.lin",
"script_pak_e06_0000.lin",
"script_pak_e06_0003.lin",
"script_pak_e06_0004.lin",
"script_pak_e06_0005.lin",
"script_pak_e06_0006.lin",
"script_pak_e06_0007.lin",
"script_pak_e06_0008.lin",
"script_pak_e06_0009.lin",
"script_pak_e06_0010.lin",
"script_pak_e06_0011.lin",
"script_pak_e06_0012.lin",
"script_pak_e06_0013.lin",
"script_pak_e06_0014.lin",
"script_pak_e06_0015.lin",
"script_pak_e06_0016.lin",
"script_pak_e06_0017.lin",
"script_pak_e06_0018.lin",
"script_pak_e06_0019.lin",
"script_pak_e06_0020.lin",
"script_pak_e06_0021.lin",
"script_pak_e06_0022.lin",
"script_pak_e06_0024.lin",
"script_pak_e06_0025.lin",
"script_pak_e06_0026.lin",
"script_pak_e06_0027.lin",
"script_pak_e06_0028.lin",
"script_pak_e06_0029.lin",
"script_pak_e06_0030.lin",
"script_pak_e06_0031.lin",
"script_pak_e06_0032.lin",
"script_pak_e06_0033.lin",
"script_pak_e06_0034.lin",
"script_pak_e06_0035.lin",
"script_pak_e06_0036.lin",
"script_pak_e06_0037.lin",
"script_pak_e06_0038.lin",
"script_pak_e06_0039.lin",
"script_pak_e06_0040.lin",
"script_pak_e06_0042.lin",
"script_pak_e06_0043.lin",
"script_pak_e06_0044.lin",
"script_pak_e06_0045.lin",
"script_pak_e06_0046.lin",
"script_pak_e06_0047.lin",
"script_pak_e06_0048.lin",
"script_pak_e06_0049.lin",
"script_pak_e06_0050.lin",
"script_pak_e06_0051.lin",
"script_pak_e06_0052.lin",
"script_pak_e06_0053.lin",
"script_pak_e06_0054.lin",
"script_pak_e06_0055.lin",
"script_pak_e06_0056.lin",
"script_pak_e06_0057.lin",
"script_pak_e06_0058.lin",
"script_pak_e06_0059.lin",
"script_pak_e06_0060.lin",
"script_pak_e06_0061.lin",
"script_pak_e06_0062.lin",
"script_pak_e06_0063.lin",
]
),
]
),
(
"Epilogue",
[
"e07_001_003.lin",
"script_pak_e07_0000.lin",
]
),
]
),
(
"MTB",
[
"hs_mtb_s01.pak",
"hs_mtb_s02.pak",
"hs_mtb_s03.pak",
"hs_mtb_s04.pak",
"hs_mtb_s05.pak",
"hs_mtb_s06.pak",
"hs_mtb_s07.pak",
"hs_mtb_s08.pak",
"hs_mtb_s09.pak",
"hs_mtb_s10.pak",
(
"Unused",
[
"hs_mtb_s11.pak",
"hs_mtb_s21.pak",
"hs_mtb_s22.pak",
"hs_mtb_s23.pak",
"hs_mtb_s24.pak",
"hs_mtb_s25.pak",
"hs_mtb_s26.pak",
"hs_mtb_s27.pak",
"hs_mtb_s28.pak",
"hs_mtb_s29.pak",
"hs_mtb_s30.pak",
"hs_mtb_s31.pak",
"hs_mtb_s32.pak",
"hs_mtb_s33.pak",
"hs_mtb_s34.pak",
"hs_mtb_s35.pak",
"hs_mtb_s36.pak",
"hs_mtb_s37.pak",
"hs_mtb_s38.pak",
"hs_mtb_s39.pak",
"hs_mtb_s40.pak",
]
),
]
),
(
"Epiphany Anagram",
[
"anagram_11.dat",
"anagram_21.dat",
"anagram_31.dat",
"anagram_41.dat",
"anagram_42.dat",
"anagram_51.dat",
"anagram_61.dat",
"anagram_62.dat",
"anagram_63.dat",
(
"Unused",
[
"anagram_01.dat",
"anagram_52.dat",
"anagram_101.dat",
"anagram_200.dat",
"anagram_201.dat",
"anagram_202.dat",
"anagram_203.dat",
"anagram_204.dat",
"anagram_205.dat",
"anagram_206.dat",
"anagram_207.dat",
"anagram_208.dat",
"anagram_209.dat",
"anagram_210.dat",
"anagram_211.dat",
"anagram_212.dat",
"anagram_213.dat",
"anagram_214.dat",
"anagram_215.dat",
"anagram_216.dat",
"anagram_217.dat",
"anagram_218.dat",
"anagram_219.dat",
"anagram_220.dat",
"anagram_221.dat",
"anagram_222.dat",
"anagram_223.dat",
"anagram_224.dat",
"anagram_225.dat",
"anagram_226.dat",
"anagram_227.dat",
"anagram_228.dat",
"anagram_229.dat",
"anagram_230.dat",
"anagram_231.dat",
"anagram_232.dat",
]
),
]
),
(
"Free Time",
[
(
"Kiyotaka Ishimaru",
[
"e08_001_001.lin",
"e08_001_002.lin",
"e08_001_003.lin",
"e08_001_004.lin",
"e08_001_005.lin",
"e08_001_006.lin",
"e08_001_007.lin",
"e08_001_008.lin",
"e08_001_009.lin",
(
"Present Reactions",
[
"e08_001_000.lin",
"e08_001_017.lin",
]
),
(
"Special Events",
[
"e08_001_020.lin",
"e08_001_021.lin",
"e08_001_022.lin",
"e08_001_023.lin",
]
),
]
),
(
"Super Saiyan Ishimaru",
[
"e08_001_010.lin",
"e08_001_011.lin",
"e08_001_012.lin",
"e08_001_013.lin",
(
"Present Reactions",
[
"e08_001_016.lin",
]
),
(
"Special Events",
[
"e08_001_024.lin",
]
),
]
),
(
"Byakuya Togami",
[
"e08_002_001.lin",
"e08_002_002.lin",
"e08_002_003.lin",
"e08_002_004.lin",
"e08_002_005.lin",
"e08_002_006.lin",
"e08_002_007.lin",
"e08_002_008.lin",
"e08_002_009.lin",
"e08_002_010.lin",
"e08_002_011.lin",
"e08_002_012.lin",
"e08_002_013.lin",
"e08_002_014.lin",
(
"Present Reactions",
[
"e08_002_000.lin",
]
),
(
"Special Events",
[
"e08_002_020.lin",
"e08_002_021.lin",
"e08_002_022.lin",
"e08_002_023.lin",
"e08_002_024.lin",
]
),
]
),
(
"Mondo Oowada",
[
"e08_003_001.lin",
"e08_003_002.lin",
"e08_003_003.lin",
"e08_003_004.lin",
"e08_003_005.lin",
"e08_003_006.lin",
(
"Present Reactions",
[
"e08_003_000.lin",
]
),
(
"Special Events",
[
"e08_003_020.lin",
"e08_003_021.lin",
"e08_003_022.lin",
"e08_003_023.lin",
]
),
]
),
(
"Leon Kuwata",
[
"e08_004_001.lin",
"e08_004_002.lin",
"e08_004_003.lin",
(
"Present Reactions",
[
"e08_004_000.lin",
]
),
(
"Special Events",
[
"e08_004_020.lin",
"e08_004_021.lin",
"e08_004_022.lin",
]
),
]
),
(
"Hifumi Yamada",
[
"e08_005_001.lin",
"e08_005_002.lin",
"e08_005_003.lin",
"e08_005_004.lin",
"e08_005_005.lin",
"e08_005_006.lin",
"e08_005_007.lin",
(
"Present Reactions",
[
"e08_005_000.lin",
]
),
(
"Special Events",
[
"e08_005_020.lin",
"e08_005_021.lin",
"e08_005_022.lin",
"e08_005_023.lin",
"e08_005_024.lin",
]
),
]
),
(
"Yasuhiro Hagakure",
[
"e08_006_001.lin",
"e08_006_002.lin",
"e08_006_003.lin",
"e08_006_004.lin",
"e08_006_005.lin",
"e08_006_006.lin",
"e08_006_007.lin",
"e08_006_008.lin",
"e08_006_009.lin",
"e08_006_010.lin",
(
"Present Reactions",
[
"e08_006_000.lin",
]
),
(
"Special Events",
[
"e08_006_020.lin",
"e08_006_021.lin",
"e08_006_022.lin",
"e08_006_023.lin",
"e08_006_024.lin",
"e08_006_025.lin",
]
),
]
),
(
"Sayaka Maizono",
[
"e08_007_001.lin",
"e08_007_002.lin",
"e08_007_003.lin",
(
"Present Reactions",
[
"e08_007_000.lin",
]
),
(
"Special Events",
[
"e08_007_020.lin",
"e08_007_021.lin",
]
),
]
),
(
"Kyouko Kirigiri",
[
"e08_008_001.lin",
"e08_008_002.lin",
"e08_008_003.lin",
"e08_008_004.lin",
"e08_008_005.lin",
"e08_008_006.lin",
"e08_008_007.lin",
"e08_008_008.lin",
(
"Present Reactions",
[
"e08_008_000.lin",
]
),
(
"Special Events",
[
"e08_008_020.lin",
"e08_008_021.lin",
"e08_008_022.lin",
"e08_008_023.lin",
]
),
]
),
(
"Aoi Asahina",
[
"e08_009_001.lin",
"e08_009_002.lin",
"e08_009_003.lin",
"e08_009_004.lin",
"e08_009_005.lin",
"e08_009_006.lin",
"e08_009_007.lin",
"e08_009_008.lin",
"e08_009_009.lin",
(
"Present Reactions",
[
"e08_009_000.lin",
"e08_009_016.lin",
]
),
(
"Special Events",
[
"e08_009_020.lin",
"e08_009_021.lin",
"e08_009_022.lin",
"e08_009_023.lin",
"e08_009_024.lin",
"e08_009_025.lin",
]
),
]
),
(
"Touko Fukawa",
[
"e08_010_001.lin",
"e08_010_002.lin",
"e08_010_003.lin",
"e08_010_004.lin",
"e08_010_005.lin",
"e08_010_006.lin",
(
"Present Reactions",
[
"e08_010_000.lin",
]
),
(
"Special Events",
[
"e08_010_020.lin",
"e08_010_021.lin",
"e08_010_022.lin",
"e08_010_023.lin",
"e08_010_024.lin",
]
),
]
),
(
"Genocider Shou",
[
"e08_010_007.lin",
"e08_010_008.lin",
"e08_010_009.lin",
"e08_010_010.lin",
(
"Present Reactions",
[
"e08_010_016.lin",
]
),
(
"Special Events",
[
"e08_010_025.lin",
"e08_010_026.lin",
"e08_010_027.lin",
]
),
]
),
(
"Sakura Oogami",
[
"e08_011_001.lin",
"e08_011_002.lin",
"e08_011_003.lin",
"e08_011_004.lin",
"e08_011_005.lin",
"e08_011_006.lin",
"e08_011_007.lin",
"e08_011_008.lin",
"e08_011_009.lin",
"e08_011_010.lin",
(
"Present Reactions",
[
"e08_011_000.lin",
"e08_011_016.lin",
]
),
(
"Special Events",
[
"e08_011_020.lin",
"e08_011_021.lin",
"e08_011_022.lin",
"e08_011_023.lin",
"e08_011_024.lin",
]
),
]
),
(
"Celestia Ludenberg",
[
"e08_012_001.lin",
"e08_012_002.lin",
"e08_012_003.lin",
"e08_012_004.lin",
"e08_012_005.lin",
"e08_012_006.lin",
(
"Present Reactions",
[
"e08_012_000.lin",
]
),
(
"Special Events",
[
"e08_012_020.lin",
"e08_012_021.lin",
"e08_012_022.lin",
"e08_012_023.lin",
"e08_012_024.lin",
"e08_012_025.lin",
]
),
]
),
(
"Junko Enoshima",
[
"e08_013_001.lin",
"e08_013_002.lin",
"e08_013_003.lin",
"e08_013_004.lin",
(
"Present Reactions",
[
"e08_013_000.lin",
]
),
(
"Special Events",
[
"e08_013_020.lin",
"e08_013_021.lin",
"e08_013_022.lin",
]
),
]
),
(
"Chihiro Fujisaki",
[
"e08_014_001.lin",
"e08_014_002.lin",
"e08_014_003.lin",
"e08_014_004.lin",
"e08_014_005.lin",
"e08_014_006.lin",
"e08_014_007.lin",
(
"Present Reactions",
[
"e08_014_000.lin",
"e08_014_016.lin",
]
),
(
"Special Events",
[
"e08_014_020.lin",
"e08_014_021.lin",
"e08_014_022.lin",
"e08_014_023.lin",
]
),
]
),
(
"Tutorial",
[
"e08_015_000.lin",
]
),
]
),
(
"Monokuma Theatre",
[
"e08_000_000.lin",
"e08_000_001.lin",
"e08_000_002.lin",
"e08_000_003.lin",
"e08_000_004.lin",
"e08_000_005.lin",
"e08_000_006.lin",
"e08_000_007.lin",
"e08_000_008.lin",
"e08_000_009.lin",
"e08_000_010.lin",
"e08_000_011.lin",
"e08_000_012.lin",
"e08_000_013.lin",
"e08_000_014.lin",
"e08_000_015.lin",
"e08_000_016.lin",
"script_pak_e08_0002.lin",
]
),
(
"Menu Text",
[
"00_system.pak",
"01_contents.pak",
"02_bgmname.pak",
"03_eventname.pak",
"04_itemname.pak",
"05_itemdescription.pak",
"06_kotodamaname.pak",
"07_kotodamadesc1.pak",
"08_kotodamadesc2.pak",
"09_kotodamadesc3.pak",
"10_special.pak",
"11_report.pak",
"12_skillname.pak",
"13_skilldesc.pak",
"14_rule.pak",
"15_operatemode.pak",
"16_operatedesc.pak",
"17_floorname.pak",
"18_mapname.pak",
"19_handbookmenu.pak",
"20_handbookdesc.pak",
"21_briefing.pak",
"22_profile.pak",
"24_moviename.pak",
"25_skilldeschb.pak",
"26_menu.pak",
"27_artworkname.pak",
"28_credit.pak",
"29_operatemoder.pak",
"30_operatedescr.pak",
"31_operatesysr.pak",
]
),
(
"Misc.",
[
"e08_016_000.lin",
"e08_020_000.lin",
"script_pak_e08_0000.lin",
"event.pak",
"voice.pak",
]
),
"EBOOT Text",
]
### EOF ### | gpl-3.0 | -3,670,312,204,316,532,000 | 28.733707 | 81 | 0.354773 | false |
alobbs/autome | chief/chief-client.py | 1 | 1722 | #!/usr/bin/env python3
import conf
import argparse
import time
import requests
import tabulate
CLEAR = "\x1b[2J\x1b[1;1H"
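# ANSI escape sequence: clear the screen and move the cursor to the top-left.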
def get(path):
url = "http://localhost:{}".format(conf.CHIEF_API_PORT)
r = requests.get(url + path)
return r.json()
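# Render either a dict (as key/value rows) or a list of dicts (whose keys
# become the column headers) as a text table via tabulate.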
def table(info, *a, **ka):
if 'tablefmt' not in ka:
ka['tablefmt'] = "fancy_grid"
if type(info) == dict:
info = [list(i) for i in info.items()]
return tabulate.tabulate(info, [], *a, **ka)
elif type(info) == list and type(info[0]) == dict:
headers = sorted(info[0].keys())
values = []
for e in info:
values.append([e.get(k, '') for k in headers])
return tabulate.tabulate(values, headers, *a, **ka)
return tabulate.tabulate(info, *a, **ka)
def do(args):
now = time.strftime("%h %d, %H:%M")
if args.cmd == "jobs":
print(now + '\n' + table(get("/jobs/list")))
elif args.cmd == "run":
assert args.job, "--job required"
url = "/jobs/run/{}".format(args.job)
print(now + '\n' + table(get(url)))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("cmd", choices=["jobs", "run", "ping"])
parser.add_argument("--auto", type=int)
parser.add_argument("--job")
args = parser.parse_args()
try:
do(args)
while args.auto:
time.sleep(args.auto)
print(CLEAR)
try:
do(args)
except requests.exceptions.ConnectionError:
now = time.strftime("%h %d, %H:%M")
print(now + " - [ERROR] Autome API server not reachable")
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
| mit | 8,255,673,079,585,231,000 | 24.323529 | 73 | 0.546458 | false |
viewportvr/daysinvr | backend/remixvr/app.py | 1 | 4034 | # -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
from flask import Flask
from remixvr.extensions import bcrypt, cache, db, migrate, jwt, cors
from remixvr import (commands, user, profile, project, theme, field,
space, activity, activitytype, classroom, school, submission)
from remixvr.settings import ProdConfig
from remixvr.exceptions import InvalidUsage
def create_app(config_object=ProdConfig):
"""An application factory, as explained here:
http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split('.')[0])
app.url_map.strict_slashes = False
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
register_shellcontext(app)
register_commands(app)
return app
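# Minimal usage sketch (assumes a DevConfig class exists in remixvr.settings
# alongside the ProdConfig imported above):
#
#     from remixvr.settings import DevConfig
#     app = create_app(DevConfig)
#     app.run(port=5000)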
def register_extensions(app):
"""Register Flask extensions."""
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
migrate.init_app(app, db)
jwt.init_app(app)
def register_blueprints(app):
"""Register Flask blueprints."""
origins = app.config.get('CORS_ORIGIN_WHITELIST', '*')
cors.init_app(user.views.blueprint, origins=origins)
cors.init_app(profile.views.blueprint, origins=origins)
cors.init_app(project.views.blueprint, origins=origins)
cors.init_app(theme.views.blueprint, origins=origins)
cors.init_app(field.views.blueprint, origins=origins)
cors.init_app(space.views.blueprint, origins=origins)
cors.init_app(activity.views.blueprint, origins=origins)
cors.init_app(activitytype.views.blueprint, origins=origins)
cors.init_app(classroom.views.blueprint, origins=origins)
cors.init_app(school.views.blueprint, origins=origins)
cors.init_app(submission.views.blueprint, origins=origins)
app.register_blueprint(user.views.blueprint)
app.register_blueprint(profile.views.blueprint)
app.register_blueprint(project.views.blueprint)
app.register_blueprint(theme.views.blueprint)
app.register_blueprint(field.views.blueprint)
app.register_blueprint(space.views.blueprint)
app.register_blueprint(activity.views.blueprint)
app.register_blueprint(activitytype.views.blueprint)
app.register_blueprint(classroom.views.blueprint)
app.register_blueprint(school.views.blueprint)
app.register_blueprint(submission.views.blueprint)
def register_errorhandlers(app):
def errorhandler(error):
response = error.to_json()
response.status_code = error.status_code
return response
app.errorhandler(InvalidUsage)(errorhandler)
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
return {
'db': db,
'User': user.models.User,
'UserProfile': profile.models.UserProfile,
'Project': project.models.Project,
'Theme': theme.models.Theme,
'Field': field.models.Field,
'Position': field.models.Position,
'Text': field.models.Text,
'Number': field.models.Number,
'Audio': field.models.Audio,
'Video': field.models.Video,
'VideoSphere': field.models.VideoSphere,
'Image': field.models.Image,
'PhotoSphere': field.models.PhotoSphere,
'Space': space.models.Space,
'Activity': activity.models.Activity,
'ActivityType': activitytype.models.ActivityType,
'Classroom': classroom.models.Classroom,
'School': school.models.School,
'Submission': submission.models.Submission
}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.test)
app.cli.add_command(commands.lint)
app.cli.add_command(commands.clean)
app.cli.add_command(commands.urls)
| mit | -5,504,918,619,888,644,000 | 35.342342 | 82 | 0.685176 | false |
liberiun/cynin-intranet | src/ubify.spaces/ubify/spaces/browser/mindmap.py | 1 | 5085 | ###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at [email protected] with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# [email protected]
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
from Products.Five import BrowserView
from zope.interface import implements
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from ubify.policy import CyninMessageFactory as _
from ubify.spaces.interfaces import IMindMappable
from ubify.spaces.config import mindmapshowabletypes
from Products.CMFCore.utils import getToolByName
import logging
from ubify.policy.config import spacesdefaultaddablenonfolderishtypes
class SpaceFreeMindMap(BrowserView):
"""Contains backend code the xml template in mindmap.pt
"""
template = ViewPageTemplateFile('mindmap.xml')
recurse = ViewPageTemplateFile('mindmap_recurse.xml')
def __call__(self):
self.logger = logging.getLogger()
self.isfullview = False
self.showleafitems = False
if 'fullviewmapdata' in self.request.steps:
self.isfullview = True
self.typetool= getToolByName(self.context, 'portal_types')
if self.isfullview:
portal = self.context.portal_url.getPortalObject()
mnode = portal
return self.template(mainnode=portal)
else:
if self.context.portal_type == 'ContentRoot':
portal = self.context.portal_url.getPortalObject()
mnode = portal
return self.template(mainnode=portal)
else:
return self.template(mainnode=self.context)
def getTypeIcon(self,obj):
object_typename = obj.portal_type
object_typeobj = self.typetool[object_typename]
fulliconpath = object_typeobj.icon_expr
#self.logger.info('returned typeicon: %s' % (fulliconpath))
return fulliconpath
def getChildren(self,obj):
"""Gets the immediate children of the passed object"""
cat = getToolByName(obj, 'portal_catalog')
currpath = '/'.join(obj.getPhysicalPath())
display_portal_types = mindmapshowabletypes
#import pdb; pdb.set_trace()
if self.showleafitems:
display_portal_types = mindmapshowabletypes + spacesdefaultaddablenonfolderishtypes
else:
if self.context.portal_type == 'Plone Site' or obj.portal_type in ('ContentRoot','ContentSpace'):
display_portal_types = mindmapshowabletypes
else:
display_portal_types = mindmapshowabletypes + spacesdefaultaddablenonfolderishtypes
catresults = cat.searchResults({'path': {'query': currpath, 'depth': 1},'portal_type':display_portal_types})
return catresults
def pathsort(x,y):
""" Sorts by path of object first and then by string"""
#DKG: Unused. Was written to sort a mybrains list based on the paths of the objects in it.
xpath = x.getPath()
ypath = y.getPath()
xsplit = xpath.split('/')
ysplit = ypath.split('/')
if len(xsplit) > len(ysplit):
return 1
elif len(xsplit) < len(ysplit):
return -1
else: #objects are peers in path
if xpath > ypath:
return 1
elif xpath < ypath:
return -1
else: #objects are having same path!?!?!
return 0
| gpl-3.0 | -5,295,264,525,636,877,000 | 40.341463 | 116 | 0.661357 | false |
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/hgext/progress.py | 1 | 10862 | # progress.py show progress bars for some actions
#
# Copyright (C) 2010 Augie Fackler <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""show progress bars for some actions
This extension uses the progress information logged by hg commands
to draw progress bars that are as informative as possible. Some progress
bars only offer indeterminate information, while others have a definite
end point.
The following settings are available::
[progress]
delay = 3 # number of seconds (float) before showing the progress bar
changedelay = 1 # changedelay: minimum delay before showing a new topic.
# If set to less than 3 * refresh, that value will
# be used instead.
refresh = 0.1 # time in seconds between refreshes of the progress bar
format = topic bar number estimate # format of the progress bar
width = <none> # if set, the maximum width of the progress information
# (that is, min(width, term width) will be used)
clear-complete = True # clear the progress bar after it's done
disable = False # if true, don't show a progress bar
assume-tty = False # if true, ALWAYS show a progress bar, unless
# disable is given
Valid entries for the format field are topic, bar, number, unit,
estimate, speed, and item. item defaults to the last 20 characters of
the item, but this can be changed by adding either ``-<num>`` which
would take the last num characters, or ``+<num>`` for the first num
characters.
"""
import sys
import time
from mercurial import util
from mercurial.i18n import _
def spacejoin(*args):
return ' '.join(s for s in args if s)
def shouldprint(ui):
return util.isatty(sys.stderr) or ui.configbool('progress', 'assume-tty')
def fmtremaining(seconds):
if seconds < 60:
# i18n: format XX seconds as "XXs"
return _("%02ds") % (seconds)
minutes = seconds // 60
if minutes < 60:
seconds -= minutes * 60
# i18n: format X minutes and YY seconds as "XmYYs"
return _("%dm%02ds") % (minutes, seconds)
# we're going to ignore seconds in this case
minutes += 1
hours = minutes // 60
minutes -= hours * 60
if hours < 30:
# i18n: format X hours and YY minutes as "XhYYm"
return _("%dh%02dm") % (hours, minutes)
# we're going to ignore minutes in this case
hours += 1
days = hours // 24
hours -= days * 24
if days < 15:
# i18n: format X days and YY hours as "XdYYh"
return _("%dd%02dh") % (days, hours)
# we're going to ignore hours in this case
days += 1
weeks = days // 7
days -= weeks * 7
if weeks < 55:
# i18n: format X weeks and YY days as "XwYYd"
return _("%dw%02dd") % (weeks, days)
# we're going to ignore days and treat a year as 52 weeks
weeks += 1
years = weeks // 52
weeks -= years * 52
# i18n: format X years and YY weeks as "XyYYw"
return _("%dy%02dw") % (years, weeks)
class progbar(object):
def __init__(self, ui):
self.ui = ui
self.resetstate()
def resetstate(self):
self.topics = []
self.topicstates = {}
self.starttimes = {}
self.startvals = {}
self.printed = False
self.lastprint = time.time() + float(self.ui.config(
'progress', 'delay', default=3))
self.lasttopic = None
self.indetcount = 0
self.refresh = float(self.ui.config(
'progress', 'refresh', default=0.1))
self.changedelay = max(3 * self.refresh,
float(self.ui.config(
'progress', 'changedelay', default=1)))
self.order = self.ui.configlist(
'progress', 'format',
default=['topic', 'bar', 'number', 'estimate'])
def show(self, now, topic, pos, item, unit, total):
if not shouldprint(self.ui):
return
termwidth = self.width()
self.printed = True
head = ''
needprogress = False
tail = ''
for indicator in self.order:
add = ''
if indicator == 'topic':
add = topic
elif indicator == 'number':
if total:
add = ('% ' + str(len(str(total))) +
's/%s') % (pos, total)
else:
add = str(pos)
elif indicator.startswith('item') and item:
slice = 'end'
if '-' in indicator:
wid = int(indicator.split('-')[1])
elif '+' in indicator:
slice = 'beginning'
wid = int(indicator.split('+')[1])
else:
wid = 20
if slice == 'end':
add = item[-wid:]
else:
add = item[:wid]
add += (wid - len(add)) * ' '
elif indicator == 'bar':
add = ''
needprogress = True
elif indicator == 'unit' and unit:
add = unit
elif indicator == 'estimate':
add = self.estimate(topic, pos, total, now)
elif indicator == 'speed':
add = self.speed(topic, pos, unit, now)
if not needprogress:
head = spacejoin(head, add)
else:
tail = spacejoin(tail, add)
if needprogress:
used = 0
if head:
used += len(head) + 1
if tail:
used += len(tail) + 1
progwidth = termwidth - used - 3
if total and pos <= total:
amt = pos * progwidth // total
bar = '=' * (amt - 1)
if amt > 0:
bar += '>'
bar += ' ' * (progwidth - amt)
else:
progwidth -= 3
self.indetcount += 1
# mod the count by twice the width so we can make the
# cursor bounce between the right and left sides
amt = self.indetcount % (2 * progwidth)
amt -= progwidth
bar = (' ' * int(progwidth - abs(amt)) + '<=>' +
' ' * int(abs(amt)))
prog = ''.join(('[', bar , ']'))
out = spacejoin(head, prog, tail)
else:
out = spacejoin(head, tail)
sys.stderr.write('\r' + out[:termwidth])
self.lasttopic = topic
sys.stderr.flush()
def clear(self):
if not shouldprint(self.ui):
return
sys.stderr.write('\r%s\r' % (' ' * self.width()))
def complete(self):
if not shouldprint(self.ui):
return
if self.ui.configbool('progress', 'clear-complete', default=True):
self.clear()
else:
sys.stderr.write('\n')
sys.stderr.flush()
def width(self):
tw = self.ui.termwidth()
return min(int(self.ui.config('progress', 'width', default=tw)), tw)
def estimate(self, topic, pos, total, now):
if total is None:
return ''
initialpos = self.startvals[topic]
target = total - initialpos
delta = pos - initialpos
if delta > 0:
elapsed = now - self.starttimes[topic]
if elapsed > float(
self.ui.config('progress', 'estimate', default=2)):
seconds = (elapsed * (target - delta)) // delta + 1
return fmtremaining(seconds)
return ''
def speed(self, topic, pos, unit, now):
initialpos = self.startvals[topic]
delta = pos - initialpos
elapsed = now - self.starttimes[topic]
if elapsed > float(
self.ui.config('progress', 'estimate', default=2)):
return _('%d %s/sec') % (delta / elapsed, unit)
return ''
def progress(self, topic, pos, item='', unit='', total=None):
now = time.time()
if pos is None:
self.starttimes.pop(topic, None)
self.startvals.pop(topic, None)
self.topicstates.pop(topic, None)
# reset the progress bar if this is the outermost topic
if self.topics and self.topics[0] == topic and self.printed:
self.complete()
self.resetstate()
# truncate the list of topics assuming all topics within
# this one are also closed
if topic in self.topics:
self.topics = self.topics[:self.topics.index(topic)]
else:
if topic not in self.topics:
self.starttimes[topic] = now
self.startvals[topic] = pos
self.topics.append(topic)
self.topicstates[topic] = pos, item, unit, total
if now - self.lastprint >= self.refresh and self.topics:
if (self.lasttopic is None # first time we printed
# not a topic change
or topic == self.lasttopic
# it's been long enough we should print anyway
or now - self.lastprint >= self.changedelay):
self.lastprint = now
self.show(now, topic, *self.topicstates[topic])
_singleton = None
def uisetup(ui):
global _singleton
class progressui(ui.__class__):
_progbar = None
def _quiet(self):
return self.debugflag or self.quiet
def progress(self, *args, **opts):
if not self._quiet():
self._progbar.progress(*args, **opts)
return super(progressui, self).progress(*args, **opts)
def write(self, *args, **opts):
if not self._quiet() and self._progbar.printed:
self._progbar.clear()
return super(progressui, self).write(*args, **opts)
def write_err(self, *args, **opts):
if not self._quiet() and self._progbar.printed:
self._progbar.clear()
return super(progressui, self).write_err(*args, **opts)
# Apps that derive a class from ui.ui() can use
# setconfig('progress', 'disable', 'True') to disable this extension
if ui.configbool('progress', 'disable'):
return
if shouldprint(ui) and not ui.debugflag and not ui.quiet:
ui.__class__ = progressui
# we instantiate one globally shared progress bar to avoid
# competing progress bars when multiple UI objects get created
if not progressui._progbar:
if _singleton is None:
_singleton = progbar(ui)
progressui._progbar = _singleton
def reposetup(ui, repo):
uisetup(repo.ui)
| gpl-3.0 | -1,684,386,729,704,327,200 | 35.820339 | 77 | 0.537194 | false |
volalex/endlessctf | scoreboard/models.py | 1 | 2683 | # encoding: UTF-8
import os
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.dispatch import receiver
class Category(models.Model):
title = models.CharField(max_length=50)
position = models.IntegerField(name='position', unique=True)
def __str__(self):
return self.title
class Meta:
ordering = ('position',)
class News(models.Model):
title = models.CharField(max_length=50, name="title", verbose_name="Заголовок новости")
text = models.TextField(name="text", verbose_name="Текст новости")
create_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.title
class Meta:
ordering = ('create_date',)
verbose_name_plural = "News"
class Task(models.Model):
name = models.CharField(max_length=100, blank=False)
score = models.IntegerField(name='score', blank=False)
category = models.ForeignKey(Category, blank=False)
text = models.TextField(name='text', blank=False)
task_file = models.FileField(verbose_name="Task file", upload_to="task_files", blank=True)
flag = models.CharField(max_length=100, blank=False)
is_enabled = models.BooleanField(default=False)
def __str__(self):
return self.name
def gen_file_link(self):
if self.task_file:
return "<a href='%s'>File</a>" % self.task_file.url
else:
return ""
@receiver(models.signals.post_delete, sender=Task)
def auto_delete_file_on_delete(sender, instance, **kwargs):
"""Deletes file from filesystem
when corresponding `Task` object is deleted.
"""
try:
        if instance.task_file:
            if os.path.isfile(instance.task_file.path):
                os.remove(instance.task_file.path)
except AttributeError:
pass
@receiver(models.signals.pre_save, sender=Task)
def auto_delete_file_on_change(sender, instance, **kwargs):
"""Deletes file from filesystem
when corresponding `Task` object is changed.
"""
if not instance.pk:
return False
try:
old_file = Task.objects.get(pk=instance.pk).task_file
except Task.DoesNotExist:
return False
if not old_file:
return False
new_file = instance.task_file
if not old_file == new_file:
if os.path.isfile(old_file.path):
os.remove(old_file.path)
class SolvedTasks(models.Model):
task = models.ForeignKey(Task)
team = models.ForeignKey(User)
solved_at = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = (('task', 'team'),)
| gpl-2.0 | 6,566,926,016,593,865,000 | 26.947368 | 94 | 0.654991 | false |
jetuk/pywr | examples/two_reservoir_moea.py | 1 | 8924 | """
This example shows the trade-off (pareto frontier) of deficit against cost by altering a reservoir control curve.
Two types of control curve are possible. The first is a monthly control curve containing one value for each
month. The second is a harmonic control curve with cosine terms around a mean. Both Parameter objects
are part of pywr.parameters.
Inspyred is used in this example to perform a multi-objective optimisation using the NSGA-II algorithm. The
script should be run twice (once with --harmonic) to generate results for both types of control curve. Following
this, --plot can be used to generate an animation and PNG of the pareto frontier.
"""
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import datetime
import inspyred
from pywr.core import Model, Input, Output, Link, Storage
from pywr.parameters import ArrayIndexedParameter, MonthlyProfileParameter, AnnualHarmonicSeriesParameter
from pywr.parameters.control_curves import ControlCurveParameter
from pywr.recorders import TotalDeficitNodeRecorder, TotalFlowNodeRecorder, AggregatedRecorder
from pywr.optimisation.moea import InspyredOptimisationModel
def create_model(harmonic=True):
# import flow timeseries for catchments
flow = pd.read_csv(os.path.join('data', 'thames_stochastic_flow.gz'))
flow['Date'] = flow['Date'].apply(pd.to_datetime)
flow.set_index('Date', inplace=True)
# resample input to weekly average
flow = flow.resample('7D', how='mean')
flow_parameter = ArrayIndexedParameter(flow['flow'].values)
model = InspyredOptimisationModel(
solver='glpk',
start=flow.index[0],
end=flow.index[365*10], # roughly 10 years
timestep=datetime.timedelta(7), # weekly time-step
)
catchment1 = Input(model, 'catchment1', min_flow=flow_parameter, max_flow=flow_parameter)
catchment2 = Input(model, 'catchment2', min_flow=flow_parameter, max_flow=flow_parameter)
reservoir1 = Storage(model, 'reservoir1', min_volume=3000, max_volume=20000, volume=16000)
reservoir2 = Storage(model, 'reservoir2', min_volume=3000, max_volume=20000, volume=16000)
if harmonic:
control_curve = AnnualHarmonicSeriesParameter(0.5, [0.5], [0.0], mean_upper_bounds=1.0, amplitude_upper_bounds=1.0)
else:
control_curve = MonthlyProfileParameter(np.array([0.0]*12), lower_bounds=0.0, upper_bounds=1.0)
control_curve.is_variable = True
controller = ControlCurveParameter(reservoir1, control_curve, [0.0, 10.0])
transfer = Link(model, 'transfer', max_flow=controller, cost=-500)
demand1 = Output(model, 'demand1', max_flow=45.0, cost=-101)
demand2 = Output(model, 'demand2', max_flow=20.0, cost=-100)
river1 = Link(model, 'river1')
river2 = Link(model, 'river2')
# compensation flows from reservoirs
compensation1 = Link(model, 'compensation1', max_flow=5.0, cost=-9999)
compensation2 = Link(model, 'compensation2', max_flow=5.0, cost=-9998)
terminator = Output(model, 'terminator', cost=1.0)
catchment1.connect(reservoir1)
catchment2.connect(reservoir2)
reservoir1.connect(demand1)
reservoir2.connect(demand2)
reservoir2.connect(transfer)
transfer.connect(reservoir1)
reservoir1.connect(river1)
reservoir2.connect(river2)
river1.connect(terminator)
river2.connect(terminator)
reservoir1.connect(compensation1)
reservoir2.connect(compensation2)
compensation1.connect(terminator)
compensation2.connect(terminator)
r1 = TotalDeficitNodeRecorder(model, demand1)
r2 = TotalDeficitNodeRecorder(model, demand2)
r3 = AggregatedRecorder(model, [r1, r2], agg_func="mean")
r3.is_objective = True
r4 = TotalFlowNodeRecorder(model, transfer)
r4.is_objective = True
return model
def moea_main(prng=None, display=False, harmonic=False):
from random import Random
from time import time
if prng is None:
prng = Random()
prng.seed(time())
script_name = os.path.splitext(os.path.basename(__file__))[0]
stats_file = open('{}-{}-statistics-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
individuals_file = open('{}-{}-individuals-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
problem = create_model(harmonic=harmonic)
problem.setup()
ea = inspyred.ec.emo.NSGA2(prng)
ea.variator = [inspyred.ec.variators.blend_crossover,
inspyred.ec.variators.gaussian_mutation]
ea.terminator = inspyred.ec.terminators.generation_termination
ea.observer = [
inspyred.ec.observers.file_observer,
]
final_pop = ea.evolve(generator=problem.generator,
evaluator=problem.evaluator,
pop_size=25,
bounder=problem.bounder,
maximize=False,
max_generations=50,
statistics_file=stats_file,
individuals_file=individuals_file)
# Save the final population archive to CSV files
stats_file = open('{}-{}-final-statistics-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
individuals_file = open('{}-{}-final-individuals-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
inspyred.ec.observers.file_observer(ea.archive, 'final', None,
args={'statistics_file': stats_file, 'individuals_file': individuals_file})
if display:
final_arc = ea.archive
print('Best Solutions: \n')
for f in final_arc:
print(f)
x = []
y = []
for f in final_arc:
x.append(f.fitness[0])
y.append(f.fitness[1])
plt.scatter(x, y, c='b')
plt.xlabel('Total demand deficit [Ml/d]')
plt.ylabel('Total Transferred volume [Ml/d]')
title = 'Harmonic Control Curve' if harmonic else 'Monthly Control Curve'
plt.savefig('{0} Example ({1}).pdf'.format(ea.__class__.__name__, title), format='pdf')
plt.show()
return ea
def load_individuals(filename):
""" Read an inspyred individuals file in to two pandas.DataFrame objects.
There is one DataFrame for the objectives and another for the variables.
"""
import ast
index = []
all_objs = []
all_vars = []
with open(filename, 'r') as f:
for row in f.readlines():
gen, pop_id, objs, vars = ast.literal_eval(row.strip())
index.append((gen, pop_id))
all_objs.append(objs)
all_vars.append(vars)
index = pd.MultiIndex.from_tuples(index, names=['generation', 'individual'])
return pd.DataFrame(all_objs, index=index), pd.DataFrame(all_vars, index=index)
def animate_generations(objective_data, colors):
"""
Animate the pareto frontier plot over the saved generations.
"""
import matplotlib.animation as animation
def update_line(gen, dfs, ax, xmax, ymax):
ax.cla()
artists = []
for i in range(gen+1):
for c, key in zip(colors, sorted(dfs.keys())):
df = dfs[key]
scat = ax.scatter(df.loc[i][0], df.loc[i][1], alpha=0.8**(gen-i), color=c,
label=key if i == gen else None, clip_on=True, zorder=100)
artists.append(scat)
ax.set_title('Generation: {:d}'.format(gen))
ax.set_xlabel('Total demand deficit [Ml/d]')
ax.set_ylabel('Total Transferred volume [Ml/d]')
ax.set_xlim(0, xmax)
ax.set_ylim(0, ymax)
ax.legend()
ax.grid()
return artists
fig, ax = plt.subplots(figsize=(10, 10))
last_gen = list(objective_data.values())[0].index[-1][0]
last_gen = int(last_gen)
xmax = max(df.loc[last_gen][0].max() for df in objective_data.values())
ymax = max(df.loc[last_gen][1].max() for df in objective_data.values())
line_ani = animation.FuncAnimation(fig, update_line, last_gen+1,
fargs=(objective_data, ax, xmax, ymax), interval=400, repeat=False)
line_ani.save('generations.mp4', bitrate=1024,)
fig.savefig('generations.png')
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--harmonic', action='store_true', help='Use an harmonic control curve.')
parser.add_argument('--plot', action='store_true', help='Plot the pareto frontier.')
args = parser.parse_args()
if args.plot:
objs, vars = {}, {}
for cctype in ('monthly', 'harmonic'):
objs[cctype], vars[cctype] = load_individuals('two_reservoir_moea-{}-individuals-file.csv'.format(cctype))
animate_generations(objs, ('b', 'r'))
plt.show()
else:
moea_main(display=True, harmonic=args.harmonic)
| gpl-3.0 | 277,628,569,962,579,800 | 37.465517 | 127 | 0.651277 | false |
ver228/tierpsy-tracker | tierpsy/gui/BatchProcessing_ui.py | 1 | 13026 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'BatchProcessing.ui'
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_BatchProcessing(object):
def setupUi(self, BatchProcessing):
BatchProcessing.setObjectName("BatchProcessing")
BatchProcessing.resize(594, 504)
self.centralwidget = QtWidgets.QWidget(BatchProcessing)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setObjectName("gridLayout_2")
self.p_video_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_video_dir_root.setObjectName("p_video_dir_root")
self.gridLayout_2.addWidget(self.p_video_dir_root, 0, 2, 1, 1)
self.pushButton_videosDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_videosDir.setObjectName("pushButton_videosDir")
self.gridLayout_2.addWidget(self.pushButton_videosDir, 0, 1, 1, 1)
self.p_tmp_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_tmp_dir_root.setObjectName("p_tmp_dir_root")
self.gridLayout_2.addWidget(self.p_tmp_dir_root, 5, 2, 1, 1)
self.p_videos_list = QtWidgets.QLineEdit(self.centralwidget)
self.p_videos_list.setEnabled(True)
self.p_videos_list.setObjectName("p_videos_list")
self.gridLayout_2.addWidget(self.p_videos_list, 1, 2, 1, 1)
self.p_mask_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_mask_dir_root.setObjectName("p_mask_dir_root")
self.gridLayout_2.addWidget(self.p_mask_dir_root, 2, 2, 1, 1)
self.pushButton_tmpDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_tmpDir.setObjectName("pushButton_tmpDir")
self.gridLayout_2.addWidget(self.pushButton_tmpDir, 5, 1, 1, 1)
self.pushButton_txtFileList = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_txtFileList.setEnabled(True)
self.pushButton_txtFileList.setObjectName("pushButton_txtFileList")
self.gridLayout_2.addWidget(self.pushButton_txtFileList, 1, 1, 1, 1)
self.pushButton_masksDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_masksDir.setObjectName("pushButton_masksDir")
self.gridLayout_2.addWidget(self.pushButton_masksDir, 2, 1, 1, 1)
self.p_results_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_results_dir_root.setObjectName("p_results_dir_root")
self.gridLayout_2.addWidget(self.p_results_dir_root, 3, 2, 1, 1)
self.pushButton_paramFile = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_paramFile.setObjectName("pushButton_paramFile")
self.gridLayout_2.addWidget(self.pushButton_paramFile, 4, 1, 1, 1)
self.pushButton_resultsDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_resultsDir.setObjectName("pushButton_resultsDir")
self.gridLayout_2.addWidget(self.pushButton_resultsDir, 3, 1, 1, 1)
self.checkBox_txtFileList = QtWidgets.QCheckBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_txtFileList.sizePolicy().hasHeightForWidth())
self.checkBox_txtFileList.setSizePolicy(sizePolicy)
self.checkBox_txtFileList.setText("")
self.checkBox_txtFileList.setObjectName("checkBox_txtFileList")
self.gridLayout_2.addWidget(self.checkBox_txtFileList, 1, 0, 1, 1)
self.checkBox_tmpDir = QtWidgets.QCheckBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_tmpDir.sizePolicy().hasHeightForWidth())
self.checkBox_tmpDir.setSizePolicy(sizePolicy)
self.checkBox_tmpDir.setText("")
self.checkBox_tmpDir.setObjectName("checkBox_tmpDir")
self.gridLayout_2.addWidget(self.checkBox_tmpDir, 5, 0, 1, 1)
self.p_json_file = QtWidgets.QComboBox(self.centralwidget)
self.p_json_file.setEditable(True)
self.p_json_file.setObjectName("p_json_file")
self.gridLayout_2.addWidget(self.p_json_file, 4, 2, 1, 1)
self.verticalLayout.addLayout(self.gridLayout_2)
self.verticalLayout_2.addLayout(self.verticalLayout)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.gridLayout_3 = QtWidgets.QGridLayout()
self.gridLayout_3.setObjectName("gridLayout_3")
self.p_is_debug = QtWidgets.QCheckBox(self.centralwidget)
self.p_is_debug.setObjectName("p_is_debug")
self.gridLayout_3.addWidget(self.p_is_debug, 5, 2, 1, 1)
self.label_numMaxProc = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_numMaxProc.sizePolicy().hasHeightForWidth())
self.label_numMaxProc.setSizePolicy(sizePolicy)
self.label_numMaxProc.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_numMaxProc.setWordWrap(True)
self.label_numMaxProc.setObjectName("label_numMaxProc")
self.gridLayout_3.addWidget(self.label_numMaxProc, 2, 1, 1, 1)
self.label = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
self.label.setObjectName("label")
self.gridLayout_3.addWidget(self.label, 0, 3, 1, 1)
self.p_force_start_point = QtWidgets.QComboBox(self.centralwidget)
self.p_force_start_point.setObjectName("p_force_start_point")
self.gridLayout_3.addWidget(self.p_force_start_point, 1, 3, 1, 3)
self.p_copy_unfinished = QtWidgets.QCheckBox(self.centralwidget)
self.p_copy_unfinished.setObjectName("p_copy_unfinished")
self.gridLayout_3.addWidget(self.p_copy_unfinished, 5, 1, 1, 1)
self.p_max_num_process = QtWidgets.QSpinBox(self.centralwidget)
self.p_max_num_process.setObjectName("p_max_num_process")
self.gridLayout_3.addWidget(self.p_max_num_process, 3, 1, 1, 1)
self.p_pattern_exclude = QtWidgets.QLineEdit(self.centralwidget)
self.p_pattern_exclude.setObjectName("p_pattern_exclude")
self.gridLayout_3.addWidget(self.p_pattern_exclude, 1, 2, 1, 1)
self.label_2 = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setObjectName("label_2")
self.gridLayout_3.addWidget(self.label_2, 2, 3, 1, 1)
self.p_end_point = QtWidgets.QComboBox(self.centralwidget)
self.p_end_point.setObjectName("p_end_point")
self.gridLayout_3.addWidget(self.p_end_point, 3, 3, 1, 3)
self.p_pattern_include = QtWidgets.QLineEdit(self.centralwidget)
self.p_pattern_include.setObjectName("p_pattern_include")
self.gridLayout_3.addWidget(self.p_pattern_include, 1, 1, 1, 1)
self.label_patternExc = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_patternExc.sizePolicy().hasHeightForWidth())
self.label_patternExc.setSizePolicy(sizePolicy)
self.label_patternExc.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_patternExc.setWordWrap(True)
self.label_patternExc.setObjectName("label_patternExc")
self.gridLayout_3.addWidget(self.label_patternExc, 0, 2, 1, 1)
self.label_patternIn = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_patternIn.sizePolicy().hasHeightForWidth())
self.label_patternIn.setSizePolicy(sizePolicy)
self.label_patternIn.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_patternIn.setWordWrap(True)
self.label_patternIn.setObjectName("label_patternIn")
self.gridLayout_3.addWidget(self.label_patternIn, 0, 1, 1, 1)
self.p_is_copy_video = QtWidgets.QCheckBox(self.centralwidget)
self.p_is_copy_video.setObjectName("p_is_copy_video")
self.gridLayout_3.addWidget(self.p_is_copy_video, 6, 1, 1, 1)
self.p_unmet_requirements = QtWidgets.QCheckBox(self.centralwidget)
self.p_unmet_requirements.setObjectName("p_unmet_requirements")
self.gridLayout_3.addWidget(self.p_unmet_requirements, 6, 2, 1, 1)
self.p_only_summary = QtWidgets.QCheckBox(self.centralwidget)
self.p_only_summary.setObjectName("p_only_summary")
self.gridLayout_3.addWidget(self.p_only_summary, 7, 1, 1, 1)
self.pushButton_start = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(18)
self.pushButton_start.setFont(font)
self.pushButton_start.setObjectName("pushButton_start")
self.gridLayout_3.addWidget(self.pushButton_start, 5, 3, 3, 1)
self.horizontalLayout_2.addLayout(self.gridLayout_3)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
BatchProcessing.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(BatchProcessing)
self.menubar.setGeometry(QtCore.QRect(0, 0, 594, 22))
self.menubar.setObjectName("menubar")
BatchProcessing.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(BatchProcessing)
self.statusbar.setObjectName("statusbar")
BatchProcessing.setStatusBar(self.statusbar)
self.retranslateUi(BatchProcessing)
QtCore.QMetaObject.connectSlotsByName(BatchProcessing)
def retranslateUi(self, BatchProcessing):
_translate = QtCore.QCoreApplication.translate
BatchProcessing.setWindowTitle(_translate("BatchProcessing", "Batch Processing"))
self.pushButton_videosDir.setText(_translate("BatchProcessing", "Original Videos Dir"))
self.pushButton_tmpDir.setText(_translate("BatchProcessing", "Temporary Dir"))
self.pushButton_txtFileList.setText(_translate("BatchProcessing", "Individual Files List"))
self.pushButton_masksDir.setText(_translate("BatchProcessing", "Masked Videos Dir"))
self.pushButton_paramFile.setText(_translate("BatchProcessing", "Parameters File"))
self.pushButton_resultsDir.setText(_translate("BatchProcessing", "Tracking Results Dir"))
self.p_is_debug.setText(_translate("BatchProcessing", "Print debug information"))
self.label_numMaxProc.setText(_translate("BatchProcessing", "Maximum Number of Processes"))
self.label.setText(_translate("BatchProcessing", "Analysis Start Point"))
self.p_copy_unfinished.setText(_translate("BatchProcessing", "Copy Unfinished Analysis"))
self.label_2.setText(_translate("BatchProcessing", "Analysis End Point"))
self.label_patternExc.setText(_translate("BatchProcessing", "File Pattern to Exclude"))
self.label_patternIn.setText(_translate("BatchProcessing", "File Pattern to Include"))
self.p_is_copy_video.setText(_translate("BatchProcessing", "Copy Raw Videos to Temp Dir"))
self.p_unmet_requirements.setText(_translate("BatchProcessing", "Print Unmet Requirements"))
self.p_only_summary.setText(_translate("BatchProcessing", "Only Display Progress Summary"))
self.pushButton_start.setText(_translate("BatchProcessing", "START"))
| mit | -3,348,856,700,559,731,000 | 63.167488 | 109 | 0.720482 | false |
MyRobotLab/pyrobotlab | home/Papaouitai/azureTranslator.py | 1 | 5374 | import urllib, urllib2
import json
from datetime import datetime
acapelaSpeech = Runtime.createAndStart("speech", "AcapelaSpeech")
client_id = 'your client id'
client_secret = 'your client secret'
azure = Runtime.createAndStart('azure', "ProgramAB")
htmlfilter = Runtime.createAndStart("htmlfilter", "HtmlFilter")
azure.addTextListener(htmlfilter)
htmlfilter.addTextListener(acapelaSpeech)
azure.startSession('default', 'azure')
def datestring (display_format="%a, %d %b %Y %H:%M:%S", datetime_object=None):
if datetime_object is None:
datetime_object = datetime.utcnow()
return datetime.strftime(datetime_object, display_format)
def get_access_token ():
data = urllib.urlencode({
'client_id' : client_id,
'client_secret' : client_secret,
'grant_type' : 'client_credentials',
'scope' : 'http://api.microsofttranslator.com'
})
try:
request = urllib2.Request('https://datamarket.accesscontrol.windows.net/v2/OAuth2-13')
request.add_data(data)
response = urllib2.urlopen(request)
response_data = json.loads(response.read())
if response_data.has_key('access_token'):
return response_data['access_token']
except urllib2.URLError, e:
if hasattr(e, 'reason'):
print datestring(), 'Could not connect to the server:', e.reason
elif hasattr(e, 'code'):
print datestring(), 'Server error: ', e.code
except TypeError:
print datestring(), 'Bad data from server'
supported_languages = { # as defined here: http://msdn.microsoft.com/en-us/library/hh456380.aspx
'ar' : ' Arabic',
# 'bs-Latn' : 'Bosnian (Latin)',
# 'bg' : 'Bulgarian',
# 'ca' : 'Catalan',
# 'zh-CHS' : 'Chinese (Simplified)',
# 'zh-CHT' : 'Chinese (Traditional)',
# 'hr' : 'Croatian',
# 'cs' : 'Czech',
'da' : 'Danish',
'nl' : 'Dutch',
'en' : 'English',
# 'et' : 'Estonian',
# 'fi' : 'Finnish',
'fr' : 'French',
'de' : 'German',
'el' : 'Greek',
# 'ht' : 'Haitian Creole',
# 'he' : 'Hebrew',
# 'hi' : 'Hindi',
# 'mww' : 'Hmong Daw',
# 'hu' : 'Hungarian',
# 'id' : 'Indonesian',
'it' : 'Italian',
# 'ja' : 'Japanese',
# 'sw' : 'Kiswahili',
# 'tlh' : 'Klingon',
# 'tlh-Qaak' : 'Klingon (pIqaD)',
# 'ko' : 'Korean',
# 'lv' : 'Latvian',
# 'lt' : 'Lithuanian',
# 'ms' : 'Malay',
# 'mt' : 'Maltese',
'no' : 'Norwegian',
# 'fa' : 'Persian',
# 'pl' : 'Polish',
# 'pt' : 'Portuguese',
# 'otq' : 'Querétaro Otomi',
# 'ro' : 'Romanian',
# 'ru' : 'Russian',
# 'sr-Cyrl' : 'Serbian (Cyrillic)',
# 'sr-Latn' : 'Serbian (Latin)',
# 'sk' : 'Slovak',
# 'sl' : 'Slovenian',
'es' : 'Spanish',
'sv' : 'Swedish',
# 'th' : 'Thai',
# 'tr' : 'Turkish',
# 'uk' : 'Ukrainian',
# 'ur' : 'Urdu',
# 'vi' : 'Vietnamese',
# 'cy' : 'Welsh',
# 'yua' : 'Yucatec Maya',
}
male_languages = {
    'ar' : 'Nizar',
'da' : 'Rasmus',
'nl' : 'Jeroen',
'en' : 'Ryan',
'fr' : 'Antoine',
'de' : 'Klaus',
'el' : 'Dimitris',
'it' : 'Vittorio',
'no' : 'Olav',
'es' : 'Antonio',
'sv' : 'Emil',
}
en_languages = {
    'arab' : 'ar',
'danish' : 'da',
'dutch' : 'nl',
'english' : 'en',
'french' : 'fr',
'german' : 'de',
'greek' : 'el',
'italian' : 'it',
'norway' : 'no',
'spanish' : 'es',
'sweden' : 'sv',
}
def print_supported_languages ():
codes = []
for k,v in supported_languages.items():
codes.append('\t'.join([k, '=', v]))
return '\n'.join(codes)
def to_bytestring (s):
if s:
if isinstance(s, str):
return s
else:
return s.encode('utf-8')
def translate (access_token, text, to_lang, from_lang=None):
if not access_token:
        azure.getResponse('Say Sorry, the access token is invalid')
else:
if to_lang not in supported_languages.keys():
azure.getResponse("Say I haven't learned this language")
print print_supported_languages()
else:
data = { 'text' : to_bytestring(text), 'to' : to_lang }
if from_lang:
if from_lang not in supported_languages.keys():
azure.getResponse("Say I haven't learned this language")
print print_supported_languages()
return
else:
data['from'] = from_lang
try:
request = urllib2.Request('http://api.microsofttranslator.com/v2/Http.svc/Translate?'+urllib.urlencode(data))
request.add_header('Authorization', 'Bearer '+access_token)
response = urllib2.urlopen(request)
return response.read().replace('<string xmlns="http://schemas.microsoft.com/2003/10/Serialization/">', '').replace('</string>', '')
except urllib2.URLError, e:
if hasattr(e, 'reason'):
print datestring(), 'Could not connect to the server:', e.reason
elif hasattr(e, 'code'):
print datestring(), 'Server error: ', e.code
def translateText(text,language):
to = en_languages[language]
t_text = translate(get_access_token (),text,to)
acapelaSpeech.setVoice(male_languages[to])
azure.getResponse('Say '+t_text)
| apache-2.0 | 7,953,758,695,786,910,000 | 29.185393 | 147 | 0.547925 | false |
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_4_0_0/models/imagingstudy_tests.py | 1 | 8189 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b on 2019-05-07.
# 2019, SMART Health IT.
import os
import io
import unittest
import json
from . import imagingstudy
from .fhirdate import FHIRDate
class ImagingStudyTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("ImagingStudy", js["resourceType"])
return imagingstudy.ImagingStudy(js)
def testImagingStudy1(self):
inst = self.instantiate_from("imagingstudy-example-xr.json")
self.assertIsNotNone(inst, "Must have instantiated a ImagingStudy instance")
self.implImagingStudy1(inst)
js = inst.as_json()
self.assertEqual("ImagingStudy", js["resourceType"])
inst2 = imagingstudy.ImagingStudy(js)
self.implImagingStudy1(inst2)
def implImagingStudy1(self, inst):
self.assertEqual(inst.id, "example-xr")
self.assertEqual(inst.identifier[0].system, "urn:dicom:uid")
self.assertEqual(inst.identifier[0].use, "official")
self.assertEqual(inst.identifier[0].value, "urn:oid:2.16.124.113543.6003.1154777499.30246.19789.3503430046")
self.assertEqual(inst.identifier[1].type.coding[0].code, "ACSN")
self.assertEqual(inst.identifier[1].type.coding[0].system, "http://terminology.hl7.org/CodeSystem/v2-0203")
self.assertEqual(inst.identifier[1].use, "usual")
self.assertEqual(inst.identifier[1].value, "W12342398")
self.assertEqual(inst.identifier[2].use, "secondary")
self.assertEqual(inst.identifier[2].value, "55551234")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.modality[0].code, "DX")
self.assertEqual(inst.modality[0].system, "http://dicom.nema.org/resources/ontology/DCM")
self.assertEqual(inst.note[0].text, "XR Wrist 3+ Views")
self.assertEqual(inst.numberOfInstances, 2)
self.assertEqual(inst.numberOfSeries, 1)
self.assertEqual(inst.procedureCode[0].coding[0].code, "RPID2589")
self.assertEqual(inst.procedureCode[0].coding[0].display, "XR Wrist 3+ Views")
self.assertEqual(inst.procedureCode[0].coding[0].system, "http://www.radlex.org")
self.assertEqual(inst.procedureCode[0].text, "XR Wrist 3+ Views")
self.assertEqual(inst.reasonCode[0].coding[0].code, "357009")
self.assertEqual(inst.reasonCode[0].coding[0].display, "Closed fracture of trapezoidal bone of wrist")
self.assertEqual(inst.reasonCode[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.series[0].bodySite.code, "T-15460")
self.assertEqual(inst.series[0].bodySite.display, "Wrist Joint")
self.assertEqual(inst.series[0].bodySite.system, "http://snomed.info/sct")
self.assertEqual(inst.series[0].description, "XR Wrist 3+ Views")
self.assertEqual(inst.series[0].instance[0].number, 1)
self.assertEqual(inst.series[0].instance[0].sopClass.code, "urn:oid:1.2.840.10008.5.1.4.1.1.2")
self.assertEqual(inst.series[0].instance[0].sopClass.system, "urn:ietf:rfc:3986")
self.assertEqual(inst.series[0].instance[0].title, "PA VIEW")
self.assertEqual(inst.series[0].instance[0].uid, "2.16.124.113543.6003.1154777499.30246.19789.3503430045.1.1")
self.assertEqual(inst.series[0].instance[1].number, 2)
self.assertEqual(inst.series[0].instance[1].sopClass.code, "urn:oid:1.2.840.10008.5.1.4.1.1.2")
self.assertEqual(inst.series[0].instance[1].sopClass.system, "urn:ietf:rfc:3986")
self.assertEqual(inst.series[0].instance[1].title, "LL VIEW")
self.assertEqual(inst.series[0].instance[1].uid, "2.16.124.113543.6003.1154777499.30246.19789.3503430045.1.2")
self.assertEqual(inst.series[0].laterality.code, "419161000")
self.assertEqual(inst.series[0].laterality.display, "Unilateral left")
self.assertEqual(inst.series[0].laterality.system, "http://snomed.info/sct")
self.assertEqual(inst.series[0].modality.code, "DX")
self.assertEqual(inst.series[0].modality.system, "http://dicom.nema.org/resources/ontology/DCM")
self.assertEqual(inst.series[0].number, 3)
self.assertEqual(inst.series[0].numberOfInstances, 2)
self.assertEqual(inst.series[0].performer[0].function.coding[0].code, "PRF")
self.assertEqual(inst.series[0].performer[0].function.coding[0].system, "http://terminology.hl7.org/CodeSystem/v3-ParticipationType")
self.assertEqual(inst.series[0].started.date, FHIRDate("2011-01-01T11:01:20+03:00").date)
self.assertEqual(inst.series[0].started.as_json(), "2011-01-01T11:01:20+03:00")
self.assertEqual(inst.series[0].uid, "2.16.124.113543.6003.1154777499.30246.19789.3503430045.1")
self.assertEqual(inst.started.date, FHIRDate("2017-01-01T11:01:20+03:00").date)
self.assertEqual(inst.started.as_json(), "2017-01-01T11:01:20+03:00")
self.assertEqual(inst.status, "available")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">XR Wrist 3+ Views. John Smith (MRN: 09236). Accession: W12342398. Performed: 2017-01-01. 1 series, 2 images.</div>")
self.assertEqual(inst.text.status, "generated")
def testImagingStudy2(self):
inst = self.instantiate_from("imagingstudy-example.json")
self.assertIsNotNone(inst, "Must have instantiated a ImagingStudy instance")
self.implImagingStudy2(inst)
js = inst.as_json()
self.assertEqual("ImagingStudy", js["resourceType"])
inst2 = imagingstudy.ImagingStudy(js)
self.implImagingStudy2(inst2)
def implImagingStudy2(self, inst):
self.assertEqual(inst.id, "example")
self.assertEqual(inst.identifier[0].system, "urn:dicom:uid")
self.assertEqual(inst.identifier[0].value, "urn:oid:2.16.124.113543.6003.1154777499.30246.19789.3503430045")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.numberOfInstances, 1)
self.assertEqual(inst.numberOfSeries, 1)
self.assertEqual(inst.series[0].bodySite.code, "67734004")
self.assertEqual(inst.series[0].bodySite.display, "Upper Trunk Structure")
self.assertEqual(inst.series[0].bodySite.system, "http://snomed.info/sct")
self.assertEqual(inst.series[0].description, "CT Surview 180")
self.assertEqual(inst.series[0].instance[0].number, 1)
self.assertEqual(inst.series[0].instance[0].sopClass.code, "urn:oid:1.2.840.10008.5.1.4.1.1.2")
self.assertEqual(inst.series[0].instance[0].sopClass.system, "urn:ietf:rfc:3986")
self.assertEqual(inst.series[0].instance[0].uid, "2.16.124.113543.6003.189642796.63084.16748.2599092903")
self.assertEqual(inst.series[0].modality.code, "CT")
self.assertEqual(inst.series[0].modality.system, "http://dicom.nema.org/resources/ontology/DCM")
self.assertEqual(inst.series[0].number, 3)
self.assertEqual(inst.series[0].numberOfInstances, 1)
self.assertEqual(inst.series[0].uid, "2.16.124.113543.6003.2588828330.45298.17418.2723805630")
self.assertEqual(inst.started.date, FHIRDate("2011-01-01T11:01:20+03:00").date)
self.assertEqual(inst.started.as_json(), "2011-01-01T11:01:20+03:00")
self.assertEqual(inst.status, "available")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">CT Chest. John Smith (MRN: 09236). Accession: W12342398. Performed: 2011-01-01. 3 series, 12 images.</div>")
self.assertEqual(inst.text.status, "generated")
| bsd-3-clause | 1,004,375,434,319,766,000 | 62.48062 | 201 | 0.68592 | false |
wli/django-allauth | allauth/socialaccount/providers/paypal/provider.py | 1 | 1138 | from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class PaypalAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
return self.account.extra_data.get('name',
super(PaypalAccount, self).to_str())
class PaypalProvider(OAuth2Provider):
id = 'paypal'
name = 'Paypal'
account_class = PaypalAccount
def get_default_scope(self):
# See: https://developer.paypal.com/docs/integration/direct/identity/attributes/ # noqa
return ['openid', 'email']
def extract_uid(self, data):
return str(data['user_id'])
def extract_common_fields(self, data):
# See: https://developer.paypal.com/docs/api/#get-user-information
return dict(first_name=data.get('given_name', ''),
last_name=data.get('family_name', ''),
email=data.get('email'))
providers.registry.register(PaypalProvider)
| mit | 2,561,766,692,793,844,700 | 32.470588 | 96 | 0.654657 | false |
remyzane/flask_http_api | fair/execute.py | 1 | 4899 | import os
import json
from flask import request
from fair.utility import text_to_html
class CaseStorage(object):
def get_case(self, view, method):
raise NotImplementedError
def save_case(self, api_path, method, param_mode, params, code):
raise NotImplementedError
def save_config(self, api_path, method, post_type, json_p, params):
raise NotImplementedError
@staticmethod
def params_not_equal(old_params, new_params):
for param in old_params:
if old_params[param] != new_params.get(param):
return True
for param in new_params:
if new_params[param] != old_params.get(param):
return True
return False
class CaseLocalStorage(CaseStorage):
def __init__(self, workspace):
self.workspace = workspace
def get_case(self, view, method):
from fair.plugin.jsonp import JsonP
context = {'api_config': {}, 'api_json_p': None}
api_config_path = os.path.join(self.get_case_dir(view.uri, method.__name__.upper()), '__config__')
if os.path.exists(api_config_path):
with open(api_config_path, 'r') as config:
context['api_config'] = json.load(config)
# title, description = method.meta.title, method.meta.description
# context['api_uri'] = view.uri
# context['api_path'] = 'http://' + request.environ['HTTP_HOST'] + view.uri
# context['api_method'] = method.__name__.upper()
# context['api_params'] = get_api_params(method.meta.param_list, context.get('api_config'))
# context['api_description'] = text_to_html(title + (os.linesep*2 if description else '') + description)
# context['api_params_config'] = {}
context['api_codes'] = self.get_sorted_code(view, method)
for plugin in method.api.plugins:
if isinstance(plugin, JsonP):
context['api_json_p'] = plugin.callback_field_name
return context
def get_exe_case(self, view, method, code):
use_cases = ''
case_path = os.path.join(self.get_case_dir(view.uri, method.__name__.upper()), code)
if os.path.exists(case_path):
data_file = open(case_path, 'r')
for line in data_file.readlines():
line = line.replace(os.linesep, '')
if use_cases:
use_cases += ', ' + line
else:
use_cases += line
data_file.close()
return '[%s]' % use_cases
def get_case_dir(self, api_uri, method_name):
api_path = '_'.join(api_uri[1:].split('/'))
case_dir = os.path.realpath(os.path.join(self.workspace, 'exe_ui', api_path, method_name))
if not os.path.exists(case_dir):
os.makedirs(case_dir)
return case_dir
def save_case(self, api_path, method, param_mode, params, code):
result = []
case_path = os.path.join(self.get_case_dir(api_path, method), code)
new_data = json.dumps({
'param_mode': param_mode,
'params': params
}) + os.linesep
# read old record
if os.path.exists(case_path):
data_file = open(case_path, 'r')
for line in data_file.readlines():
line_data = json.loads(line)
if line_data['param_mode'] != param_mode or self.params_not_equal(line_data['params'], params):
result.append(line)
data_file.close()
# add new record
result.append(new_data)
        # save the latest 10 records
data_file = open(case_path, 'w')
for line in result[-10:]:
data_file.write(line)
data_file.close()
return {'result': 'success'}
def save_config(self, api_path, method, post_type, json_p, params):
config_path = os.path.join(self.get_case_dir(api_path, method), '__config__')
# save configure
data_file = open(config_path, 'w')
data_file.write(json.dumps({'method': method, 'post_type': post_type, 'json_p': json_p, 'params': params}))
data_file.close()
return {'result': 'success'}
def get_sorted_code(self, view, method):
codes = []
is_param_type = False
for error_code in method.api.code_index:
error_message = method.api.code_dict[error_code]
if error_code.startswith('param_type_error_') and not is_param_type:
codes.append(('----', None, None))
is_param_type = True
if is_param_type and not error_code.startswith('param_type_error_'):
codes.append(('----', None, None))
is_param_type = False
codes.append((error_code, text_to_html(error_message),
self.get_exe_case(view, method, error_code)))
return codes
| apache-2.0 | 1,550,499,759,231,576,800 | 36.976744 | 115 | 0.567463 | false |