# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('stats', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Dispensed',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('total_approved', models.IntegerField(help_text='Number of projects approved in any instance.')),
('total_dispensed', models.IntegerField(help_text='Number of projects that did not go to 2nd round of votes.')),
('dispensed_by_plenary', models.IntegerField(help_text='Those projects dispensed due to `acuerdo del pleno`.')),
('dispensed_by_spokesmen', models.IntegerField(help_text='Those projects dispensed due to `junta de portavoces`.')),
('dispensed_others', models.IntegerField(help_text='All other projects dispensed, and those with no specific reason.')),
],
options={
},
bases=(models.Model,),
),
]
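
# A hedged usage note: with the standard Django workflow, this migration
# would be applied with the management command below (shown for illustration):
#   python manage.py migrate stats 0002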
| proyectosdeley/proyectos_de_ley | proyectos_de_ley/stats/migrations/0002_dispensed.py | Python | mit | 1,175 |
#This will be the thread responsible for the matchmaking, which operates as follows:
#There are four lists into which the players are divided based on their rank.
#List 1 is for ranks 0,1,2.
#List 2 is for ranks 3,4,5.
#List 3 is for ranks 6,7,8.
#List 4 is for ranks 9,10.
#When a player has waited for a match too long, this thread will start looking for
#players in adjacent lists, first in the higher category list and then in the
#lower one.
#Each player has a dictionary associated with him, which stores his info
#and some other parameters, like his network info to connect to him.
#This thread supports only 2 operations:
# 1) Add to match making lists
# 2) Terminate itself
MAX_LOOPS = 10
MAX_WAIT = 10
import Queue,time,random
#inputQueue is for getting players from account threads
#outputQueue is for sending match tokens to the thread that handles the matches
#exitQueue is used for exiting the thread
def mmThread(inputQueue,exitQueue,outputQueue):
#Lists for all difficulties
noviceList = []
apprenticeList = []
adeptList = []
expertList = []
#put them in one list
playerList = [noviceList,apprenticeList,adeptList,expertList]
#This list contains the players that have waited for too long in their Queue
needRematch = []
while True:
loopCounter = 0
#Check for exit signal
        try:
            exit = exitQueue.get(False)
            if exit:
                break
        except Queue.Empty:
            pass
#loop over new entries at most MAX_LOOPS times then do it again
while loopCounter < MAX_LOOPS:
try:
#Get new player and add him to a list according to his rank
newPlayer = inputQueue.get(False)
playerRank = newPlayer.get('rank')
listIndex = playerRank // 3
newPlayer['entryTime'] = time.time()
playerList[listIndex].append(newPlayer)
                print 'MMTHREAD: Got user'
                print 'MMTHREAD: USER RANK IS %d' % playerRank
except Queue.Empty:
break
loopCounter += 1
#First check for players in the rematch Queue
for player in needRematch[:]:
position = player.get('rank') // 3
foundMatch = False
            #Skip if the list is empty or this player is no longer at its head
            if len(playerList[position]) == 0 or playerList[position][0] != player:
                continue
#Check for enemy player one list above this player
if position + 1 < len(playerList) and len(playerList[position+1]) >= 1:
foundMatch = True
firstPlayer = playerList[position].pop(0)
secondPlayer = playerList[position+1].pop(0)
needRematch.remove(player)
elif (position - 1 >= 0) and len(playerList[position-1]) >= 1:
#Else check for enemy player one list below this player
foundMatch = True
firstPlayer = playerList[position].pop(0)
secondPlayer = playerList[position-1].pop(0)
needRematch.remove(player)
#Add player tokens to Queue for game play thread
if foundMatch:
bothPlayers = [firstPlayer,secondPlayer]
data = {'turn':0,'players':bothPlayers}
                print 'Add new Player token'
outputQueue.put(data)
#Match players in same list
for category in playerList:
while True:
try:
#Try to pop two players from the list
#If successfull, put their token into game play thread Queue
firstPlayer = None
secondPlayer = None
firstPlayer = category.pop(0)
secondPlayer = category.pop(0)
bothPlayers = [firstPlayer,secondPlayer]
turn = random.randint(0,1)
data = {'turn':turn,'players':bothPlayers}
                    print 'Add new Player token'
outputQueue.put(data)
                except IndexError:
                    #If only one player was popped, put him back
                    if secondPlayer is None and firstPlayer is not None:
                        category.insert(0, firstPlayer)
                    break
        #Check all rank lists for players that didn't find a match for a long time
        for i in range(0, len(playerList)):
            if len(playerList[i]) > 0 and playerList[i][0] not in needRematch:
                if time.time() - playerList[i][0].get('entryTime') >= MAX_WAIT:
                    needRematch.append(playerList[i][0])
print 'match making thread out'
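
#A minimal usage sketch (hedged: the wiring below and the player fields other
#than 'rank' are illustrative assumptions, not taken from the rest of the codebase):
if __name__ == '__main__':
    import threading
    inQ, exQ, outQ = Queue.Queue(), Queue.Queue(), Queue.Queue()
    worker = threading.Thread(target=mmThread, args=(inQ, exQ, outQ))
    worker.start()
    #rank 7 // 3 == 2 and 8 // 3 == 2, so both players land in the adept list
    inQ.put({'rank': 7, 'address': ('10.0.0.5', 9000)})
    inQ.put({'rank': 8, 'address': ('10.0.0.6', 9000)})
    time.sleep(1)
    print outQ.get(False)  #a match token: {'turn': 0 or 1, 'players': [...]}
    exQ.put(True)          #ask the thread to terminate
    worker.join()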
| Shalantor/Connect4 | server/matchMakingThread.py | Python | mit | 4,679 |
# coding: utf-8
from geventwebsocket.handler import WebSocketHandler
from gevent import pywsgi, sleep
import json
import MySQLdb
class JPC:
    #
    # Initialization
    #
def __init__(self, filepath_config):
import hashlib
        # Load the configuration file
        fp = open(filepath_config, 'r')
        config = json.load(fp)
        fp.close()
        # Store the settings in instance variables
self.host = config['host']
self.port = config['port']
self.langlist = json.load(open(config['langfile'], 'r'))
self.enckey = hashlib.md5(config['key']).digest()
self.db_host = config['db_host']
self.db_name = config['db_name']
self.db_username = config['db_username']
self.db_password = config['db_password']
return
    #
    # Check a submission
    #
def execute(self):
import codecs
import commands
import os
import pwd
        # Get the submission info
        code = self.packet['code']
        lang = self.packet['lang']
        script = self.langlist['compile'][lang]
        extension = self.langlist['extension'][lang]
        # Generate the data we need
filepath_in = self.randstr(8) + extension
filepath_out = self.randstr(8)
username = self.randstr(16)
        # Move to /tmp
        os.chdir('/tmp/')
        # Create a user
try:
os.system("useradd -M {0}".format(username))
pwnam = pwd.getpwnam(username)
except Exception:
return
        # Generate the code file
        fp = codecs.open(filepath_in, 'w', 'utf-8')
        fp.write(code)
        fp.close()
        # Compile
compile_result = commands.getoutput(
script.format(input=filepath_in, output=filepath_out)
)
        # Delete the source code
try:
os.remove(filepath_in)
except Exception:
pass
        # Send the compile result
try:
self.ws.send(json.dumps({'compile': compile_result}))
except Exception:
pass
        # Did it compile?
        if not os.path.exists(filepath_out):
            print("[INFO] Compilation failed.")
            return
        # Change the permissions of the executable
try:
os.chmod(filepath_out, 0500)
os.chown(filepath_out, pwnam.pw_uid, pwnam.pw_gid)
            # The expected-output generator too, just in case
os.chown(self.record['output_code'], pwnam.pw_uid, pwnam.pw_gid)
        except Exception:
            try:
                os.remove(filepath_out)
                os.system("userdel -r {0}".format(username))
            except Exception:
                print("[ERROR] Failed to delete /tmp/{0}.".format(filepath_out))
                print("[ERROR] Failed to delete user {0}.".format(username))
            return
        # Run the checks
clear = True
for n in range(int(self.record['exec_time'])):
            print("[INFO] Attempt {0} started.".format(n + 1))
            # Announce the start of execution
try:
self.ws.send(json.dumps({'attempt': n + 1}))
except Exception:
pass
            # Generate the input
self.input_data = commands.getoutput(
self.record['input_code'] + " " + str(n)
)
            # Generate the expected output
            self.output_data = self.run_command(username, self.record['output_code'])
            # Get the actual result
result = self.run_command(username, './'+filepath_out)
#print "Input : ", self.input_data
#print "Answer : ", self.output_data
#print "Result : ", result
            # Timeout
            if result is False:
                self.ws.send(json.dumps({'failure': n + 1}))
                clear = False
                print("[INFO] Timed out.")
                continue
            # Wrong answer
            if self.output_data.rstrip('\n') != result.rstrip('\n'):
                self.ws.send(json.dumps({'failure': n + 1}))
                clear = False
                print("[INFO] The result is incorrect.")
                continue
            # Announce the result
            try:
                self.ws.send(json.dumps({'success': n + 1}))
                print("[INFO] The check succeeded.")
            except Exception:
                pass
        # Send the overall result notification
        if clear:
            self.ws.send('{"complete":"success"}')
            self.update_db()
        else:
            self.ws.send('{"complete":"failure"}')
        # Delete the executable
try:
os.remove(filepath_out)
os.system("userdel -r {0}".format(username))
        except Exception:
            print("[ERROR] Failed to delete /tmp/{0}.".format(filepath_out))
            print("[ERROR] Failed to delete user {0}.".format(username))
return
    #
    # Run a command with restrictions
    #
def run_command(self, username, filepath):
import subprocess
import time
import sys
        # Spawn the process
proc = subprocess.Popen(
[
'su',
username,
'-c',
'ulimit -v {0}; {1}'.format(
str(self.record['limit_memory']),
filepath
)
],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
stdin = subprocess.PIPE,
)
        # Send the input
proc.stdin.write(self.input_data.rstrip('\n') + '\n')
proc.stdin.close()
        # Set the time limit
        deadline = time.time() + float(self.record['limit_time']) / 1000.0
        while time.time() < deadline and proc.poll() is None:
            time.sleep(0.20)
        # Timeout
        if proc.poll() is None:
            if sys.version_info >= (2, 6):
                proc.terminate()
            return False
        # Normal termination
stdout = proc.stdout.read()
return stdout
    #
    # Add the score
    #
def update_db(self):
import time
cursor = self.db.cursor(MySQLdb.cursors.DictCursor)
        # Add the score
cursor.execute("UPDATE account SET score=score+{score} WHERE user='{user}';".format(score=int(self.record['score']), user=self.user))
        # Record the problem as solved
cursor.execute("UPDATE account SET solved=concat('{id},', solved) WHERE user='{user}';".format(id=self.record['id'], user=self.user))
        # Increment the solve count
cursor.execute("UPDATE problem SET solved=solved+1 WHERE id={id};".format(id=self.record['id']))
        # Update the last user to solve it
cursor.execute("UPDATE problem SET solved_user='{user}' WHERE id={id};".format(user=self.user, id=self.record['id']))
        # Update the solve time
cursor.execute("UPDATE problem SET last_date='{date}' WHERE id={id};".format(date=time.strftime('%Y-%m-%d %H:%M:%S'), id=self.record['id']))
cursor.close()
self.db.commit()
return
    #
    # Handle a new request
    #
def handle(self, env, response):
self.ws = env['wsgi.websocket']
        print("[INFO] Received a new request.")
        # Get the request
self.packet = self.ws.receive()
if not self.analyse_packet(): return
        # Fetch the problem
self.get_problem()
        # Execute
self.execute()
return
    #
    # Fetch the problem details
    #
def get_problem(self):
cursor = self.db.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("SELECT * FROM problem WHERE id={id};".format(id=self.packet['id']))
self.record = cursor.fetchall()[0]
cursor.close()
return
    #
    # Parse the data
    #
def analyse_packet(self):
from Crypto.Cipher import AES
        # Parse the packet as JSON
        try:
            self.packet = json.loads(self.packet)
        except Exception:
            print("[ERROR] Failed to parse the JSON.")
            return False
        # Verify the integrity of the data
        if not self.check_payload():
            print("[ERROR] The data was judged to be invalid.")
            self.ws.send('{"error":"Invalid data was sent."}')
            return False
        # Decrypt the user name
iv = self.packet['iv'].decode('base64')
enc_user = self.packet['user'].decode('base64')
aes = AES.new(self.enckey, AES.MODE_CBC, iv)
self.user = aes.decrypt(enc_user).replace('\x00', '')
        print("[INFO] The user for this attempt is {0}.".format(self.user))
        # Escape
self.user = MySQLdb.escape_string(self.user)
self.packet['id'] = int(self.packet['id'])
return True
    #
    # Check whether the payload is valid
    #
def check_payload(self):
        # Does it contain the minimum required information?
if 'lang' not in self.packet : return False
if 'code' not in self.packet : return False
if 'id' not in self.packet : return False
if 'iv' not in self.packet : return False
if 'user' not in self.packet : return False
        # Is the language available?
if 'compile' not in self.langlist : return False
if 'extension' not in self.langlist : return False
if self.packet['lang'] not in self.langlist['compile'] : return False
if self.packet['lang'] not in self.langlist['extension'] : return False
        # The data is valid
return True
    #
    # Generate a random string
    #
def randstr(self, length):
import random
import string
return ''.join([
random.choice(string.ascii_letters + string.digits)
for i in range(length)
])
    #
    # Receive a request
    #
def procon(self, env, response):
path = env['PATH_INFO']
if path == "/":
return self.handle(env, response)
return
    #
    # Run the server
    #
def run(self):
        # Initialize the server
server = pywsgi.WSGIServer(
(self.host, self.port),
self.procon,
handler_class = WebSocketHandler
)
        # Connect to SQL
self.db = MySQLdb.connect(host = self.db_host,
db = self.db_name,
user = self.db_username,
passwd = self.db_password,
charset = 'utf8',
)
        # Start the server
server.serve_forever()
return
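
#
# A minimal entry-point sketch (hedged: the config filename is an assumption;
# the expected keys are the ones read in __init__ above)
#
if __name__ == '__main__':
    jpc = JPC('config.json')
    jpc.run()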
| ptr-yudai/JokenPC | server/JPC.py | Python | mit | 11,068 |
"""
download a file named filename from the atsc301 downloads directory
and save it as a local file with the same name.
command line example::
python -m a301utils.a301_readfile photon_data.csv
module example::
from a301utils.a301_readfile import download
download('photon_data.csv')
"""
import argparse
import requests
from pathlib import Path
import sys
import os
import shutil
def download(filename):
"""
copy file filename from http://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads to
the local directory
Parameters
----------
    filename: string
        name of the file to fetch from the downloads directory
Returns
-------
Side effect: Creates a copy of that file in the local directory
"""
url = 'https://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads/{}'.format(filename)
filepath = Path('./{}'.format(filename))
if filepath.exists():
the_size = filepath.stat().st_size
print(('\n{} already exists\n'
'and is {} bytes\n'
'will not overwrite\n').format(filename,the_size))
return None
tempfile = str(filepath) + '_tmp'
temppath = Path(tempfile)
with open(tempfile, 'wb') as localfile:
response = requests.get(url, stream=True)
if not response.ok:
print('response: ',response)
raise Exception('Something is wrong, requests.get() failed with filename {}'.format(filename))
for block in response.iter_content(1024):
if not block:
break
localfile.write(block)
the_size=temppath.stat().st_size
if the_size < 10.e3:
print('Warning -- your file is tiny (smaller than 10 Kbyte)\nDid something go wrong?')
shutil.move(tempfile,filename)
the_size=filepath.stat().st_size
print('downloaded {}\nsize = {}'.format(filename,the_size))
return None
if __name__ == "__main__":
linebreaks=argparse.RawTextHelpFormatter
descrip=__doc__.lstrip()
parser = argparse.ArgumentParser(formatter_class=linebreaks,description=descrip)
parser.add_argument('filename',type=str,help='name of file to download')
args=parser.parse_args()
download(args.filename)
| a301-teaching/a301_code | a301utils/a301_readfile.py | Python | mit | 2,239 |
#!/usr/bin/env python3
import matplotlib.pyplot as plt
from math import sqrt
from math import log
dx = [1/sqrt(16), 1/sqrt(64), 1/sqrt(256), 1/sqrt(1024)]
dx_tri = [1/sqrt(32), 1/sqrt(128), 1/sqrt(512), 1/sqrt(2048)]
dx_pert = [0.0270466, 0.0134827, 0.00680914, 0.00367054]
dx_fp = [0.122799, 0.081584, 0.0445639, 0.0225922, 0.0113763]
fp_actual = 0.0441995
rl2_euler = [0.00059068, 0.000113051, 2.26156e-05, 5.11884e-06]
rl2_euler_tri = [0.00101603, 0.000277795, 6.37774e-05, 1.4947e-05]
rl2_euler_tri_pert = [0.00053851, 0.000121805, 2.67446e-05, 4.97857e-05]
rl2_euler_tri_limited = [0.00234712, 0.000548344, 0.000139978, 3.56414e-05]
rl2_euler_lp_tri_limited = [0.00242227, 0.000586065, 0.000140727]
rl2_euler_limited = [0.00187271, 0.000435096, 0.000120633, 2.90233e-05]
rl2_euler_lp_limited = [0.00180033, 0.000422567, 0.000120477, 2.90644e-05]
rl2_ns = [0.000576472, 0.000132735, 7.0506e-05, 6.67272e-05]
rl2_ns_fp = [abs(fp_actual - 0.008118), abs(fp_actual - 0.015667), abs(fp_actual - 0.026915), abs(fp_actual - 0.037524), abs(fp_actual - 0.042895)]
print("rho euler l2: "+str(log(rl2_euler[2]/rl2_euler[3])/log(dx[2]/dx[3])))
print("rho euler tri l2: "+str(log(rl2_euler_tri[2]/rl2_euler_tri[3])/log(dx_tri[2]/dx_tri[3])))
print("rho euler tri perturbed l2: "+str(log(rl2_euler_tri_pert[1]/rl2_euler_tri_pert[2])/log(dx_pert[1]/dx_pert[2])))
print("rho euler tri limited l2: "+str(log(rl2_euler_tri_limited[2]/rl2_euler_tri_limited[3])/log(dx_tri[2]/dx_tri[3])))
print("rho euler lp tri limited l2: "+str(log(rl2_euler_lp_tri_limited[1]/rl2_euler_lp_tri_limited[2])/log(dx_tri[1]/dx_tri[2])))
print("rho euler limited l2: "+str(log(rl2_euler_limited[2]/rl2_euler_limited[3])/log(dx[2]/dx[3])))
print("rho euler lp limited l2: "+str(log(rl2_euler_lp_limited[2]/rl2_euler_lp_limited[3])/log(dx[2]/dx[3])))
print("rho ns l2: "+str(log(rl2_ns[0]/rl2_ns[1])/log(dx[0]/dx[1])))
print("rho ns end l2: "+str(log(rl2_ns[2]/rl2_ns[3])/log(dx[2]/dx[3])))
print("rho ns fp l2: "+str(log(rl2_ns_fp[0]/rl2_ns_fp[1])/log(dx_fp[0]/dx_fp[1])))
print("rho ns fp end l2: "+str(log(rl2_ns_fp[3]/rl2_ns_fp[4])/log(dx_fp[3]/dx_fp[4])))
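# The prints above all apply the same observed-order formula
# p = log(e_i / e_j) / log(h_i / h_j). A small helper makes that explicit
# (the function name is ours, not from the original script):
def observed_order(errors, sizes, i, j):
    """Estimate the convergence order from entries i and j."""
    return log(errors[i] / errors[j]) / log(sizes[i] / sizes[j])
# e.g. observed_order(rl2_euler, dx, 2, 3) reproduces the "rho euler l2" value.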
plt.figure()
hlines = plt.loglog(dx, rl2_euler, dx, rl2_ns, dx, rl2_euler_limited, dx, rl2_euler_lp_limited, dx_tri, rl2_euler_tri, dx_tri, rl2_euler_tri_limited, dx_pert[0:3], rl2_euler_tri_pert[0:3], dx_fp, rl2_ns_fp)
plt.rc('text', usetex=True)
plt.xlabel("Grid size")
plt.ylabel("$L_2$ error")
plt.legend(hlines, ["euler", "NS manufactured", "euler scalar limited", "euler lp limited", "euler tri", "euler tri limited", "euler tri pert", "NS flat plate"])
plt.grid(True,which="both")
plt.show()
| Rob-Rau/EbbCFD | ms_refinement/plot_conv.py | Python | mit | 2,727 |
import sublime, sublime_plugin
import os.path
import platform
def compare_file_names(x, y):
if platform.system() == 'Windows' or platform.system() == 'Darwin':
return x.lower() == y.lower()
else:
return x == y
class SwitchFileCommand(sublime_plugin.WindowCommand):
def run(self, extensions=[]):
if not self.window.active_view():
return
fname = self.window.active_view().file_name()
if not fname:
return
path = os.path.dirname(fname)
base, ext = os.path.splitext(fname)
start = 0
count = len(extensions)
if ext != "":
ext = ext[1:]
for i in range(0, len(extensions)):
if compare_file_names(extensions[i], ext):
start = i + 1
count -= 1
break
for i in range(0, count):
idx = (start + i) % len(extensions)
new_path = base + '.' + extensions[idx]
if os.path.exists(new_path):
self.window.open_file(new_path)
break
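
# Usage sketch: the command is typically bound to a key with an "extensions"
# argument. A hypothetical keymap entry (JSON, shown here as a comment):
#   { "keys": ["alt+o"], "command": "switch_file",
#     "args": {"extensions": ["cpp", "cxx", "cc", "c", "hpp", "hxx", "h"]} }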
| koery/win-sublime | Data/Packages/Default/switch_file.py | Python | mit | 1,112 |
from __future__ import print_function
from datetime import datetime
import inspect
import json
import logging
import os
try:
import pkg_resources
except ImportError: # pragma: no cover
pkg_resources = None
import random
import re
import subprocess
import shutil
import string
import sys
import time
import boto3
import botocore
import climax
from lambda_uploader.package import build_package
from merry import Merry
import yaml
from . import plugins
from .cfn import get_cfn_template
from .helpers import render_template
merry = Merry(logger_name='slam', debug='unittest' in sys.modules)
f = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
h = logging.FileHandler('slam_error.log')
h.setFormatter(f)
merry.logger.addHandler(h)
@merry._try
@climax.group()
@climax.argument('--config-file', '-c', default='slam.yaml',
help='The slam configuration file. Defaults to slam.yaml.')
def main(config_file):
return {'config_file': config_file}
@merry._except(RuntimeError, ValueError)
def on_error(e): # pragma: no cover
"""Error handler
RuntimeError or ValueError exceptions raised by commands will be handled
by this function.
"""
    exname = {'RuntimeError': 'Runtime error', 'ValueError': 'Value error'}
sys.stderr.write('{}: {}\n'.format(exname[e.__class__.__name__], str(e)))
sys.stderr.write('See file slam_error.log for additional details.\n')
sys.exit(1)
@merry._except(Exception)
def on_unexpected_error(e): # pragma: no cover
"""Catch-all error handler
Unexpected errors will be handled by this function.
"""
sys.stderr.write('Unexpected error: {} ({})\n'.format(
str(e), e.__class__.__name__))
sys.stderr.write('See file slam_error.log for additional details.\n')
sys.exit(1)
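
# For reference, a minimal slam.yaml sketch covering only the keys this module
# reads (loaded by _load_config below); the values are illustrative
# assumptions, not defaults:
#
#   name: my-api
#   function:
#     module: my_module
#     app: app
#   requirements: requirements.txt
#   devstage: dev
#   stage_environments:
#     dev: {}
#   aws:
#     s3_bucket: my-api-bucket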
def _load_config(config_file='slam.yaml'):
try:
with open(config_file) as f:
return yaml.load(f, Loader=yaml.FullLoader)
except IOError:
# there is no config file in the current directory
raise RuntimeError('Config file {} not found. Did you run '
'"slam init"?'.format(config_file))
@main.command()
@climax.argument('--runtime', default=None,
help=('The Lambda runtime to use, such as python2.7 or '
'python3.6'))
@climax.argument('--requirements', default='requirements.txt',
help='The location of the project\'s requirements file.')
@climax.argument('--stages', default='dev',
help='Comma-separated list of stage environments to deploy.')
@climax.argument('--memory', type=int, default=128,
help=('The memory allocation for the lambda function in '
'megabytes.'))
@climax.argument('--timeout', type=int, default=10,
help='The timeout for the lambda function in seconds.')
@climax.argument('--bucket',
help='S3 bucket where lambda packages are stored.')
@climax.argument('--description', default='Deployed with slam.',
help='Description of the API.')
@climax.argument('--name',
help='API name.')
@climax.argument('function',
help='The function or callable to deploy, in the format '
'module:function.')
def init(name, description, bucket, timeout, memory, stages, requirements,
function, runtime, config_file, **kwargs):
"""Generate a configuration file."""
if os.path.exists(config_file):
raise RuntimeError('Please delete the old version {} if you want to '
'reconfigure your project.'.format(config_file))
module, app = function.split(':')
if not name:
name = module.replace('_', '-')
if not re.match('^[a-zA-Z][-a-zA-Z0-9]*$', name):
raise ValueError('The name {} is invalid, only letters, numbers and '
'dashes are allowed.'.format(name))
if not bucket:
random_suffix = ''.join(
random.choice(string.ascii_lowercase + string.digits)
for n in range(8))
bucket = '{}-{}'.format(name.lower(), random_suffix)
stages = [s.strip() for s in stages.split(',')]
if runtime is None:
if sys.version_info[0] == 2: # pragma: no cover
runtime = 'python2.7'
else:
runtime = 'python3.6'
# generate slam.yaml
template_file = os.path.join(os.path.dirname(__file__),
'templates/slam.yaml')
with open(template_file) as f:
template = f.read()
template = render_template(template, name=name, description=description,
module=module, app=app, bucket=bucket,
timeout=timeout, memory=memory,
requirements=requirements, stages=stages,
devstage=stages[0], runtime=runtime)
with open(config_file, 'wt') as f:
f.write(template)
# plugins
config = _load_config(config_file)
for name, plugin in plugins.items():
# write plugin documentation as a comment in config file
with open(config_file, 'at') as f:
f.write('\n\n# ' + (plugin.__doc__ or name).replace(
'\n', '\n# ') + '\n')
if hasattr(plugin, 'init'):
arguments = {k: v for k, v in kwargs.items()
if k in getattr(plugin.init, '_argnames', [])}
plugin_config = plugin.init.func(config=config, **arguments)
if plugin_config:
with open(config_file, 'at') as f:
yaml.dump({name: plugin_config}, f,
default_flow_style=False)
print('The configuration file for your project has been generated. '
'Remember to add {} to source control.'.format(config_file))
def _run_command(cmd):
try:
proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, err = proc.communicate()
except OSError:
raise RuntimeError('Invalid command {}'.format(cmd))
if proc.returncode != 0:
print(out)
        raise RuntimeError('Command failed with exit code {}.'.format(
            proc.returncode))
return out
def _run_lambda_function(event, context, app, config): # pragma: no cover
"""Run the function. This is the default when no plugins (such as wsgi)
define an alternative run function."""
args = event.get('args', [])
kwargs = event.get('kwargs', {})
# first attempt to invoke the function passing the lambda event and context
try:
ret = app(*args, event=event, context=context, **kwargs)
except TypeError:
# try again without passing the event and context
ret = app(*args, **kwargs)
return ret
def _generate_lambda_handler(config, output='.slam/handler.py'):
"""Generate a handler.py file for the lambda function start up."""
# Determine what the start up code is. The default is to just run the
# function, but it can be overriden by a plugin such as wsgi for a more
# elaborated way to run the function.
run_function = _run_lambda_function
for name, plugin in plugins.items():
if name in config and hasattr(plugin, 'run_lambda_function'):
run_function = plugin.run_lambda_function
run_code = ''.join(inspect.getsourcelines(run_function)[0][1:])
# generate handler.py
with open(os.path.join(os.path.dirname(__file__),
'templates/handler.py.template')) as f:
template = f.read()
template = render_template(template, module=config['function']['module'],
app=config['function']['app'],
run_lambda_function=run_code,
config_json=json.dumps(config,
separators=(',', ':')))
with open(output, 'wt') as f:
f.write(template + '\n')
def _build(config, rebuild_deps=False):
package = datetime.utcnow().strftime("lambda_package.%Y%m%d_%H%M%S.zip")
ignore = ['\\.slam\\/venv\\/.*$', '\\.pyc$']
if os.environ.get('VIRTUAL_ENV'):
# make sure the currently active virtualenv is not included in the pkg
venv = os.path.relpath(os.environ['VIRTUAL_ENV'], os.getcwd())
if not venv.startswith('.'):
ignore.append(venv.replace('/', '\\/') + '\\/.*$')
# create .slam directory if it doesn't exist yet
if not os.path.exists('.slam'):
os.mkdir('.slam')
_generate_lambda_handler(config)
# create or update virtualenv
if rebuild_deps:
if os.path.exists('.slam/venv'):
shutil.rmtree('.slam/venv')
if not os.path.exists('.slam/venv'):
_run_command('virtualenv .slam/venv')
_run_command('.slam/venv/bin/pip install -r ' + config['requirements'])
# build lambda package
build_package('.', config['requirements'], virtualenv='.slam/venv',
extra_files=['.slam/handler.py'], ignore=ignore,
zipfile_name=package)
# cleanup lambda uploader's temp directory
if os.path.exists('.lambda_uploader_temp'):
shutil.rmtree('.lambda_uploader_temp')
return package
def _get_aws_region(): # pragma: no cover
return boto3.session.Session().region_name
def _ensure_bucket_exists(s3, bucket, region): # pragma: no cover
try:
s3.head_bucket(Bucket=bucket)
except botocore.exceptions.ClientError:
if region != 'us-east-1':
s3.create_bucket(Bucket=bucket, CreateBucketConfiguration={
'LocationConstraint': region})
else:
s3.create_bucket(Bucket=bucket)
def _get_from_stack(stack, source, key):
value = None
if source + 's' not in stack:
        raise ValueError('Invalid stack attribute ' + str(stack))
for p in stack[source + 's']:
if p[source + 'Key'] == key:
value = p[source + 'Value']
break
return value
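
# For illustration, given the boto3 describe_stacks shape this helper expects:
#   stack = {'Parameters': [{'ParameterKey': 'LambdaS3Key',
#                            'ParameterValue': 'pkg.zip'}]}
#   _get_from_stack(stack, 'Parameter', 'LambdaS3Key')  # -> 'pkg.zip'
#   _get_from_stack(stack, 'Output', 'FunctionArn')     # raises ValueError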
def _print_status(config):
cfn = boto3.client('cloudformation')
lmb = boto3.client('lambda')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
print('{} has not been deployed yet.'.format(config['name']))
else:
print('{} is deployed!'.format(config['name']))
print(' Function name: {}'.format(
_get_from_stack(stack, 'Output', 'FunctionArn').split(':')[-1]))
print(' S3 bucket: {}'.format(config['aws']['s3_bucket']))
print(' Stages:')
stages = list(config['stage_environments'].keys())
stages.sort()
plugin_status = {}
for name, plugin in plugins.items():
if name in config and hasattr(plugin, 'status'):
statuses = plugin.status(config, stack)
if statuses:
for s, status in statuses.items():
plugin_status.setdefault(s, []).append(status)
for s in stages:
fd = None
try:
fd = lmb.get_function(FunctionName=_get_from_stack(
stack, 'Output', 'FunctionArn'), Qualifier=s)
except botocore.exceptions.ClientError: # pragma: no cover
continue
v = ':{}'.format(fd['Configuration']['Version'])
if s in plugin_status and len(plugin_status[s]) > 0:
print(' {}{}: {}'.format(s, v,
' '.join(plugin_status[s])))
else:
print(' {}{}'.format(s, v))
@main.command()
@climax.argument('--rebuild-deps', action='store_true',
help='Reinstall all dependencies.')
def build(rebuild_deps, config_file):
"""Build lambda package."""
config = _load_config(config_file)
print("Building lambda package...")
package = _build(config, rebuild_deps=rebuild_deps)
print("{} has been built successfully.".format(package))
@main.command()
@climax.argument('--stage',
help=('Stage to deploy to. Defaults to the stage designated '
'as the development stage'))
@climax.argument('--lambda-package',
help='Custom lambda zip package to deploy.')
@climax.argument('--no-lambda', action='store_true',
help='Do no deploy a new lambda.')
@climax.argument('--rebuild-deps', action='store_true',
help='Reinstall all dependencies.')
def deploy(stage, lambda_package, no_lambda, rebuild_deps, config_file):
"""Deploy the project to the development stage."""
config = _load_config(config_file)
if stage is None:
stage = config['devstage']
s3 = boto3.client('s3')
cfn = boto3.client('cloudformation')
region = _get_aws_region()
# obtain previous deployment if it exists
previous_deployment = None
try:
previous_deployment = cfn.describe_stacks(
StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
pass
# build lambda package if required
built_package = False
new_package = True
if lambda_package is None and not no_lambda:
print("Building lambda package...")
lambda_package = _build(config, rebuild_deps=rebuild_deps)
built_package = True
elif lambda_package is None:
# preserve package from previous deployment
new_package = False
lambda_package = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Key')
# create S3 bucket if it doesn't exist yet
bucket = config['aws']['s3_bucket']
_ensure_bucket_exists(s3, bucket, region)
# upload lambda package to S3
if new_package:
s3.upload_file(lambda_package, bucket, lambda_package)
if built_package:
# we created the package, so now that is on S3 we can delete it
os.remove(lambda_package)
# prepare cloudformation template
template_body = get_cfn_template(config)
parameters = [
{'ParameterKey': 'LambdaS3Bucket', 'ParameterValue': bucket},
{'ParameterKey': 'LambdaS3Key', 'ParameterValue': lambda_package},
]
stages = list(config['stage_environments'].keys())
stages.sort()
for s in stages:
param = s.title() + 'Version'
if s != stage:
v = _get_from_stack(previous_deployment, 'Parameter', param) \
if previous_deployment else '$LATEST'
v = v or '$LATEST'
else:
v = '$LATEST'
parameters.append({'ParameterKey': param, 'ParameterValue': v})
# run the cloudformation template
if previous_deployment is None:
print('Deploying {}:{}...'.format(config['name'], stage))
cfn.create_stack(StackName=config['name'], TemplateBody=template_body,
Parameters=parameters,
Capabilities=['CAPABILITY_IAM'])
waiter = cfn.get_waiter('stack_create_complete')
else:
print('Updating {}:{}...'.format(config['name'], stage))
cfn.update_stack(StackName=config['name'], TemplateBody=template_body,
Parameters=parameters,
Capabilities=['CAPABILITY_IAM'])
waiter = cfn.get_waiter('stack_update_complete')
# wait for cloudformation to do its thing
try:
waiter.wait(StackName=config['name'])
except botocore.exceptions.ClientError:
# the update failed, so we remove the lambda package from S3
if built_package:
s3.delete_object(Bucket=bucket, Key=lambda_package)
raise
else:
if previous_deployment and new_package:
# the update succeeded, so it is safe to delete the lambda package
# used by the previous deployment
old_pkg = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Key')
s3.delete_object(Bucket=bucket, Key=old_pkg)
# we are done, show status info and exit
_print_status(config)
@main.command()
@climax.argument('--version',
help=('Stage name or numeric version to publish. '
'Defaults to the development stage.'))
@climax.argument('stage', help='Stage to publish to.')
def publish(version, stage, config_file):
"""Publish a version of the project to a stage."""
config = _load_config(config_file)
cfn = boto3.client('cloudformation')
if version is None:
version = config['devstage']
elif version not in config['stage_environments'].keys() and \
not version.isdigit():
raise ValueError('Invalid version. Use a stage name or a numeric '
'version number.')
if version == stage:
raise ValueError('Cannot deploy a stage into itself.')
# obtain previous deployment
try:
previous_deployment = cfn.describe_stacks(
StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
raise RuntimeError('This project has not been deployed yet.')
# preserve package from previous deployment
bucket = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Bucket')
lambda_package = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Key')
# prepare cloudformation template
template_body = get_cfn_template(config)
parameters = [
{'ParameterKey': 'LambdaS3Bucket', 'ParameterValue': bucket},
{'ParameterKey': 'LambdaS3Key', 'ParameterValue': lambda_package},
]
stages = list(config['stage_environments'].keys())
stages.sort()
for s in stages:
param = s.title() + 'Version'
if s != stage:
v = _get_from_stack(previous_deployment, 'Parameter', param) \
if previous_deployment else '$LATEST'
v = v or '$LATEST'
else:
if version.isdigit():
# explicit version number
v = version
else:
# publish version from a stage
v = _get_from_stack(previous_deployment, 'Parameter',
version.title() + 'Version')
if v == '$LATEST':
# publish a new version from $LATEST
lmb = boto3.client('lambda')
v = lmb.publish_version(FunctionName=_get_from_stack(
previous_deployment, 'Output', 'FunctionArn'))[
'Version']
parameters.append({'ParameterKey': param, 'ParameterValue': v})
# run the cloudformation template
print('Publishing {}:{} to {}...'.format(config['name'], version, stage))
cfn.update_stack(StackName=config['name'], TemplateBody=template_body,
Parameters=parameters,
Capabilities=['CAPABILITY_IAM'])
waiter = cfn.get_waiter('stack_update_complete')
# wait for cloudformation to do its thing
try:
waiter.wait(StackName=config['name'])
except botocore.exceptions.ClientError:
raise
# we are done, show status info and exit
_print_status(config)
@main.command()
@climax.argument('args', nargs='*',
help='Input arguments for the function. Use arg=value for '
                      'strings, or arg:=value for integers, booleans or JSON '
'structures.')
@climax.argument('--dry-run', action='store_true',
help='Just check that the function can be invoked.')
@climax.argument('--nowait', action='store_true',
help='Invoke the function but don\'t wait for it to return.')
@climax.argument('--stage', help='Stage of the invoked function. Defaults to '
'the development stage')
def invoke(stage, nowait, dry_run, config_file, args):
"""Invoke the lambda function."""
config = _load_config(config_file)
if stage is None:
stage = config['devstage']
cfn = boto3.client('cloudformation')
lmb = boto3.client('lambda')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
raise RuntimeError('This project has not been deployed yet.')
function = _get_from_stack(stack, 'Output', 'FunctionArn')
if dry_run:
invocation_type = 'DryRun'
elif nowait:
invocation_type = 'Event'
else:
invocation_type = 'RequestResponse'
# parse input arguments
data = {}
for arg in args:
s = arg.split('=', 1)
if len(s) != 2:
raise ValueError('Invalid argument ' + arg)
if s[0][-1] == ':':
# JSON argument
data[s[0][:-1]] = json.loads(s[1])
else:
# string argument
data[s[0]] = s[1]
rv = lmb.invoke(FunctionName=function, InvocationType=invocation_type,
Qualifier=stage,
Payload=json.dumps({'kwargs': data}, sort_keys=True))
if rv['StatusCode'] != 200 and rv['StatusCode'] != 202:
raise RuntimeError('Unexpected error. Status code = {}.'.format(
rv['StatusCode']))
if invocation_type == 'RequestResponse':
payload = json.loads(rv['Payload'].read().decode('utf-8'))
if 'FunctionError' in rv:
if 'stackTrace' in payload:
print('Traceback (most recent call last):')
for frame in payload['stackTrace']:
print(' File "{}", line {}, in {}'.format(
frame[0], frame[1], frame[2]))
print(' ' + frame[3])
print('{}: {}'.format(payload['errorType'],
payload['errorMessage']))
else:
raise RuntimeError('Unknown error')
else:
print(str(payload))
@main.command()
@climax.argument('--no-logs', action='store_true', help='Do not delete logs.')
def delete(no_logs, config_file):
"""Delete the project."""
config = _load_config(config_file)
s3 = boto3.client('s3')
cfn = boto3.client('cloudformation')
logs = boto3.client('logs')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
raise RuntimeError('This project has not been deployed yet.')
bucket = _get_from_stack(stack, 'Parameter', 'LambdaS3Bucket')
lambda_package = _get_from_stack(stack, 'Parameter', 'LambdaS3Key')
function = _get_from_stack(stack, 'Output', 'FunctionArn').split(':')[-1]
api_id = _get_from_stack(stack, 'Output', 'ApiId')
if api_id:
log_groups = ['API-Gateway-Execution-Logs_' + api_id + '/' + stage
for stage in config['stage_environments'].keys()]
else:
log_groups = []
log_groups.append('/aws/lambda/' + function)
print('Deleting {}...'.format(config['name']))
cfn.delete_stack(StackName=config['name'])
waiter = cfn.get_waiter('stack_delete_complete')
waiter.wait(StackName=config['name'])
if not no_logs:
print('Deleting logs...')
for log_group in log_groups:
try:
logs.delete_log_group(logGroupName=log_group)
except botocore.exceptions.ClientError:
print(' Log group {} could not be deleted.'.format(log_group))
print('Deleting files...')
try:
s3.delete_object(Bucket=bucket, Key=lambda_package)
s3.delete_bucket(Bucket=bucket)
except botocore.exceptions.ClientError:
print(' S3 bucket {} could not be deleted.'.format(bucket))
@main.command()
def status(config_file):
"""Show deployment status for the project."""
config = _load_config(config_file)
_print_status(config)
@main.command()
@climax.argument('--tail', '-t', action='store_true',
help='Tail the log stream')
@climax.argument('--period', '-p', default='1m',
help=('How far back to start, in weeks (1w), days (2d), '
'hours (3h), minutes (4m) or seconds (5s). Default '
'is 1m.'))
@climax.argument('--stage',
help=('Stage to show logs for. Defaults to the stage '
'designated as the development stage'))
def logs(stage, period, tail, config_file):
"""Dump logs to the console."""
config = _load_config(config_file)
if stage is None:
stage = config['devstage']
cfn = boto3.client('cloudformation')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
print('{} has not been deployed yet.'.format(config['name']))
return
function = _get_from_stack(stack, 'Output', 'FunctionArn').split(':')[-1]
version = _get_from_stack(stack, 'Parameter', stage.title() + 'Version')
api_id = _get_from_stack(stack, 'Output', 'ApiId')
try:
start = float(period[:-1])
except ValueError:
raise ValueError('Invalid period ' + period)
if period[-1] == 's':
start = time.time() - start
elif period[-1] == 'm':
start = time.time() - start * 60
elif period[-1] == 'h':
start = time.time() - start * 60 * 60
elif period[-1] == 'd':
start = time.time() - start * 60 * 60 * 24
elif period[-1] == 'w':
start = time.time() - start * 60 * 60 * 24 * 7
else:
raise ValueError('Invalid period ' + period)
start = int(start * 1000)
logs = boto3.client('logs')
lambda_log_group = '/aws/lambda/' + function
log_groups = [lambda_log_group]
if api_id:
log_groups.append('API-Gateway-Execution-Logs_' + api_id + '/' + stage)
log_version = '[' + version + ']'
log_start = {g: start for g in log_groups}
while True:
kwargs = {}
events = []
for log_group in log_groups:
while True:
try:
filtered_logs = logs.filter_log_events(
logGroupName=log_group,
startTime=log_start[log_group],
interleaved=True, **kwargs)
except botocore.exceptions.ClientError:
# the log group does not exist yet
filtered_logs = {'events': []}
if log_group == lambda_log_group:
events += [ev for ev in filtered_logs['events']
if log_version in ev['logStreamName']]
else:
events += filtered_logs['events']
if len(filtered_logs['events']):
log_start[log_group] = \
filtered_logs['events'][-1]['timestamp'] + 1
if 'nextToken' not in filtered_logs:
break
kwargs['nextToken'] = filtered_logs['nextToken']
events.sort(key=lambda ev: ev['timestamp'])
for ev in events:
tm = datetime.fromtimestamp(ev['timestamp'] / 1000)
print(tm.strftime('%b %d %X ') + ev['message'].strip())
if not tail:
break
time.sleep(5)
@main.command()
def template(config_file):
"""Print the default Cloudformation deployment template."""
config = _load_config(config_file)
print(get_cfn_template(config, pretty=True))
def register_plugins():
"""find any installed plugins and register them."""
if pkg_resources: # pragma: no cover
for ep in pkg_resources.iter_entry_points('slam_plugins'):
plugin = ep.load()
# add any init options to the main init command
if hasattr(plugin, 'init') and hasattr(plugin.init, '_arguments'):
for arg in plugin.init._arguments:
init.parser.add_argument(*arg[0], **arg[1])
init._arguments += plugin.init._arguments
init._argnames += plugin.init._argnames
plugins[ep.name] = plugin
register_plugins() # pragma: no cover
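
# A few hedged usage examples (module, stage and argument names are
# hypothetical):
#   slam init my_module:app --stages dev,prod
#   slam deploy --stage dev
#   slam invoke name=world count:=3   # arg=value for strings, arg:=value for JSON
#   slam logs --tail --period 15m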
| miguelgrinberg/slam | slam/cli.py | Python | mit | 28,330 |
import sys
import mechanize
import re
import json
import time
import urllib
import dogcatcher
import HTMLParser
import os
h = HTMLParser.HTMLParser()
cdir = os.path.dirname(os.path.abspath(__file__)) + "/"
tmpdir = cdir + "tmp/"
voter_state = "SC"
source = "State"
result = [("authority_name", "first_name", "last_name", "county_name", "fips",
"street", "city", "address_state", "zip_code",
"po_street", "po_city", "po_state", "po_zip_code",
"reg_authority_name", "reg_first", "reg_last",
"reg_street", "reg_city", "reg_state", "reg_zip_code",
"reg_po_street", "reg_po_city", "reg_po_state", "reg_po_zip_code",
"reg_phone", "reg_fax", "reg_email", "reg_website", "reg_hours",
"phone", "fax", "email", "website", "hours", "voter_state", "source", "review")]
#Every county is on a different webpage so we have to cycle through them all.
#To do so, we go elsewhere, extract a list of counties, then later grab a series of web pages based on that list.
#(Writing it to a file isn't strictly necessary, but saves some time down the line.)
file_path = tmpdir + "south_carolina-counties.html"
url = "http://www.scvotes.org/how_to_register_absentee_voting"
data = urllib.urlopen(url).read()
output = open(file_path,"w")
output.write(data)
output.close()
data = open(file_path).read()
#First, we trim the counties page to the minimum needed information, which starts at the list of per-county links.
data = data.partition("<a href=\"/how_to_register_absentee_voting/abbeville\" class=\"page-next\"")[0]
#For each county, we grab a URL ender (county_links) and the county name, as represented in the URL (county_links_names).
county_link_re = re.compile("(/how_to_register_absentee_voting/.+?)\">")
county_link_name_re = re.compile("/how_to_register_absentee_voting/(.+?)\">")
county_links = county_link_re.findall(data)
county_link_names = county_link_name_re.findall(data)
#Once we have those in place, we start setting up regexes that are used in cleaning individual counties.
county_name_re = re.compile(">([^<>]+? County) .+?<[pbr /]>")
relevant_re = re.compile("(<div class=\"content.+?)<!-- end content", re.DOTALL)
phone_re =re.compile(">[^x]*?(\(*\d{3}\)*[ -]*\d{3}-.+?)[<F]")
phone_format_re = re.compile("(\(*\d{3}\)* *\d{3}-\d{4})")
area_code_re = re.compile("\(\d{3}\) ")
digit_re = re.compile("\d")
fax_re = re.compile("Fax.+?(\(*\d{3}\)*.+?)<")
official_name_1_re = re.compile("Director[</u>]* *[:-] *([A-Za-z\. -]+).+?<")
official_name_2_re = re.compile("<[br /p]*>([A-Za-z\. -]+?)<[^<>]*><[^<>]*>[Email: ]*<a href=\"mailto:")
official_name_3_re = re.compile("<[br /p]*>([A-Za-z\. -]+?)<[^<>]*><[^<>]*><[^<>]*><a href=\"mailto:")
official_name_4_re = re.compile("<[br /p]*>([A-Za-z\. -]+?)<[^<>]*><[^<>]*><[^<>]*><a href=\"/files")
official_name_5_re = re.compile(">([A-Za-z\. -]+?), [^<>]*?Director")
official_name_6_re = re.compile("Fax .+?<[^<>]*><[^<>]*>([A-Za-z\. -]+?)<")
website_re = re.compile("a href=\"(h.+?)\"")
#email_re = re.compile("mailto:%*2*0*(.+?) *\".*?>")
email_re = re.compile("[A-Za-z\.-]+?@[A-Za-z\.-]+")
email_junk_re = re.compile("@[^<>]+?\.[cg]o[mv](.*?)<")
font_re = re.compile("</*font.+?>")
style_re = re.compile("(style.+?\")>")
span_re = re.compile("</*span.+?>")
w_re = re.compile("</*w:.+?>")
u_re = re.compile("</*u>")
m_re = re.compile("</*m:.+?>")
set_re = re.compile("{.+?}")
comment_re = re.compile("<!--.+?>")
charleston_re = re.compile(" [A-Z][A-Z](.+?)\d{5}[\d-]*")
richland_fix_re = re.compile("Military and Overseas Correspondence.+?</a>")
address_re = re.compile("<[br p/]*>([^<>]*\d[^>]+?<.+?\d{5}[\d-]*) *<[brp/ ]*>")
csz_re = re.compile("[\d>] *([A-Za-z \.]+?,* [A-Z][A-Z] +\d{5}[\d-]*)")
po_re = re.compile("(P*o*s*t* *Of*i*c*e* .+?)<")
city_re = re.compile("(.+?),* [A-Z][A-Z] ")
state_re = re.compile(" ([A-Z][A-Z]) ")
zip_re = re.compile("\d{5}[\d-]*")
zip_mod_re = re.compile("\(\d{5}[\d-]*\)")
mailing_region_re = re.compile("Mailing Address.+?[A-Z][A-Z] \d{5}[\d-]* *<[brp/ ]*>")
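#For illustration, how the city/state/zip regex behaves on a typical HTML
#fragment (the address itself is made up):
# csz_re.findall("<br />Columbia, SC 29201") -> ["Columbia, SC 29201"]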
for link in county_links:
authority_name, first_name, last_name, county_name, town_name, fips, street, city, address_state, zip_code, po_street, po_city, po_state, po_zip_code, reg_authority_name, reg_first, reg_last, reg_street, reg_city, reg_state, reg_zip_code, reg_po_street, reg_po_city, reg_po_state, reg_po_zip_code, reg_phone, reg_fax, reg_email, reg_website, reg_hours, phone, fax, email, website, hours, review = dogcatcher.begin(voter_state)
link_name = county_link_names[county_links.index(link)]
file_name = tmpdir + link_name + "-sc-clerks.html"
url = "http://www.scvotes.org" + link
data = urllib.urlopen(url).read()
output = open(file_name,"w")
output.write(data)
output.close()
county = open(file_name).read()
#Trimming the county.
county = relevant_re.findall(county)[0]
#There are a tremendous number of useless HTML tags or county-specific fixes. This code cleans them up so we don't have to deal with them elsewhere.
for junk in email_junk_re.findall(county):
county = county.replace(junk,"")
for font in font_re.findall(county):
county = county.replace(font,"")
for style in style_re.findall(county):
county = county.replace(style,"")
for span in span_re.findall(county):
county = county.replace(span,"")
for w in w_re.findall(county):
county = county.replace(w,"")
for u in u_re.findall(county):
county = county.replace(u,"")
for m in m_re.findall(county):
county = county.replace(m,"")
for comment in comment_re.findall(county):
county = county.replace(comment,"")
for s in set_re.findall(county):
county = county.replace(s,"")
for item in charleston_re.findall(county):
county = county.replace(item," ")
for item in richland_fix_re.findall(county):
county = county.replace(item," ")
#fixing errors in Dillon, Florence, and Newberry Counties
county = county.replace("sedwardsvr17","<a href=\"mailto:[email protected]\"").replace("%3",":").replace("%40","@").replace("brogers","<a href=\"mailto:[email protected]\"")
county_name = county_name_re.findall(county)[0].replace(" County","").strip()
print "__________________________________"
#unique case in Aiken County:
    if county_name == "Aiken":
reg_email = "[email protected]"
county.replace("[email protected]","")
phone = dogcatcher.find_phone(phone_re, county)
for item in phone_re.findall(county):
county = county.replace(item, "")
#Many of the fax numbers don't have area codes. So we grab the first area code we find in the block of phone numbers and give it to the fax number.
area_code = area_code_re.findall(phone)[0]
fax = dogcatcher.find_phone(fax_re, county, area_code)
for item in fax_re.findall(county):
county = county.replace(item, "")
county = county.replace("Fax", "")
#unique case in Greenwood County, which gives a separate phone number for registration-related contacts:
    if county_name == "Greenwood":
phone = "(864) 942-3152, (864) 942-3153, (864) 942-5667"
fax = "(804) 942-5664"
county = county.replace(phone,"").replace(fax,"")
reg_phone = "(864) 942-8585"
county.replace("(864) 942-8585","")
reg_fax = "(846) 942-5664"
county.replace("942-5664","")
#Some counties have a registration-only email address. In those counties, the absentee email has "absentee" in it.
#Websites have similar problems
print county
email = dogcatcher.find_emails(email_re, county)
if "absentee" in email:
emails = email.split(", ")
email = ""
for item in emails:
county = county.replace(item, "")
if "absentee" in item:
email = email + ", " + item
else:
reg_email = reg_email + ", " + item
email = email.strip(", ")
reg_email = reg_email.strip(", ")
else:
for item in email_re.findall(county):
county = county.replace(item, "")
website = dogcatcher.find_website(website_re, county)
if "absentee" in website:
websites = website.split(", ")
website = ""
for item in websites:
county = county.replace(item, "")
if "absentee" in item:
website = website + ", " + item
else:
reg_website = reg_website + ", " + item
else:
for item in website_re.findall(county):
county = county.replace(item, "")
website = website.strip(", ")
reg_website = reg_website.strip(", ")
print [email]
#There are many forms the official's name can take. This tries all of them.
if official_name_1_re.findall(county):
official_name = official_name_1_re.findall(county)[0].strip()
elif official_name_2_re.findall(county):
official_name = official_name_2_re.findall(county)[0].strip()
elif official_name_3_re.findall(county):
official_name = official_name_3_re.findall(county)[0].strip()
elif official_name_4_re.findall(county):
official_name = official_name_4_re.findall(county)[0].strip()
elif official_name_5_re.findall(county):
official_name = official_name_5_re.findall(county)[0].strip()
elif official_name_6_re.findall(county):
official_name = official_name_6_re.findall(county)[0].strip()
else:
official_name = ""
if official_name:
first_name, last_name, review = dogcatcher.split_name(official_name, review)
county = county.replace(official_name,"")
print "++++++++++++++++++++++++++++++++++++++"
    if county_name == "Charleston":
county = county.replace("Post Office","Mailing Address:<> Post Office")
#Some counties don't put a marked "Mailing Address" section, but do have a separate mailing address.
#So first, we check whether the county has "Mailing Address" in it.
if "Mailing Address" not in county:
#This section finds the full address. After finding the address, it identifies a city/state/zip (csz) combination and a PO Box number if that exists.
#It removes both the CSZ and the PO Address (if it exists) from the full address, leaving behind a street address with some garbage.
#It then cleans up the street address and pulls the city, state, and zip out of the csz, and assigns them as appropriate to the street address and state.
address = address_re.findall(county)[0]
csz = csz_re.findall(address)[0]
address = address.replace(csz,"")
        try:
            po_street = po_re.findall(address)[0].replace("</b><p>","")
        except IndexError:
            po_street = ""
street = address.replace(po_street,"").replace(csz,"").replace("</b><p>","")
street = street.replace("<p>",", ").replace("</p>",", ").replace("<br />",", ").replace(",,",", ").replace(" ,",",").replace(",,",", ").replace(", , ",", ").strip(" /,")
if po_street:
po_city = city_re.findall(csz)[0]
po_state = state_re.findall(csz)[0]
po_zip_code = zip_re.findall(csz)[0]
if street:
city = city_re.findall(csz)[0]
address_state = state_re.findall(csz)[0]
zip_code = zip_re.findall(csz)[0]
else:
#If there's an explicitly stated mailing address, we find it, and then pull the mailing address out of it.
#At the same time, we cut the mailing address out of the entire county and find a physical address in what's left of the county.
#We then clean both of those addresses appropriately.
mailing_region = mailing_region_re.findall(county)[0]
county = county.replace(mailing_region,"")
mailing_addresss = address_re.findall(mailing_region)[0]
po_street = po_re.findall(mailing_addresss)[0]
csz = csz_re.findall(mailing_addresss)[0]
po_city = city_re.findall(csz)[0]
po_state = state_re.findall(csz)[0]
po_zip_code = zip_re.findall(csz)[0]
address = address_re.findall(county)[0]
csz = csz_re.findall(address)[0]
street = address.replace(csz,"").replace("</b><p>","")
street = street.replace("<p>",", ").replace("</p>",", ").replace("<br />",", ").replace(",,",", ").replace(" ,",",").replace(",,",", ").replace(", , ",", ").strip(" /,")
city = city_re.findall(csz)[0]
address_state = state_re.findall(csz)[0]
zip_code = zip_re.findall(csz)[0]
#Some of the addresses have a more detailed zip code appended to the street address or po_street.
#This checks for that, reassigns the and removes it if it appears.
if zip_mod_re.findall(street):
zip_code = zip_mod_re.findall(street)[0].strip("()")
street = street.replace(zip_code,"").strip(" ()")
if zip_mod_re.findall(po_street):
po_zip_code = zip_mod_re.findall(po_street)[0].strip("()")
po_street = po_street.replace(zip_code,"").strip(" ()")
fips = dogcatcher.find_fips(county_name, voter_state)
result.append([authority_name, first_name, last_name, county_name, fips,
street, city, address_state, zip_code,
po_street, po_city, po_state, po_zip_code,
reg_authority_name, reg_first, reg_last,
reg_street, reg_city, reg_state, reg_zip_code,
reg_po_street, reg_po_city, reg_po_state, reg_po_zip_code,
reg_phone, reg_fax, reg_email, reg_website, reg_hours,
phone, fax, email, website, hours, voter_state, source, review])
#This outputs the results to a separate text file.
dogcatcher.output(result, voter_state, cdir)
| democracyworks/dog-catcher | south_carolina.py | Python | mit | 12,842 |
"""The tests for the MQTT cover platform."""
from unittest.mock import patch
import pytest
from homeassistant.components import cover
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
)
from homeassistant.components.mqtt import CONF_STATE_TOPIC
from homeassistant.components.mqtt.cover import (
CONF_GET_POSITION_TEMPLATE,
CONF_GET_POSITION_TOPIC,
CONF_SET_POSITION_TEMPLATE,
CONF_SET_POSITION_TOPIC,
CONF_TILT_COMMAND_TEMPLATE,
CONF_TILT_COMMAND_TOPIC,
CONF_TILT_STATUS_TEMPLATE,
CONF_TILT_STATUS_TOPIC,
MQTT_COVER_ATTRIBUTES_BLOCKED,
MqttCover,
)
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
CONF_VALUE_TEMPLATE,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_encoding_subscribable_topics,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_publishing_with_custom_encoding,
help_test_reloadable,
help_test_reloadable_late,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_setting_blocked_attribute_via_mqtt_json_message,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.common import async_fire_mqtt_message
DEFAULT_CONFIG = {
cover.DOMAIN: {"platform": "mqtt", "name": "test", "state_topic": "test-topic"}
}
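
# These tests follow the standard Home Assistant test layout; a typical
# invocation (path assumed from that layout) would be:
#   pytest tests/components/mqtt/test_cover.py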
async def test_state_via_state_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "state-topic", STATE_OPEN)
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async def test_opening_and_closing_state_via_custom_state_payload(hass, mqtt_mock):
"""Test the controlling opening and closing state via a custom payload."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"state_opening": "34",
"state_closing": "--43",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "34")
state = hass.states.get("cover.test")
assert state.state == STATE_OPENING
async_fire_mqtt_message(hass, "state-topic", "--43")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSING
async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_open_closed_state_from_position_optimistic(hass, mqtt_mock):
"""Test the state after setting the position using optimistic mode."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "position-topic",
"set_position_topic": "set-position-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"optimistic": True,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 0},
blocking=True,
)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 100},
blocking=True,
)
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
assert state.attributes.get(ATTR_ASSUMED_STATE)
async def test_position_via_position_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"position_open": 100,
"position_closed": 0,
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "get-position-topic", "0")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "get-position-topic", "100")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async def test_state_via_template(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"value_template": "\
{% if (value | multiply(0.01) | int) == 0 %}\
closed\
{% else %}\
open\
{% endif %}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "state-topic", "10000")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "state-topic", "99")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_state_via_template_and_entity_id(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"value_template": '\
{% if value == "open" or value == "closed" %}\
{{ value }}\
{% else %}\
{{ states(entity_id) }}\
{% endif %}',
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "state-topic", "open")
async_fire_mqtt_message(hass, "state-topic", "invalid")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "state-topic", "closed")
async_fire_mqtt_message(hass, "state-topic", "invalid")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_state_via_template_with_json_value(hass, mqtt_mock, caplog):
"""Test the controlling state via topic with JSON value."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"value_template": "{{ value_json.Var1 }}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "state-topic", '{ "Var1": "open", "Var2": "other" }')
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(
hass, "state-topic", '{ "Var1": "closed", "Var2": "other" }'
)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "state-topic", '{ "Var2": "other" }')
assert (
"Template variable warning: 'dict object' has no attribute 'Var1' when rendering"
) in caplog.text
async def test_position_via_template_and_entity_id(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"qos": 0,
"position_template": '\
{% if state_attr(entity_id, "current_position") == None %}\
{{ value }}\
{% else %}\
{{ state_attr(entity_id, "current_position") + value | int }}\
{% endif %}',
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "get-position-topic", "10")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 10
async_fire_mqtt_message(hass, "get-position-topic", "10")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 20
@pytest.mark.parametrize(
"config, assumed_state",
[
({"command_topic": "abc"}, True),
({"command_topic": "abc", "state_topic": "abc"}, False),
# ({"set_position_topic": "abc"}, True), - not a valid configuration
({"set_position_topic": "abc", "position_topic": "abc"}, False),
({"tilt_command_topic": "abc"}, True),
({"tilt_command_topic": "abc", "tilt_status_topic": "abc"}, False),
],
)
async def test_optimistic_flag(hass, mqtt_mock, config, assumed_state):
"""Test assumed_state is set correctly."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{cover.DOMAIN: {**config, "platform": "mqtt", "name": "test", "qos": 0}},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
if assumed_state:
assert ATTR_ASSUMED_STATE in state.attributes
else:
assert ATTR_ASSUMED_STATE not in state.attributes
async def test_optimistic_state_change(hass, mqtt_mock):
"""Test changing state optimistically."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"qos": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert state.attributes.get(ATTR_ASSUMED_STATE)
await hass.services.async_call(
cover.DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
await hass.services.async_call(
cover.DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
await hass.services.async_call(
cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
await hass.services.async_call(
cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_optimistic_state_change_with_position(hass, mqtt_mock):
"""Test changing state optimistically."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"optimistic": True,
"command_topic": "command-topic",
"position_topic": "position-topic",
"qos": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert state.attributes.get(ATTR_ASSUMED_STATE)
assert state.attributes.get(ATTR_CURRENT_POSITION) is None
await hass.services.async_call(
cover.DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
assert state.attributes.get(ATTR_CURRENT_POSITION) == 100
await hass.services.async_call(
cover.DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
assert state.attributes.get(ATTR_CURRENT_POSITION) == 0
await hass.services.async_call(
cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
assert state.attributes.get(ATTR_CURRENT_POSITION) == 100
await hass.services.async_call(
cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
assert state.attributes.get(ATTR_CURRENT_POSITION) == 0
async def test_send_open_cover_command(hass, mqtt_mock):
"""Test the sending of open_cover."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 2,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
await hass.services.async_call(
cover.DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 2, False)
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async def test_send_close_cover_command(hass, mqtt_mock):
"""Test the sending of close_cover."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 2,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
await hass.services.async_call(
cover.DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 2, False)
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async def test_send_stop_cover_command(hass, mqtt_mock):
"""Test the sending of stop_cover."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 2,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
await hass.services.async_call(
cover.DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "STOP", 2, False)
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async def test_current_cover_position(hass, mqtt_mock):
"""Test the current cover position."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"position_open": 100,
"position_closed": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
state_attributes_dict = hass.states.get("cover.test").attributes
assert ATTR_CURRENT_POSITION not in state_attributes_dict
assert ATTR_CURRENT_TILT_POSITION not in state_attributes_dict
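    # SUPPORT_SET_POSITION has value 4; without a set_position_topic it must not be advertised.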
assert 4 & hass.states.get("cover.test").attributes["supported_features"] != 4
async_fire_mqtt_message(hass, "get-position-topic", "0")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 0
async_fire_mqtt_message(hass, "get-position-topic", "50")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 50
async_fire_mqtt_message(hass, "get-position-topic", "non-numeric")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 50
async_fire_mqtt_message(hass, "get-position-topic", "101")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 100
async def test_current_cover_position_inverted(hass, mqtt_mock):
"""Test the current cover position."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"position_open": 0,
"position_closed": 100,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
state_attributes_dict = hass.states.get("cover.test").attributes
assert ATTR_CURRENT_POSITION not in state_attributes_dict
assert ATTR_CURRENT_TILT_POSITION not in state_attributes_dict
assert 4 & hass.states.get("cover.test").attributes["supported_features"] != 4
async_fire_mqtt_message(hass, "get-position-topic", "100")
current_percentage_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_percentage_cover_position == 0
assert hass.states.get("cover.test").state == STATE_CLOSED
async_fire_mqtt_message(hass, "get-position-topic", "0")
current_percentage_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_percentage_cover_position == 100
assert hass.states.get("cover.test").state == STATE_OPEN
async_fire_mqtt_message(hass, "get-position-topic", "50")
current_percentage_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_percentage_cover_position == 50
assert hass.states.get("cover.test").state == STATE_OPEN
async_fire_mqtt_message(hass, "get-position-topic", "non-numeric")
current_percentage_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_percentage_cover_position == 50
assert hass.states.get("cover.test").state == STATE_OPEN
async_fire_mqtt_message(hass, "get-position-topic", "101")
current_percentage_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_percentage_cover_position == 0
assert hass.states.get("cover.test").state == STATE_CLOSED
async def test_optimistic_position(hass, mqtt_mock):
"""Test optimistic position is not supported."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state is None
async def test_position_update(hass, mqtt_mock):
"""Test cover position update from received MQTT message."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_open": 100,
"position_closed": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
state_attributes_dict = hass.states.get("cover.test").attributes
assert ATTR_CURRENT_POSITION not in state_attributes_dict
assert ATTR_CURRENT_TILT_POSITION not in state_attributes_dict
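    # With a set_position_topic configured, SUPPORT_SET_POSITION (value 4) is advertised.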
assert 4 & hass.states.get("cover.test").attributes["supported_features"] == 4
async_fire_mqtt_message(hass, "get-position-topic", "22")
state_attributes_dict = hass.states.get("cover.test").attributes
assert ATTR_CURRENT_POSITION in state_attributes_dict
assert ATTR_CURRENT_TILT_POSITION not in state_attributes_dict
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 22
@pytest.mark.parametrize(
"pos_template,pos_call,pos_message",
[("{{position-1}}", 43, "42"), ("{{100-62}}", 100, "38")],
)
async def test_set_position_templated(
hass, mqtt_mock, pos_template, pos_call, pos_message
):
"""Test setting cover position via template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"position_open": 100,
"position_closed": 0,
"set_position_topic": "set-position-topic",
"set_position_template": pos_template,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: pos_call},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"set-position-topic", pos_message, 0, False
)
async def test_set_position_templated_and_attributes(hass, mqtt_mock):
"""Test setting cover position via template and using entities attributes."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"position_open": 100,
"position_closed": 0,
"set_position_topic": "set-position-topic",
"set_position_template": '\
{% if position > 99 %}\
{% if state_attr(entity_id, "current_position") == None %}\
{{ 5 }}\
{% else %}\
{{ 23 }}\
{% endif %}\
{% else %}\
{{ 42 }}\
{% endif %}',
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 100},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with("set-position-topic", "5", 0, False)
async def test_set_tilt_templated(hass, mqtt_mock):
"""Test setting cover tilt position via template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"tilt_command_topic": "tilt-command-topic",
"position_open": 100,
"position_closed": 0,
"set_position_topic": "set-position-topic",
"set_position_template": "{{position-1}}",
"tilt_command_template": "{{tilt_position+1}}",
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_TILT_POSITION: 41},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "42", 0, False
)
async def test_set_tilt_templated_and_attributes(hass, mqtt_mock):
"""Test setting cover tilt position via template and using entities attributes."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "get-position-topic",
"command_topic": "command-topic",
"tilt_command_topic": "tilt-command-topic",
"position_open": 100,
"position_closed": 0,
"set_position_topic": "set-position-topic",
"set_position_template": "{{position-1}}",
"tilt_command_template": "{"
'"enitity_id": "{{ entity_id }}",'
'"value": {{ value }},'
'"tilt_position": {{ tilt_position }}'
"}",
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_TILT_POSITION: 45},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic",
'{"enitity_id": "cover.test","value": 45,"tilt_position": 45}',
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_OPEN_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic",
'{"enitity_id": "cover.test","value": 100,"tilt_position": 100}',
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_CLOSE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic",
'{"enitity_id": "cover.test","value": 0,"tilt_position": 0}',
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_TOGGLE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic",
'{"enitity_id": "cover.test","value": 100,"tilt_position": 100}',
0,
False,
)
async def test_set_position_untemplated(hass, mqtt_mock):
"""Test setting cover position via template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "position-topic",
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 62},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with("position-topic", "62", 0, False)
async def test_set_position_untemplated_custom_percentage_range(hass, mqtt_mock):
"""Test setting cover position via template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"position_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "position-topic",
"position_open": 0,
"position_closed": 100,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 38},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with("position-topic", "62", 0, False)
async def test_no_command_topic(hass, mqtt_mock):
"""Test with no command topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command",
"tilt_status_topic": "tilt-status",
}
},
)
await hass.async_block_till_done()
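    # 240 = OPEN_TILT (16) | CLOSE_TILT (32) | STOP_TILT (64) | SET_TILT_POSITION (128): tilt features only.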
assert hass.states.get("cover.test").attributes["supported_features"] == 240
async def test_no_payload_close(hass, mqtt_mock):
"""Test with no close payload."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": None,
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
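    # 9 = SUPPORT_OPEN (1) | SUPPORT_STOP (8): close is disabled because payload_close is None.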
assert hass.states.get("cover.test").attributes["supported_features"] == 9
async def test_no_payload_open(hass, mqtt_mock):
"""Test with no open payload."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"qos": 0,
"payload_open": None,
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
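    # 10 = SUPPORT_CLOSE (2) | SUPPORT_STOP (8): open is disabled because payload_open is None.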
assert hass.states.get("cover.test").attributes["supported_features"] == 10
async def test_no_payload_stop(hass, mqtt_mock):
"""Test with no stop payload."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": None,
}
},
)
await hass.async_block_till_done()
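    # 3 = SUPPORT_OPEN (1) | SUPPORT_CLOSE (2): with payload_stop None, SUPPORT_STOP is dropped.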
assert hass.states.get("cover.test").attributes["supported_features"] == 3
async def test_with_command_topic_and_tilt(hass, mqtt_mock):
"""Test with command topic and tilt config."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"command_topic": "test",
"platform": "mqtt",
"name": "test",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command",
"tilt_status_topic": "tilt-status",
}
},
)
await hass.async_block_till_done()
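    # 251 = every cover feature except SUPPORT_SET_POSITION (4), since no set_position_topic is configured.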
assert hass.states.get("cover.test").attributes["supported_features"] == 251
async def test_tilt_defaults(hass, mqtt_mock):
"""Test the defaults."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command",
"tilt_status_topic": "tilt-status",
}
},
)
await hass.async_block_till_done()
state_attributes_dict = hass.states.get("cover.test").attributes
assert ATTR_CURRENT_TILT_POSITION in state_attributes_dict
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_position == STATE_UNKNOWN
async def test_tilt_via_invocation_defaults(hass, mqtt_mock):
"""Test tilt defaults on close/open."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_OPEN_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "100", 0, False
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_CLOSE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with("tilt-command-topic", "0", 0, False)
mqtt_mock.async_publish.reset_mock()
# Close tilt status would be received from device when non-optimistic
async_fire_mqtt_message(hass, "tilt-status-topic", "0")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
await hass.services.async_call(
cover.DOMAIN,
SERVICE_TOGGLE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "100", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Open tilt status would be received from device when non-optimistic
async_fire_mqtt_message(hass, "tilt-status-topic", "100")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 100
await hass.services.async_call(
cover.DOMAIN,
SERVICE_TOGGLE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with("tilt-command-topic", "0", 0, False)
async def test_tilt_given_value(hass, mqtt_mock):
"""Test tilting to a given value."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_opened_value": 80,
"tilt_closed_value": 25,
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_OPEN_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "80", 0, False
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_CLOSE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "25", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Close tilt status would be received from device when non-optimistic
async_fire_mqtt_message(hass, "tilt-status-topic", "25")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 25
await hass.services.async_call(
cover.DOMAIN,
SERVICE_TOGGLE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "80", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Open tilt status would be received from device when non-optimistic
async_fire_mqtt_message(hass, "tilt-status-topic", "80")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 80
await hass.services.async_call(
cover.DOMAIN,
SERVICE_TOGGLE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "25", 0, False
)
async def test_tilt_given_value_optimistic(hass, mqtt_mock):
"""Test tilting to a given value."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_opened_value": 80,
"tilt_closed_value": 25,
"tilt_optimistic": True,
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_OPEN_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 80
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "80", 0, False
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_TILT_POSITION: 50},
blocking=True,
)
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "50", 0, False
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_CLOSE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 25
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "25", 0, False
)
async def test_tilt_given_value_altered_range(hass, mqtt_mock):
"""Test tilting to a given value."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_opened_value": 25,
"tilt_closed_value": 0,
"tilt_min": 0,
"tilt_max": 50,
"tilt_optimistic": True,
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_OPEN_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "25", 0, False
)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_CLOSE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
mqtt_mock.async_publish.assert_called_once_with("tilt-command-topic", "0", 0, False)
mqtt_mock.async_publish.reset_mock()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_TOGGLE_COVER_TILT,
{ATTR_ENTITY_ID: "cover.test"},
blocking=True,
)
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "25", 0, False
)
async def test_tilt_via_topic(hass, mqtt_mock):
"""Test tilt by updating status via MQTT."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", "0")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
async_fire_mqtt_message(hass, "tilt-status-topic", "50")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
async def test_tilt_via_topic_template(hass, mqtt_mock):
"""Test tilt by updating status via MQTT and template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_status_template": "{{ (value | multiply(0.01)) | int }}",
"tilt_opened_value": 400,
"tilt_closed_value": 125,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", "99")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
async_fire_mqtt_message(hass, "tilt-status-topic", "5000")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
async def test_tilt_via_topic_template_json_value(hass, mqtt_mock, caplog):
"""Test tilt by updating status via MQTT and template with JSON value."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_status_template": "{{ value_json.Var1 }}",
"tilt_opened_value": 400,
"tilt_closed_value": 125,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", '{"Var1": 9, "Var2": 30}')
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 9
async_fire_mqtt_message(hass, "tilt-status-topic", '{"Var1": 50, "Var2": 10}')
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
async_fire_mqtt_message(hass, "tilt-status-topic", '{"Var2": 10}')
assert (
"Template variable warning: 'dict object' has no attribute 'Var1' when rendering"
) in caplog.text
async def test_tilt_via_topic_altered_range(hass, mqtt_mock):
"""Test tilt status via MQTT with altered tilt range."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_min": 0,
"tilt_max": 50,
}
},
)
await hass.async_block_till_done()
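    # With tilt_min 0 and tilt_max 50, raw device values are rescaled to 0-100%.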
async_fire_mqtt_message(hass, "tilt-status-topic", "0")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
async_fire_mqtt_message(hass, "tilt-status-topic", "50")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 100
async_fire_mqtt_message(hass, "tilt-status-topic", "25")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
async def test_tilt_status_out_of_range_warning(hass, caplog, mqtt_mock):
"""Test tilt status via MQTT tilt out of range warning message."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_min": 0,
"tilt_max": 50,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", "60")
assert (
"Payload '60' is out of range, must be between '0' and '50' inclusive"
) in caplog.text
async def test_tilt_status_not_numeric_warning(hass, caplog, mqtt_mock):
"""Test tilt status via MQTT tilt not numeric warning message."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_min": 0,
"tilt_max": 50,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", "abc")
assert ("Payload 'abc' is not numeric") in caplog.text
async def test_tilt_via_topic_altered_range_inverted(hass, mqtt_mock):
"""Test tilt status via MQTT with altered tilt range and inverted tilt position."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_min": 50,
"tilt_max": 0,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", "0")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 100
async_fire_mqtt_message(hass, "tilt-status-topic", "50")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
async_fire_mqtt_message(hass, "tilt-status-topic", "25")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
async def test_tilt_via_topic_template_altered_range(hass, mqtt_mock):
"""Test tilt status via MQTT and template with altered tilt range."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_status_template": "{{ (value | multiply(0.01)) | int }}",
"tilt_opened_value": 400,
"tilt_closed_value": 125,
"tilt_min": 0,
"tilt_max": 50,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tilt-status-topic", "99")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 0
async_fire_mqtt_message(hass, "tilt-status-topic", "5000")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 100
async_fire_mqtt_message(hass, "tilt-status-topic", "2500")
current_cover_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_tilt_position == 50
async def test_tilt_position(hass, mqtt_mock):
"""Test tilt via method invocation."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_TILT_POSITION: 50},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "50", 0, False
)
async def test_tilt_position_templated(hass, mqtt_mock):
"""Test tilt position via template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_command_template": "{{100-32}}",
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_TILT_POSITION: 100},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "68", 0, False
)
async def test_tilt_position_altered_range(hass, mqtt_mock):
"""Test tilt via method invocation with altered range."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"tilt_opened_value": 400,
"tilt_closed_value": 125,
"tilt_min": 0,
"tilt_max": 50,
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
cover.DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: "cover.test", ATTR_TILT_POSITION: 50},
blocking=True,
)
mqtt_mock.async_publish.assert_called_once_with(
"tilt-command-topic", "25", 0, False
)
async def test_find_percentage_in_range_defaults(hass, mqtt_mock):
"""Test find percentage in range with default range."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 100,
"position_closed": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 100,
"tilt_closed_position": 0,
"tilt_min": 0,
"tilt_max": 100,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
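    # With the default 0-100 range, device values map 1:1 to percent.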
assert mqtt_cover.find_percentage_in_range(44) == 44
assert mqtt_cover.find_percentage_in_range(44, "cover") == 44
async def test_find_percentage_in_range_altered(hass, mqtt_mock):
"""Test find percentage in range with altered range."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 180,
"position_closed": 80,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 180,
"tilt_closed_position": 80,
"tilt_min": 80,
"tilt_max": 180,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
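    # (120 - 80) / (180 - 80) * 100 = 40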
assert mqtt_cover.find_percentage_in_range(120) == 40
assert mqtt_cover.find_percentage_in_range(120, "cover") == 40
async def test_find_percentage_in_range_defaults_inverted(hass, mqtt_mock):
"""Test find percentage in range with default range but inverted."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 0,
"position_closed": 100,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 100,
"tilt_closed_position": 0,
"tilt_min": 100,
"tilt_max": 0,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
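    # Inverted default range: 100 - 44 = 56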
assert mqtt_cover.find_percentage_in_range(44) == 56
assert mqtt_cover.find_percentage_in_range(44, "cover") == 56
async def test_find_percentage_in_range_altered_inverted(hass, mqtt_mock):
"""Test find percentage in range with altered range and inverted."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 80,
"position_closed": 180,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 180,
"tilt_closed_position": 80,
"tilt_min": 180,
"tilt_max": 80,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
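    # Inverted 80-180 range: (180 - 120) / (180 - 80) * 100 = 60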
assert mqtt_cover.find_percentage_in_range(120) == 60
assert mqtt_cover.find_percentage_in_range(120, "cover") == 60
async def test_find_in_range_defaults(hass, mqtt_mock):
"""Test find in range with default range."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 100,
"position_closed": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 100,
"tilt_closed_position": 0,
"tilt_min": 0,
"tilt_max": 100,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
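    # With the default 0-100 range, percentages map 1:1 to device values.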
assert mqtt_cover.find_in_range_from_percent(44) == 44
assert mqtt_cover.find_in_range_from_percent(44, "cover") == 44
async def test_find_in_range_altered(hass, mqtt_mock):
"""Test find in range with altered range."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 180,
"position_closed": 80,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 180,
"tilt_closed_position": 80,
"tilt_min": 80,
"tilt_max": 180,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
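    # 40% of the 80-180 span from the closed end: 80 + 0.40 * 100 = 120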
assert mqtt_cover.find_in_range_from_percent(40) == 120
assert mqtt_cover.find_in_range_from_percent(40, "cover") == 120
async def test_find_in_range_defaults_inverted(hass, mqtt_mock):
"""Test find in range with default range but inverted."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 0,
"position_closed": 100,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 100,
"tilt_closed_position": 0,
"tilt_min": 100,
"tilt_max": 0,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
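    # Inverted default range: 100 - 56 = 44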
assert mqtt_cover.find_in_range_from_percent(56) == 44
assert mqtt_cover.find_in_range_from_percent(56, "cover") == 44
async def test_find_in_range_altered_inverted(hass, mqtt_mock):
"""Test find in range with altered range and inverted."""
mqtt_cover = MqttCover(
hass,
{
"name": "cover.test",
"state_topic": "state-topic",
"get_position_topic": None,
"command_topic": "command-topic",
"availability_topic": None,
"tilt_command_topic": "tilt-command-topic",
"tilt_status_topic": "tilt-status-topic",
"qos": 0,
"retain": False,
"state_open": "OPEN",
"state_closed": "CLOSE",
"position_open": 80,
"position_closed": 180,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
"payload_available": None,
"payload_not_available": None,
"optimistic": False,
"value_template": None,
"tilt_open_position": 180,
"tilt_closed_position": 80,
"tilt_min": 180,
"tilt_max": 80,
"tilt_optimistic": False,
"set_position_topic": None,
"set_position_template": None,
"unique_id": None,
"device_config": None,
},
None,
None,
)
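    # Inverted 80-180 range: 180 - 0.60 * 100 = 120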
assert mqtt_cover.find_in_range_from_percent(60) == 120
assert mqtt_cover.find_in_range_from_percent(60, "cover") == 120
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
await help_test_default_availability_payload(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
await help_test_custom_availability_payload(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_valid_device_class(hass, mqtt_mock):
"""Test the setting of a valid device class."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"device_class": "garage",
"state_topic": "test-topic",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.attributes.get("device_class") == "garage"
async def test_invalid_device_class(hass, mqtt_mock):
"""Test the setting of an invalid device class."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"device_class": "abc123",
"state_topic": "test-topic",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state is None
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_setting_blocked_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_blocked_attribute_via_mqtt_json_message(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG, MQTT_COVER_ATTRIBUTES_BLOCKED
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_bad_json(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique_id option only creates one cover per id."""
config = {
cover.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, mqtt_mock, cover.DOMAIN, config)
async def test_discovery_removal_cover(hass, mqtt_mock, caplog):
"""Test removal of discovered cover."""
data = '{ "name": "test", "command_topic": "test_topic" }'
await help_test_discovery_removal(hass, mqtt_mock, caplog, cover.DOMAIN, data)
async def test_discovery_update_cover(hass, mqtt_mock, caplog):
"""Test update of discovered cover."""
config1 = {"name": "Beer", "command_topic": "test_topic"}
config2 = {"name": "Milk", "command_topic": "test_topic"}
await help_test_discovery_update(
hass, mqtt_mock, caplog, cover.DOMAIN, config1, config2
)
async def test_discovery_update_unchanged_cover(hass, mqtt_mock, caplog):
"""Test update of discovered cover."""
data1 = '{ "name": "Beer", "command_topic": "test_topic" }'
with patch(
"homeassistant.components.mqtt.cover.MqttCover.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, cover.DOMAIN, data1, discovery_update
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer", "command_topic": "test_topic#" }'
data2 = '{ "name": "Milk", "command_topic": "test_topic" }'
await help_test_discovery_broken(
hass, mqtt_mock, caplog, cover.DOMAIN, data1, data2
)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT cover device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT cover device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
await help_test_entity_debug_info_message(
hass,
mqtt_mock,
cover.DOMAIN,
DEFAULT_CONFIG,
SERVICE_OPEN_COVER,
command_payload="OPEN",
)
async def test_state_and_position_topics_state_not_set_via_position_topic(
hass, mqtt_mock
):
"""Test state is not set via position topic when both state and position topics are set."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"position_topic": "get-position-topic",
"position_open": 100,
"position_closed": 0,
"state_open": "OPEN",
"state_closed": "CLOSE",
"command_topic": "command-topic",
"qos": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "OPEN")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "get-position-topic", "0")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "get-position-topic", "100")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "state-topic", "CLOSE")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "get-position-topic", "0")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "get-position-topic", "100")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_set_state_via_position_using_stopped_state(hass, mqtt_mock):
"""Test the controlling state via position topic using stopped state."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"position_topic": "get-position-topic",
"position_open": 100,
"position_closed": 0,
"state_open": "OPEN",
"state_closed": "CLOSE",
"state_stopped": "STOPPED",
"command_topic": "command-topic",
"qos": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "OPEN")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "get-position-topic", "0")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "state-topic", "STOPPED")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "get-position-topic", "100")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "state-topic", "STOPPED")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async def test_position_via_position_topic_template(hass, mqtt_mock):
"""Test position by updating status via position template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": "{{ (value | multiply(0.01)) | int }}",
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "99")
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 0
    async_fire_mqtt_message(hass, "get-position-topic", "5000")
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 50
async def test_position_via_position_topic_template_json_value(hass, mqtt_mock, caplog):
"""Test position by updating status via position template with a JSON value."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": "{{ value_json.Var1 }}",
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", '{"Var1": 9, "Var2": 60}')
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 9
    async_fire_mqtt_message(hass, "get-position-topic", '{"Var1": 50, "Var2": 10}')
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 50
async_fire_mqtt_message(hass, "get-position-topic", '{"Var2": 60}')
assert (
"Template variable warning: 'dict object' has no attribute 'Var1' when rendering"
) in caplog.text
async def test_position_template_with_entity_id(hass, mqtt_mock):
"""Test position by updating status via position template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": '\
{% if state_attr(entity_id, "current_position") != None %}\
{{ value | int + state_attr(entity_id, "current_position") }} \
{% else %} \
{{ value }} \
{% endif %}',
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "10")
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 10
    async_fire_mqtt_message(hass, "get-position-topic", "10")
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 20
async def test_position_via_position_topic_template_return_json(hass, mqtt_mock):
"""Test position by updating status via position template and returning json."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": '{{ {"position" : value} | tojson }}',
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "55")
    current_cover_position = hass.states.get("cover.test").attributes[
        ATTR_CURRENT_POSITION
    ]
    assert current_cover_position == 55
async def test_position_via_position_topic_template_return_json_warning(
hass, caplog, mqtt_mock
):
"""Test position by updating status via position template returning json without position attribute."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": '{{ {"pos" : value} | tojson }}',
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "55")
assert (
"Template (position_template) returned JSON without position attribute"
in caplog.text
)
async def test_position_and_tilt_via_position_topic_template_return_json(
hass, mqtt_mock
):
"""Test position and tilt by updating the position via position template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": '\
{{ {"position" : value, "tilt_position" : (value | int / 2)| int } | tojson }}',
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "0")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
current_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_position == 0 and current_tilt_position == 0
async_fire_mqtt_message(hass, "get-position-topic", "99")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
current_tilt_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_TILT_POSITION
]
assert current_cover_position == 99 and current_tilt_position == 49
async def test_position_via_position_topic_template_all_variables(hass, mqtt_mock):
"""Test position by updating status via position template."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"tilt_command_topic": "tilt-command-topic",
"position_open": 99,
"position_closed": 1,
"tilt_min": 11,
"tilt_max": 22,
"position_template": "\
{% if value | int < tilt_max %}\
{{ tilt_min }}\
{% endif %}\
{% if value | int > position_closed %}\
{{ position_open }}\
{% endif %}",
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "0")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 10
async_fire_mqtt_message(hass, "get-position-topic", "55")
current_cover_position = hass.states.get("cover.test").attributes[
ATTR_CURRENT_POSITION
]
assert current_cover_position == 100
async def test_set_state_via_stopped_state_no_position_topic(hass, mqtt_mock):
"""Test the controlling state via stopped state when no position topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"state_open": "OPEN",
"state_closed": "CLOSE",
"state_stopped": "STOPPED",
"state_opening": "OPENING",
"state_closing": "CLOSING",
"command_topic": "command-topic",
"qos": 0,
"optimistic": False,
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "state-topic", "OPEN")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "state-topic", "OPENING")
state = hass.states.get("cover.test")
assert state.state == STATE_OPENING
async_fire_mqtt_message(hass, "state-topic", "STOPPED")
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async_fire_mqtt_message(hass, "state-topic", "CLOSING")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSING
async_fire_mqtt_message(hass, "state-topic", "STOPPED")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_position_via_position_topic_template_return_invalid_json(
hass, caplog, mqtt_mock
):
"""Test position by updating status via position template and returning invalid json."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"position_topic": "get-position-topic",
"position_template": '{{ {"position" : invalid_json} }}',
}
},
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "get-position-topic", "55")
assert ("Payload '{'position': Undefined}' is not numeric") in caplog.text
async def test_set_position_topic_without_get_position_topic_error(
hass, caplog, mqtt_mock
):
"""Test error when set_position_topic is used without position_topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"set_position_topic": "set-position-topic",
"value_template": "{{100-62}}",
}
},
)
await hass.async_block_till_done()
assert (
f"'{CONF_SET_POSITION_TOPIC}' must be set together with '{CONF_GET_POSITION_TOPIC}'."
) in caplog.text
async def test_value_template_without_state_topic_error(hass, caplog, mqtt_mock):
"""Test error when value_template is used and state_topic is missing."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"value_template": "{{100-62}}",
}
},
)
await hass.async_block_till_done()
assert (
f"'{CONF_VALUE_TEMPLATE}' must be set together with '{CONF_STATE_TOPIC}'."
) in caplog.text
async def test_position_template_without_position_topic_error(hass, caplog, mqtt_mock):
"""Test error when position_template is used and position_topic is missing."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"position_template": "{{100-52}}",
}
},
)
await hass.async_block_till_done()
assert (
f"'{CONF_GET_POSITION_TEMPLATE}' must be set together with '{CONF_GET_POSITION_TOPIC}'."
in caplog.text
)
async def test_set_position_template_without_set_position_topic(
hass, caplog, mqtt_mock
):
"""Test error when set_position_template is used and set_position_topic is missing."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"set_position_template": "{{100-42}}",
}
},
)
await hass.async_block_till_done()
assert (
f"'{CONF_SET_POSITION_TEMPLATE}' must be set together with '{CONF_SET_POSITION_TOPIC}'."
in caplog.text
)
async def test_tilt_command_template_without_tilt_command_topic(
hass, caplog, mqtt_mock
):
"""Test error when tilt_command_template is used and tilt_command_topic is missing."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"tilt_command_template": "{{100-32}}",
}
},
)
await hass.async_block_till_done()
assert (
f"'{CONF_TILT_COMMAND_TEMPLATE}' must be set together with '{CONF_TILT_COMMAND_TOPIC}'."
in caplog.text
)
async def test_tilt_status_template_without_tilt_status_topic(
hass, caplog, mqtt_mock
):
"""Test error when tilt_status_template is used and tilt_status_topic is missing."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"tilt_status_template": "{{100-22}}",
}
},
)
await hass.async_block_till_done()
assert (
f"'{CONF_TILT_STATUS_TEMPLATE}' must be set together with '{CONF_TILT_STATUS_TOPIC}'."
in caplog.text
)
@pytest.mark.parametrize(
"service,topic,parameters,payload,template",
[
(
SERVICE_OPEN_COVER,
"command_topic",
None,
"OPEN",
None,
),
(
SERVICE_SET_COVER_POSITION,
"set_position_topic",
{ATTR_POSITION: "50"},
50,
"set_position_template",
),
(
SERVICE_SET_COVER_TILT_POSITION,
"tilt_command_topic",
{ATTR_TILT_POSITION: "45"},
45,
"tilt_command_template",
),
],
)
async def test_publishing_with_custom_encoding(
hass,
mqtt_mock,
caplog,
service,
topic,
parameters,
payload,
template,
):
"""Test publishing MQTT payload with different encoding."""
domain = cover.DOMAIN
config = DEFAULT_CONFIG[domain]
config["position_topic"] = "some-position-topic"
await help_test_publishing_with_custom_encoding(
hass,
mqtt_mock,
caplog,
domain,
config,
service,
topic,
parameters,
payload,
template,
)
async def test_reloadable(hass, mqtt_mock, caplog, tmp_path):
"""Test reloading the MQTT platform."""
domain = cover.DOMAIN
config = DEFAULT_CONFIG[domain]
await help_test_reloadable(hass, mqtt_mock, caplog, tmp_path, domain, config)
async def test_reloadable_late(hass, mqtt_client_mock, caplog, tmp_path):
"""Test reloading the MQTT platform with late entry setup."""
domain = cover.DOMAIN
config = DEFAULT_CONFIG[domain]
await help_test_reloadable_late(hass, caplog, tmp_path, domain, config)
@pytest.mark.parametrize(
"topic,value,attribute,attribute_value",
[
("state_topic", "open", None, None),
("state_topic", "closing", None, None),
("position_topic", "40", "current_position", 40),
("tilt_status_topic", "60", "current_tilt_position", 60),
],
)
async def test_encoding_subscribable_topics(
hass, mqtt_mock, caplog, topic, value, attribute, attribute_value
):
"""Test handling of incoming encoded payload."""
await help_test_encoding_subscribable_topics(
hass,
mqtt_mock,
caplog,
cover.DOMAIN,
DEFAULT_CONFIG[cover.DOMAIN],
topic,
value,
attribute,
attribute_value,
skip_raw_test=True,
)
| rohitranjan1991/home-assistant | tests/components/mqtt/test_cover.py | Python | mit | 102,124 |
import pytest
from everest.repositories.rdb.testing import check_attributes
from everest.repositories.rdb.testing import persist
from thelma.tests.entity.conftest import TestEntityBase
class TestExperimentEntity(TestEntityBase):
def test_init(self, experiment_fac):
exp = experiment_fac()
check_attributes(exp, experiment_fac.init_kw)
assert len(exp.experiment_racks) == 0
@pytest.mark.parametrize('kw1,kw2,result',
[(dict(id=-1), dict(id=-1), True),
(dict(id=-1), dict(id=-2), False)])
def test_equality(self, experiment_fac, experiment_design_fac, plate_fac,
kw1, kw2, result):
ed1 = experiment_design_fac(**kw1)
ed2 = experiment_design_fac(**kw2)
rack1 = plate_fac(**kw1)
rack2 = plate_fac(**kw2)
exp1 = experiment_fac(experiment_design=ed1, source_rack=rack1)
exp2 = experiment_fac(experiment_design=ed2, source_rack=rack2)
exp3 = experiment_fac(experiment_design=ed2, source_rack=rack1)
exp4 = experiment_fac(experiment_design=ed1, source_rack=rack2)
assert (exp1 == exp2) is result
assert (exp1 == exp3) is result
assert (exp1 == exp4) is result
def test_persist(self, nested_session, experiment_fac,
experiment_job_fac):
exp = experiment_fac()
# FIXME: Working around the circular dependency of experiment and
# experiment job here.
exp_job = experiment_job_fac(experiments=[exp])
kw = experiment_fac.init_kw
kw['job'] = exp.job
exp.job = exp_job
persist(nested_session, exp, kw, True)
class TestExperimentRackEntity(TestEntityBase):
def test_init(self, experiment_rack_fac):
exp_r = experiment_rack_fac()
check_attributes(exp_r, experiment_rack_fac.init_kw)
class TestExperimentDesignEntity(TestEntityBase):
def test_init(self, experiment_design_fac):
exp_dsgn = experiment_design_fac()
check_attributes(exp_dsgn, experiment_design_fac.init_kw)
def test_persist(self, nested_session, experiment_design_fac):
exp_design = experiment_design_fac()
persist(nested_session, exp_design, experiment_design_fac.init_kw,
True)
class TestExperimentDesignRackEntity(TestEntityBase):
def test_init(self, experiment_design_rack_fac):
exp_dr = experiment_design_rack_fac()
check_attributes(exp_dr, experiment_design_rack_fac.init_kw)
class TestExperimentMetadataEntity(TestEntityBase):
def test_init(self, experiment_metadata_fac):
em = experiment_metadata_fac()
check_attributes(em, experiment_metadata_fac.init_kw)
@pytest.mark.parametrize('kw1,kw2,result',
[(dict(label='em1'), dict(label='em1'), True),
(dict(label='em1'), dict(label='em2'), False)])
def test_equality(self, subproject_fac, experiment_metadata_fac,
kw1, kw2, result):
sp1 = subproject_fac(**kw1)
sp2 = subproject_fac(**kw2)
em1 = experiment_metadata_fac(subproject=sp1, **kw1)
em2 = experiment_metadata_fac(subproject=sp2, **kw2)
assert (em1 == em2) is result
def test_persist(self, nested_session, experiment_metadata_fac):
exp_metadata = experiment_metadata_fac()
persist(nested_session, exp_metadata, experiment_metadata_fac.init_kw,
True)
| helixyte/TheLMA | thelma/tests/entity/test_experiment.py | Python | mit | 3,511 |
"""
virtstrap.log
-------------
Provides a central logging facility. It is used to record log info
and report both to a log file and stdout
"""
import sys
import logging
import traceback
CLINT_AVAILABLE = True
try:
from clint.textui import puts, colored
except ImportError:
# Clint is still not stable enough yet to just import with so much
# trust, but I really like colored output. So we'll give it a shot
# and if it doesn't work we will just do something else.
CLINT_AVAILABLE = False
def get_logging_level(level):
logging_level = None
if isinstance(level, (str, unicode)):
level = level.upper()
try:
            logging_level = getattr(logging, level)
except AttributeError:
raise AttributeError('Tried to grab logging level "%s"'
' but it does not exist' % level)
elif isinstance(level, int):
# Do nothing
logging_level = level
else:
raise TypeError('Invalid logging level. Must be string or int %s'
% str(level))
return logging_level
class VirtstrapLogger(object):
"""Custom logger for use with virtstrap
It'll allow the logger to store logged data before a log file is setup. It
is meant to be used globally.
"""
def __init__(self):
self._handlers = []
self._log_lines = [] #storage before any handlers appear
def add_handler(self, handler):
self._handlers.append(handler)
log_lines = self._log_lines
for level, message in log_lines:
self.log(level, message, new_line=False)
self._log_lines = []
def debug(self, message, **kwargs):
self.log('debug', message, **kwargs)
def error(self, message, **kwargs):
self.log('error', message, **kwargs)
def info(self, message, **kwargs):
self.log('info', message, **kwargs)
def warning(self, message, **kwargs):
self.log('warning', message, **kwargs)
def critical(self, message, **kwargs):
self.log('critical', message, **kwargs)
def exception(self, message, **kwargs):
exception_str = self._get_exception_str()
self.log('error', '%s\n%s' % (message, exception_str))
def debug_exception(self, message, **kwargs):
"""Stores exception except using the debug level"""
exception_str = self._get_exception_str()
self.log('debug', '%s\n%s' % (message, exception_str))
def _get_exception_str(self):
exception_info = sys.exc_info()
exception_lines = traceback.format_exception(*exception_info)
exception_str = ''.join(exception_lines)
return exception_str
def log(self, level, message, new_line=True):
if new_line:
message = "%s\n" % message
handlers = self._handlers
if not handlers:
self._log_lines.append((level, message))
else:
for handler in handlers:
handler.log(level, message)
def close(self):
handlers = self._handlers
for handler in handlers:
close = getattr(handler, 'close')
if close:
close()
class VirtstrapLogHandler(object):
def __init__(self, level='debug'):
self._level = get_logging_level(level)
def set_level(self, level):
self._level = get_logging_level(level)
def log(self, level, message):
current_level = get_logging_level(level)
if current_level >= self._level:
self.emit(level, message)
def emit(self, level, message):
raise NotImplementedError('Please implement an emit method')
def close(self):
pass
class ConsoleLogHandler(VirtstrapLogHandler):
def emit(self, level, message):
sys.stdout.write(message)
class ColoredConsoleLogHandler(VirtstrapLogHandler):
level_colors = {
"DEBUG": "green",
"INFO": "black",
"WARNING": "yellow",
"CRITICAL": "purple",
"ERROR": "red",
"EXCEPTION": "red",
}
def emit(self, level, output):
color = self.level_colors.get(level, "black")
colored_function = getattr(colored, color, lambda text: text)
colored_output = colored_function(output)
puts(colored_output)
class FileLogHandler(VirtstrapLogHandler):
"""File Log Handler that uses built in logging to log"""
def __init__(self, filename):
self._file = open(filename, 'a')
def emit(self, level, message):
if self._file:
self._file.write(message)
def close(self):
self._file.close()
self._file = None
class VirtstrapConsoleLogHandler(logging.Handler):
def __init__(self, outputter):
self._outputter = outputter
logging.Handler.__init__(self)
def emit(self, record):
outputter = self._outputter
output_string = self.format(record)
outputter.write(output_string, record.levelname)
class ConsoleLogOutputter(object):
def write(self, output, level):
print(output)
class ColoredConsoleLogOutputter(ConsoleLogOutputter):
level_colors = {
"DEBUG": "green",
"INFO": "black",
"WARNING": "yellow",
"CRITICAL": "purple",
"ERROR": "red",
"EXCEPTION": "red",
}
def write(self, output, level):
color = self.level_colors.get(level, "black")
colored_function = getattr(colored, color, lambda text: text)
colored_output = colored_function(output)
puts(colored_output)
logger = VirtstrapLogger()
VERBOSITY_LEVELS = {
0: None,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG,
}
def setup_logger(verbosity, no_colored_output=False, log_file=None):
"""Sets up the logger for the program. DO NOT USE DIRECTLY IN COMMANDS"""
verbosity_level = VERBOSITY_LEVELS.get(verbosity, logging.INFO)
if log_file:
file_handler = FileLogHandler(log_file)
# The file should log all things to be used for error reporting
file_handler.set_level(logging.DEBUG)
logger.add_handler(file_handler)
if not verbosity_level:
return
console_handler = ConsoleLogHandler()
if CLINT_AVAILABLE:
console_handler = ColoredConsoleLogHandler()
console_handler.set_level(verbosity_level)
logger.add_handler(console_handler)
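if __name__ == '__main__':
    # Minimal usage sketch, assuming a writable 'virtstrap.log' path:
    # verbosity 2 maps to INFO on the console, while the file handler
    # always records at DEBUG for later error reporting.
    setup_logger(2, log_file='virtstrap.log')
    logger.info('logging configured')
    logger.close()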
| ravenac95/virtstrap | virtstrap-core/virtstrap/log.py | Python | mit | 6,365 |
from ...utils.tests import base as base
from ...utils.tests import mpi as mpit
# key order: dataset; epsilon; C
__REFERENCE_RESULTS__ = {
"dryrun": {
0.1: {
0.1: {
"accuracy": 1-0.9300,
},
1: {
"accuracy": 1-0.9300,
},
10: {
"accuracy": 1-0.9300,
},
},
},
"glass": {
0.001: {
0.1: {
"accuracy": 1-0.6667,
},
1: {
"accuracy": 1-0.6190,
},
10: {
"accuracy": 1-0.3333,
},
},
},
"iris": {
0.001: {
0.1: {
"accuracy": 1-0.1333,
},
1: {
"accuracy": 1-0.2667,
},
10: {
"accuracy": 1-0.2667,
},
},
},
"news20": {
0.001: {
0.1: {
"accuracy": 1-0.2923,
},
1: {
"accuracy": 1-0.2297,
},
10: {
"accuracy": 1-0.1615,
},
},
},
}
def do_llwmr_tsd(options_update={}, mpi_comm=None,
datasets=None, reference=True):
solver_id = "llw_mr_sparse"
# default options
options = {
"epsilon": 0.001,
"C": 10**-1,
"mpi_comm": mpi_comm,
}
options.update(options_update)
reference_results = __REFERENCE_RESULTS__
if reference is False:
reference_results = None
base._do_test_small_datasets(solver_id, options,
datasets=datasets,
reference_results=reference_results,
mpi_comm=mpi_comm)
def do_llwmr_tld(options_update={}, mpi_comm=None,
datasets=None, reference=True):
solver_id = "llw_mr_sparse"
# default options
options = {
"epsilon": 0.1,
"C": 10**-1,
"mpi_comm": mpi_comm,
}
options.update(options_update)
tolerance = 0.01
reference_results = __REFERENCE_RESULTS__
if reference is False:
reference_results = None
base._do_test_large_datasets(solver_id, options,
datasets=datasets,
reference_results=reference_results,
mpi_comm=mpi_comm,
tolerance=tolerance)
###############################################################################
# .............................................................................
# BASE SOLVER TESTS
# .............................................................................
###############################################################################
###############################################################################
# Running default config
def test_default_sd():
do_llwmr_tsd()
@base.testattr("slow")
def test_default_ld():
do_llwmr_tld()
###############################################################################
# Parameter C and max_iter
@base.testattr("slow")
def test_C_1_sd():
do_llwmr_tsd({"C": 10**0})
@base.testattr("slow")
def test_C_1_ld():
do_llwmr_tld({"C": 10**0})
@base.testattr("slow")
def test_C_10_sd():
do_llwmr_tsd({"C": 10**1, "max_iter": 10000})
@base.testattr("slow")
def test_C_10_ld():
do_llwmr_tld({"C": 10**1, "max_iter": 10000})
###############################################################################
# Parameter epsilon
def test_small_epsilon_sd():
do_llwmr_tsd({"epsilon": 0.0001}, reference=False)
@base.testattr("slow")
def test_small_epsilon_ld():
do_llwmr_tld({"epsilon": 0.01}, reference=False)
###############################################################################
# Parameter shuffle
def test_no_shuffle_sd():
do_llwmr_tsd({"shuffle": False})
@base.testattr("slow")
def test_no_shuffle_ld():
do_llwmr_tld({"shuffle": False})
###############################################################################
# Parameter seed
def test_seed_12345_sd():
do_llwmr_tsd({"seed": 12345})
@base.testattr("slow")
def test_seed_12345_ld():
do_llwmr_tld({"seed": 12345})
###############################################################################
# Parameter dtype
@base.testattr("slow")
def test_dtype_float32_sd():
do_llwmr_tsd({"dtype": "float32"})
@base.testattr("slow")
def test_dtype_float32_ld():
do_llwmr_tld({"dtype": "float32"})
@base.testattr("slow")
def test_dtype_float64_sd():
do_llwmr_tsd({"dtype": "float64"})
@base.testattr("slow")
def test_dtype_float64_ld():
do_llwmr_tld({"dtype": "float64"})
###############################################################################
# Parameter idtype
@base.testattr("slow")
def test_idtype_uint32_sd():
do_llwmr_tsd({"idtype": "uint32"})
@base.testattr("slow")
def test_idtype_uint32_ld():
do_llwmr_tld({"idtype": "uint32"})
@base.testattr("slow")
def test_idtype_uint64_sd():
do_llwmr_tsd({"idtype": "uint64"})
@base.testattr("slow")
def test_idtype_uint64_ld():
do_llwmr_tld({"idtype": "uint64"})
###############################################################################
# Parameter nr_threads
def test_nr_threads_2_sd():
do_llwmr_tsd({"nr_threads": 2})
@base.testattr("slow")
def test_nr_threads_2_ld():
do_llwmr_tld({"nr_threads": 2})
def test_nr_threads_5_sd():
do_llwmr_tsd({"nr_threads": 5})
@base.testattr("slow")
def test_nr_threads_5_ld():
do_llwmr_tld({"nr_threads": 5})
###############################################################################
# .............................................................................
# LLW SOLVER TESTS
# .............................................................................
###############################################################################
###############################################################################
# Parameter folds
def test_folds_2_sd():
do_llwmr_tsd({"folds": 2})
@base.testattr("slow")
def test_folds_2_ld():
do_llwmr_tld({"folds": 2})
def test_folds_5_sd():
do_llwmr_tsd({"folds": 5})
@base.testattr("slow")
def test_folds_5_ld():
do_llwmr_tld({"folds": 5})
###############################################################################
# Parameter variant
def test_variant_1_sd():
do_llwmr_tsd({"variant": 1})
@base.testattr("slow")
def test_variant_1_ld():
do_llwmr_tld({"variant": 1})
###############################################################################
# Parameter shrinking
def test_shrinking_1_sd():
do_llwmr_tsd({"shrinking": 1})
@base.testattr("slow")
def test_shrinking_1_ld():
do_llwmr_tld({"shrinking": 1})
###############################################################################
# Spreading computation with openmpi
@mpit.wrap(2)
def test_nr_proc_2_sd(comm):
do_llwmr_tsd({}, comm)
@base.testattr("slow")
@mpit.wrap(2)
def test_nr_proc_2_ld(comm):
do_llwmr_tld({}, comm)
@mpit.wrap(3)
def test_nr_proc_3_sd(comm):
do_llwmr_tsd({}, comm)
@base.testattr("slow")
@mpit.wrap(3)
def test_nr_proc_3_ld(comm):
do_llwmr_tld({}, comm)
| albermax/xcsvm | xcsvm/xcsvm/tests/solvers/llwmr.py | Python | mit | 7,407 |
from __future__ import print_function
from eventlet import hubs
from eventlet.support import greenlets as greenlet
__all__ = ['Event']
class NOT_USED:
def __repr__(self):
return 'NOT_USED'
NOT_USED = NOT_USED()
class Event(object):
"""An abstraction where an arbitrary number of coroutines
can wait for one event from another.
Events are similar to a Queue that can only hold one item, but differ
in two important ways:
1. calling :meth:`send` never unschedules the current greenthread
2. :meth:`send` can only be called once; create a new event to send again.
They are good for communicating results between coroutines, and
are the basis for how
:meth:`GreenThread.wait() <eventlet.greenthread.GreenThread.wait>`
is implemented.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def baz(b):
... evt.send(b + 1)
...
>>> _ = eventlet.spawn_n(baz, 3)
>>> evt.wait()
4
"""
_result = None
_exc = None
def __init__(self):
self._waiters = set()
self.reset()
def __str__(self):
params = (self.__class__.__name__, hex(id(self)),
self._result, self._exc, len(self._waiters))
return '<%s at %s result=%r _exc=%r _waiters[%d]>' % params
def reset(self):
# this is kind of a misfeature and doesn't work perfectly well,
# it's better to create a new event rather than reset an old one
# removing documentation so that we don't get new use cases for it
assert self._result is not NOT_USED, 'Trying to re-reset() a fresh event.'
self._result = NOT_USED
self._exc = None
def ready(self):
""" Return true if the :meth:`wait` call will return immediately.
Used to avoid waiting for things that might take a while to time out.
For example, you can put a bunch of events into a list, and then visit
them all repeatedly, calling :meth:`ready` until one returns ``True``,
and then you can :meth:`wait` on that one."""
return self._result is not NOT_USED
def has_exception(self):
return self._exc is not None
def has_result(self):
return self._result is not NOT_USED and self._exc is None
def poll(self, notready=None):
if self.ready():
return self.wait()
return notready
# QQQ make it return tuple (type, value, tb) instead of raising
# because
# 1) "poll" does not imply raising
# 2) it's better not to screw up caller's sys.exc_info() by default
    # (e.g. if caller wants to call the function in except or finally)
def poll_exception(self, notready=None):
if self.has_exception():
return self.wait()
return notready
def poll_result(self, notready=None):
if self.has_result():
return self.wait()
return notready
def wait(self):
"""Wait until another coroutine calls :meth:`send`.
Returns the value the other coroutine passed to
:meth:`send`.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def wait_on():
... retval = evt.wait()
... print("waited for {0}".format(retval))
>>> _ = eventlet.spawn(wait_on)
>>> evt.send('result')
>>> eventlet.sleep(0)
waited for result
Returns immediately if the event has already
        occurred.
>>> evt.wait()
'result'
"""
current = greenlet.getcurrent()
if self._result is NOT_USED:
self._waiters.add(current)
try:
return hubs.get_hub().switch()
finally:
self._waiters.discard(current)
if self._exc is not None:
current.throw(*self._exc)
return self._result
def send(self, result=None, exc=None):
"""Makes arrangements for the waiters to be woken with the
result and then returns immediately to the parent.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def waiter():
... print('about to wait')
... result = evt.wait()
... print('waited for {0}'.format(result))
>>> _ = eventlet.spawn(waiter)
>>> eventlet.sleep(0)
about to wait
>>> evt.send('a')
>>> eventlet.sleep(0)
waited for a
It is an error to call :meth:`send` multiple times on the same event.
>>> evt.send('whoops')
Traceback (most recent call last):
...
AssertionError: Trying to re-send() an already-triggered event.
Use :meth:`reset` between :meth:`send` s to reuse an event object.
"""
assert self._result is NOT_USED, 'Trying to re-send() an already-triggered event.'
self._result = result
if exc is not None and not isinstance(exc, tuple):
exc = (exc, )
self._exc = exc
hub = hubs.get_hub()
for waiter in self._waiters:
hub.schedule_call_global(
0, self._do_send, self._result, self._exc, waiter)
def _do_send(self, result, exc, waiter):
if waiter in self._waiters:
if exc is None:
waiter.switch(result)
else:
waiter.throw(*exc)
def send_exception(self, *args):
"""Same as :meth:`send`, but sends an exception to waiters.
The arguments to send_exception are the same as the arguments
to ``raise``. If a single exception object is passed in, it
will be re-raised when :meth:`wait` is called, generating a
new stacktrace.
>>> from eventlet import event
>>> evt = event.Event()
>>> evt.send_exception(RuntimeError())
>>> evt.wait()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "eventlet/event.py", line 120, in wait
current.throw(*self._exc)
RuntimeError
If it's important to preserve the entire original stack trace,
you must pass in the entire :func:`sys.exc_info` tuple.
>>> import sys
>>> evt = event.Event()
>>> try:
... raise RuntimeError()
... except RuntimeError:
... evt.send_exception(*sys.exc_info())
...
>>> evt.wait()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "eventlet/event.py", line 120, in wait
current.throw(*self._exc)
File "<stdin>", line 2, in <module>
RuntimeError
Note that doing so stores a traceback object directly on the
Event object, which may cause reference cycles. See the
:func:`sys.exc_info` documentation.
"""
        # the arguments are the same as for greenlet.throw
return self.send(None, args)
| sbadia/pkg-python-eventlet | eventlet/event.py | Python | mit | 7,095 |
from PyQt4 import QtCore, QtGui
from components.propertyeditor.Property import Property
from components.RestrictFileDialog import RestrictFileDialog
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import sys, os
class QPropertyModel(QtCore.QAbstractItemModel):
def __init__(self, parent):
super(QPropertyModel, self).__init__(parent)
self.rootItem = Property("Root", "Root", 0, None);
def index (self, row, column, parent):
parentItem = self.rootItem;
if (parent.isValid()):
parentItem = parent.internalPointer()
if (row >= parentItem.childCount() or row < 0):
return QtCore.QModelIndex();
return self.createIndex(row, column, parentItem.child(row))
def getIndexForNode(self, node):
return self.createIndex(node.row(), 1, node)
def getPropItem(self, name, parent=None):
if(parent == None):
parent = self.rootItem
for item in parent.childItems:
if(item.name == name):
return item
return None
def headerData (self, section, orientation, role) :
if (orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole) :
if (section == 0) :
return "Property"
elif (section == 1) :
return "Value"
return None # QtCore.QVariant();
def flags (self, index ):
if (not index.isValid()):
return QtCore.Qt.ItemIsEnabled;
item = index.internalPointer();
if (index.column() == 0):
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
# only allow change of value attribute
if (item.isRoot()):
return QtCore.Qt.ItemIsEnabled;
elif (item.readOnly):
return QtCore.Qt.ItemIsDragEnabled
else:
return QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsEditable;
def parent(self, index):
if not index.isValid():
return QtCore.QModelIndex()
childItem = index.internalPointer()
parentItem = childItem.parentItem
if parentItem == None or parentItem == self.rootItem:
return QtCore.QModelIndex()
        # The index row must be the parent's position among its siblings.
        return self.createIndex(parentItem.row(), 0, parentItem)
def rowCount ( self, parent ):
parentItem = self.rootItem;
if (parent.isValid()):
parentItem = parent.internalPointer()
return len(parentItem.childItems)
def columnCount (self, parent):
return 2
def data (self, index, role):
if (not index.isValid()):
return None
item = index.internalPointer()
if(item.editor_type == Property.IMAGE_EDITOR):
if (index.column() == 0) and (
role == QtCore.Qt.ToolTipRole or
role == QtCore.Qt.DecorationRole or
role == QtCore.Qt.DisplayRole or
role == QtCore.Qt.EditRole):
return item.label.replace('_', ' ');
if (index.column() == 1):
if(role == QtCore.Qt.DecorationRole):
if(item.value['icon'] != None and not item.value['icon'].isNull()):
return item.value['icon'].scaled(18, 18)
else:
return None
if(role == QtCore.Qt.DisplayRole):
return item.value['url']
if(role == QtCore.Qt.EditRole):
return item.value
else:
if(role == QtCore.Qt.ToolTipRole or
role == QtCore.Qt.DecorationRole or
role == QtCore.Qt.DisplayRole or
role == QtCore.Qt.EditRole):
if (index.column() == 0):
return item.label.replace('_', ' ');
if (index.column() == 1):
return item.value
if(role == QtCore.Qt.BackgroundRole):
if (item.isRoot()):
return QtGui.QApplication.palette("QTreeView").brush(QtGui.QPalette.Normal, QtGui.QPalette.Button).color();
return None
def getItem(self, index):
if index.isValid():
item = index.internalPointer()
if item:
return item
return self.rootItem
def insertRows(self, position, rows, parent=QtCore.QModelIndex()):
parentItem = self.getItem(parent)
self.beginInsertRows(parent, position, position + rows - 1)
for row in range(rows):
            success = parentItem.insertChild(position + row) is not None
self.endInsertRows()
return success
def removeRows(self, position, rows, parent=QtCore.QModelIndex()):
parentItem = self.getItem(parent)
self.beginRemoveRows(parent, position, position + rows - 1)
success = parentItem.removeChildren(position, rows)
self.endRemoveRows()
return success
# edit methods
def setData(self, index, value, role = QtCore.Qt.EditRole):
if (index.isValid() and role == Qt.EditRole):
item = index.internalPointer()
item.setValue(value)
self.dataChanged.emit(index, index)
return True;
return False
def import_module_from_file(self, full_path_to_module):
"""
        Import a module given the full path/filename of the .py file.
        Uses importlib on Python >= 3.4 and imp on older versions.
"""
module = None
# Get module name and path from full path
module_dir, module_file = os.path.split(full_path_to_module)
module_name, module_ext = os.path.splitext(module_file)
if(sys.version_info >= (3,4)):
import importlib
# Get module "spec" from filename
spec = importlib.util.spec_from_file_location(module_name,full_path_to_module)
module = spec.loader.load_module()
else:
import imp
module = imp.load_source(module_name,full_path_to_module)
return module
def getModuleFuncList(self, module_name):
import inspect
func_list = []
if(module_name != ''):
try:
                module_name = os.path.join(os.getcwd(), module_name)
module = self.import_module_from_file(module_name)
all_functions = inspect.getmembers(module, inspect.isfunction)
for function in all_functions:
func_list.append(function[0])
except:
pass
return func_list
def getModuleName(self, editor):
module_name = QFileDialog.getOpenFileName(None, 'Open File', '.', "All file(*.*);;Python (*.py)")
module_name = os.path.relpath(module_name, os.getcwd())
if (module_name == ''): return
prop_root = self.getPropItem('properties')
module_name_prop= self.getPropItem('module_name', prop_root)
module_name_prop.setValue(module_name)
module_name_index = self.getIndexForNode(module_name_prop)
self.dataChanged.emit(module_name_index, module_name_index)
function_name_prop= self.getPropItem('function_name', prop_root)
function_name_prop.editor_type = Property.COMBO_BOX_EDITOR
function_name_prop.editor_data = self.getModuleFuncList(module_name)
function_name_index = self.getIndexForNode(function_name_prop)
self.dataChanged.emit(function_name_index, function_name_index)
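if __name__ == '__main__':
    # Minimal usage sketch: the model plugs into any QTreeView; column 0
    # shows property labels, column 1 shows editable values. Properties
    # would normally be inserted under model.rootItem before showing.
    app = QtGui.QApplication(sys.argv)
    view = QtGui.QTreeView()
    model = QPropertyModel(view)
    view.setModel(model)
    view.show()
    sys.exit(app.exec_())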
| go2net/PythonBlocks | components/propertyeditor/QPropertyModel.py | Python | mit | 7,747 |
# -*- coding: utf-8 -*-
"""
Sitemap builder
"""
import json, os
from treelib import Tree
from optimus.conf import settings
class SitemapError(Exception):
pass
class PageSitemap(object):
"""
Construct ressource page to build and published sitemap
"""
def __init__(self, tree, view, with_root=False):
self.tree = json.loads(tree.to_json(with_data=True))
self.view = view
self.with_root = with_root # For public sitemap
# Public sitemap
self.sitemap = self.get_public_sitemap(self.tree)
# Store a flat list of every ressources to build as pages
self.ressources = self.recursive_ressources([self.tree])
def get_public_sitemap(self, tree):
"""
Return a list of sitemap nodes
If 'PageSitemap.with_root' is False, return only root children nodes,
else return the full dict containing root node.
"""
if not self.with_root:
return tree['root']['children']
return [tree]
    def recursive_ressources(self, children, pages=None):
        """
        Return a flat ressources list from given children
        """
        # Avoid a shared mutable default argument: a module-level [] would
        # keep accumulating pages across PageSitemap instances.
        if pages is None:
            pages = []
        for branch in children:
            for leaf_name, leaf_content in branch.items():
                datas = leaf_content['data']
                pages.append(self.view(
                    title=leaf_name,
                    template_name=datas['link'],
                    destination=datas['link'],
                    sitemap=self.sitemap,
                ))
                if datas['is_dir']:
                    pages = self.recursive_ressources(
                        leaf_content['children'], pages)
        return pages
def tree_from_directory_structure(scanned_path, base_path=None):
"""
Scan given "scanned_path" path to find every HTML page file to build sitemap.
Assume you want to use templates file names as ressource filename url.
* Filenames and directory starting with "_" are ignored;
* Expect an "index.html" file in each directory (except ignored ones) which
will take the directory name;
    Return a treelib.Tree of the found pages
"""
tree = Tree()
tree.create_node("root", "root", data={
'id': "root",
'link': 'index.html',
'is_dir': True,
})
if base_path is None:
base_path = scanned_path
for root, dirs, files in os.walk(scanned_path):
# Current relative dir from demos dir
relative_dir = os.path.relpath(root, base_path)
if not relative_dir.startswith('_'):
if relative_dir == '.':
parent = None
current_dir = "root"
dir_name = "Root"
else:
dir_name = os.path.basename(relative_dir)
current_dir = relative_dir
# Resolve parent tag
parent = "/".join(os.path.split(relative_dir)[:-1])
if not parent:
parent = "root"
# Add directory node
tree.create_node(dir_name.replace('_', ' '), current_dir, parent=parent, data={
'id': current_dir,
'link': os.path.join(relative_dir, 'index.html'),
'is_dir': True,
})
#print "dir_name:{dir_name} | current_dir:{current_dir} | relative_dir:{relative_dir} | parent:{parent}".format(
#dir_name=dir_name, current_dir=current_dir, relative_dir=relative_dir, parent=parent)
# Recursive find templates in dirs
for item in files:
if not item.startswith('_') and item != 'index.html':
# Get filepath relative to root, remove leading './'
filepath = os.path.join(relative_dir, item)
if filepath.startswith('./'):
filepath = filepath[2:]
# Build unique tag identifier
tag = filepath
#print " * file:{filename} | tag:{tag} | parent:{parent}".format(filename=item, tag=tag, parent=current_dir)
# Make title
head, tail = os.path.splitext(item)
title = head.replace('_', ' ')
# Add file node to current directory node
tree.create_node(title, tag, parent=current_dir, data={
'id': tag,
'link': filepath,
'is_dir': False,
})
#print
return tree
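# Minimal usage sketch (assumes an Optimus project with page templates under
# a "pages/" directory and a page view class named PageView):
#
#   tree = tree_from_directory_structure('pages')
#   sitemap = PageSitemap(tree, PageView)
#   pages_to_build = sitemap.ressources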
| sveetch/Sveetoy | project/sitemap.py | Python | mit | 4,546 |
import rcblog
if __name__ == '__main__':
rcblog.main()
| sanchopanca/rcblog | run.py | Python | mit | 60 |
# -*- coding: utf-8 -*-
# @Time : 2017/7/27 18:04
# @Author : play4fun
# @File : calibrateCamera2.py
# @Software: PyCharm
"""
calibrateCamera2.py:
"""
import cv2
import numpy as np
def draw_axis(img, charuco_corners, charuco_ids, board):
vecs = np.load("./calib.npz") # I already calibrated the camera
mtx, dist, _, _ = [vecs[i] for i in ('mtx', 'dist', 'rvecs', 'tvecs')]
ret, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard(
charuco_corners, charuco_ids, board, mtx, dist)
    if ret:
cv2.aruco.drawAxis(img, mtx, dist, rvec, tvec, 0.1)
def get_image(camera):
ret, img = camera.read()
return img
def make_grayscale(img):
ret = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return ret
def main():
    # The original snippet never defined aruco_dict, mtx or dist; the values
    # below are assumptions: the dictionary must match the printed board, and
    # the intrinsics come from the same "calib.npz" used in draw_axis().
    aruco_dict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_4X4_50)
    vecs = np.load("./calib.npz")
    mtx, dist = vecs['mtx'], vecs['dist']
    # ChArUco board: 9x6 squares, 0.05 m square side, 0.03 m marker side.
    board = cv2.aruco.CharucoBoard_create(9, 6, 0.05, 0.03, aruco_dict)
    camera = cv2.VideoCapture(0)
    img = get_image(camera)
    while True:
        cv2.imshow('calibration', img)
        cv2.waitKey(10)
        img = get_image(camera)
        gray = make_grayscale(img)
        corners, ids, rejected = cv2.aruco.detectMarkers(gray, aruco_dict)
        cv2.aruco.drawDetectedMarkers(img, corners, ids)
        if ids is not None and corners is not None \
                and len(ids) > 0 and len(ids) == len(corners):
            # 0.05 / 0.03 is the square-to-marker length ratio of the board.
            diamond_corners, diamond_ids = \
                cv2.aruco.detectCharucoDiamond(img, corners, ids,
                                               0.05 / 0.03, cameraMatrix=mtx,
                                               distCoeffs=dist)
            cv2.aruco.drawDetectedDiamonds(img, diamond_corners, diamond_ids)
            '''if diamond_ids is not None and len(diamond_ids) >= 4:
                break'''
            # Only interpolate on frames where diamonds were actually found.
            if diamond_corners is not None and diamond_ids is not None \
                    and len(diamond_corners) == len(diamond_ids):
                count, char_corners, char_ids = \
                    cv2.aruco.interpolateCornersCharuco(diamond_corners,
                                                        diamond_ids, gray,
                                                        board)
                if count >= 3:
                    draw_axis(img, char_corners, char_ids, board)
if __name__ == '__main__':
main()
| makelove/OpenCV-Python-Tutorial | ch200_Extra_modules/aruco/Camera Calibration using ChArUco and Python/calibrateCamera2.py | Python | mit | 2,398 |
def hamming_distance(bytes1, bytes2):
distance = 0
for b1, b2 in zip(bytes1, bytes2):
xored = b1^b2
distance += sum(1 for n in range(8) if (xored >> n) & 0x01)
return distance
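if __name__ == '__main__':
    # Quick sanity check using the classic Cryptopals set 1 test vector:
    # the bitwise Hamming distance between these two strings is 37.
    assert hamming_distance(b'this is a test', b'wokka wokka!!!') == 37
    print('ok')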
| nfschmidt/cryptopals | python/byte.py | Python | mit | 206 |
"""
Module containing MPG Ranch NFC coarse classifier, version 3.1.
An NFC coarse classifier classifies an unclassified clip as a `'Call'`
if it appears to be a nocturnal flight call, or as a `'Noise'` otherwise.
It does not classify a clip that has already been classified, whether
manually or automatically.
This classifier uses the same model as version 3.0, which was developed
for TensorFlow 1, but updated for TensorFlow 2.
The two classifiers were compared by running both on 16429 clips created
by the Old Bird Thrush Detector Redux 1.1 and 22505 clips created by the
Old Bird Tseep Detector Redux 1.1 from 17 nights of recordings made in
Ithaca, NY from 2021-04-03 through 2021-04-19. The older classifier ran
with TensorFlow 1.15.5 and the newer one with TensorFlow 2.5.0rc1. The
two classifiers labeled the clips exactly the same. 1711 thrush clips
were labeled as calls and the others as noises, and 2636 tseep clips
were labeled as calls and the others as noises.
"""
from collections import defaultdict
import logging
import numpy as np
import resampy
import tensorflow as tf
from vesper.command.annotator import Annotator
from vesper.django.app.models import AnnotationInfo
from vesper.singleton.clip_manager import clip_manager
from vesper.util.settings import Settings
import vesper.django.app.model_utils as model_utils
import vesper.mpg_ranch.nfc_coarse_classifier_3_1.classifier_utils as \
classifier_utils
import vesper.mpg_ranch.nfc_coarse_classifier_3_1.dataset_utils as \
dataset_utils
import vesper.util.open_mp_utils as open_mp_utils
import vesper.util.signal_utils as signal_utils
import vesper.util.yaml_utils as yaml_utils
_EVALUATION_MODE_ENABLED = False
'''
This classifier can run in one of two modes, *normal mode* and
*evaluation mode*. In normal mode, it annotates only unclassified clips,
assigning to each a "Classification" annotation value or either "Call"
or "Noise".
In evaluation mode, the classifier classifies every clip whose clip type
(e.g. "Tseep" or "Thrush") it recognizes and that already has a
classification that is "Noise" or starts with "Call" or "XCall".
The new classification is a function of both the existing classification
and the *normal classification* that the classifier would assign to the
clip in normal mode if it had no existing classification. The new
classifications are as follows (where the classification pairs are
(existing classification, normal classification)):
(Noise, Noise) -> Noise (i.e. no change)
(Noise, Call) -> FP
(Call*, Call) -> Call* (i.e. no change)
(Call*, Noise) -> FN* (i.e. only coarse part changes)
(XCall*, Call) -> XCallP* (i.e. only coarse part changes)
(XCall*, Noise) -> XCallN* (i.e. only coarse part changes)
This reclassifies clips for which the normal classification differs from
the existing classification in such a way that important sets of clips
(i.e. false positives, false negatives, excluded call positives, and
excluded call negatives) can subsequently be viewed in clip albums.
'''
class Classifier(Annotator):
extension_name = 'MPG Ranch NFC Coarse Classifier 3.1'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
open_mp_utils.work_around_multiple_copies_issue()
# Suppress TensorFlow INFO and DEBUG log messages.
logging.getLogger('tensorflow').setLevel(logging.WARN)
self._classifiers = dict(
(t, _Classifier(t)) for t in ('Tseep', 'Thrush'))
if _EVALUATION_MODE_ENABLED:
self._score_annotation_info = \
AnnotationInfo.objects.get(name='Score')
def annotate_clips(self, clips):
"""Annotates the specified clips with the appropriate classifiers."""
clip_lists = self._get_clip_lists(clips)
num_clips_classified = 0
for clip_type, clips in clip_lists.items():
classifier = self._classifiers.get(clip_type)
if classifier is not None:
# have classifier for this clip type
num_clips_classified += self._annotate_clips(clips, classifier)
return num_clips_classified
def _get_clip_lists(self, clips):
"""Gets a mapping from clip types to lists of clips to classify."""
clip_lists = defaultdict(list)
for clip in clips:
if _EVALUATION_MODE_ENABLED or \
self._get_annotation_value(clip) is None:
# clip should be classified
clip_type = model_utils.get_clip_type(clip)
clip_lists[clip_type].append(clip)
return clip_lists
def _annotate_clips(self, clips, classifier):
"""Annotates the specified clips with the specified classifier."""
num_clips_classified = 0
triples = classifier.classify_clips(clips)
# if _EVALUATION_MODE_ENABLED and len(triples) > 0:
# self._show_classification_errors(triples)
for clip, auto_classification, score in triples:
if auto_classification is not None:
if _EVALUATION_MODE_ENABLED:
old_classification = self._get_annotation_value(clip)
new_classification = self._get_new_classification(
old_classification, auto_classification)
if new_classification is not None:
self._annotate(clip, new_classification)
num_clips_classified += 1
self._set_clip_score(clip, score)
else:
# normal mode
self._annotate(clip, auto_classification)
num_clips_classified += 1
return num_clips_classified
def _get_new_classification(self, old_classification, auto_classification):
old = old_classification
auto = auto_classification
if old is None:
return None
elif old.startswith('Call') and auto == 'Noise':
return 'FN' + old[len('Call'):]
elif old == 'Noise' and auto == 'Call':
return 'FP'
elif old.startswith('XCall') and auto == 'Noise':
return 'XCallN' + old_classification[len('XCall'):]
elif old.startswith('XCall') and auto == 'Call':
return 'XCallP' + old_classification[len('XCall'):]
else:
return None
def _set_clip_score(self, clip, score):
value = '{:.3f}'.format(score)
model_utils.annotate_clip(
clip, self._score_annotation_info, value,
creating_user=self._creating_user,
creating_job=self._creating_job,
creating_processor=self._creating_processor)
def _show_classification_errors(self, triples):
num_positives = 0
num_negatives = 0
false_positives = []
false_negatives = []
for i, (clip, new_classification, score) in enumerate(triples):
old_classification = self._get_annotation_value(clip)
if old_classification.startswith('Call'):
num_positives += 1
if new_classification == 'Noise':
false_negatives.append(
(i, old_classification, new_classification, score))
else:
# old classification does not start with 'Call'
num_negatives += 1
if new_classification == 'Call':
false_positives.append(
(i, old_classification, new_classification, score))
num_clips = len(triples)
logging.info('Classified {} clips.'.format(num_clips))
self._show_classification_errors_aux(
'calls', false_negatives, num_positives)
self._show_classification_errors_aux(
'non-calls', false_positives, num_negatives)
num_errors = len(false_positives) + len(false_negatives)
accuracy = 100 * (1 - num_errors / num_clips)
logging.info(
'The overall accuracy was {:.1f} percent.'.format(accuracy))
def _show_classification_errors_aux(self, category, errors, num_clips):
num_errors = len(errors)
percent = 100 * num_errors / num_clips
logging.info((
            '{} of {} {} ({:.1f} percent) were incorrectly '
'classified:').format(num_errors, num_clips, category, percent))
for i, old_classification, new_classification, score in errors:
logging.info(
' {} {} -> {} {}'.format(
i, old_classification, new_classification, score))
class _Classifier:
def __init__(self, clip_type):
self.clip_type = clip_type
self._model = self._load_model()
self._settings = self._load_settings()
# Configure waveform slicing.
s = self._settings
fs = s.waveform_sample_rate
s2f = signal_utils.seconds_to_frames
self._waveform_start_time = \
s.waveform_start_time + s.inference_waveform_start_time_offset
self._waveform_duration = s.waveform_duration
self._waveform_length = s2f(self._waveform_duration, fs)
self._classification_threshold = \
self._settings.classification_threshold
def _load_model(self):
path = classifier_utils.get_keras_model_file_path(self.clip_type)
logging.info(f'Loading classifier model from "{path}"...')
return tf.keras.models.load_model(path)
def _load_settings(self):
path = classifier_utils.get_settings_file_path(self.clip_type)
logging.info(f'Loading classifier settings from "{path}"...')
text = path.read_text()
d = yaml_utils.load(text)
return Settings.create_from_dict(d)
def classify_clips(self, clips):
# logging.info('Collecting clip waveforms for scoring...')
waveforms, indices = self._slice_clip_waveforms(clips)
if len(waveforms) == 0:
return []
else:
# have at least one waveform slice to classify
# Stack waveform slices to make 2-D NumPy array.
self._waveforms = np.stack(waveforms)
# logging.info('Scoring clip waveforms...')
dataset = \
dataset_utils.create_spectrogram_dataset_from_waveforms_array(
self._waveforms, dataset_utils.DATASET_MODE_INFERENCE,
self._settings, batch_size=64,
feature_name=self._settings.model_input_name)
scores = self._model.predict(dataset).flatten()
# logging.info('Classifying clips...')
triples = [
self._classify_clip(i, score, clips)
for i, score in zip(indices, scores)]
return triples
def _slice_clip_waveforms(self, clips):
waveforms = []
indices = []
for i, clip in enumerate(clips):
try:
waveform = self._get_clip_samples(clip)
except Exception as e:
logging.warning((
'Could not classify clip "{}", since its '
'samples could not be obtained. Error message was: '
'{}').format(str(clip), str(e)))
else:
# got clip samples
waveforms.append(waveform)
indices.append(i)
return waveforms, indices
def _get_clip_samples(self, clip):
clip_sample_rate = clip.sample_rate
classifier_sample_rate = self._settings.waveform_sample_rate
s2f = signal_utils.seconds_to_frames
start_offset = s2f(self._waveform_start_time, clip_sample_rate)
if clip_sample_rate != classifier_sample_rate:
# need to resample
# Get clip samples, including a millisecond of padding at
# the end. I don't know what if any guarantees the
# `resampy.resample` function offers about the relationship
# between its input and output lengths, so we add the padding
# to try to ensure that we don't wind up with too few samples
# after resampling.
length = s2f(self._waveform_duration + .001, clip_sample_rate)
samples = clip_manager.get_samples(
clip, start_offset=start_offset, length=length)
# Resample clip samples to classifier sample rate.
samples = resampy.resample(
samples, clip_sample_rate, classifier_sample_rate)
# Discard any extra trailing samples we wound up with.
samples = samples[:self._waveform_length]
if len(samples) < self._waveform_length:
raise ValueError('Resampling produced too few samples.')
else:
# don't need to resample
samples = clip_manager.get_samples(
clip, start_offset=start_offset, length=self._waveform_length)
return samples
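    # Worked example of the resampling path (all numbers illustrative): for
    # a 0.6 s slice, a 22050 Hz clip, and a 24000 Hz classifier rate, roughly
    # (0.6 + .001) * 22050 = 13252 padded input samples are read; resampling
    # to 24000 Hz yields roughly 14424 samples, which the trim then reduces
    # to the expected 0.6 * 24000 = 14400 (`self._waveform_length`).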
def _classify_clip(self, index, score, clips):
if score >= self._classification_threshold:
classification = 'Call'
else:
classification = 'Noise'
return clips[index], classification, score
| HaroldMills/Vesper | vesper/mpg_ranch/nfc_coarse_classifier_3_1/classifier.py | Python | mit | 14,590 |
from setuptools import setup, find_packages
setup(
name="simple-crawler",
version="0.1",
url="https://github.com/shonenada/crawler",
author="shonenada",
author_email="[email protected]",
description="Simple crawler",
zip_safe=True,
platforms="any",
packages=find_packages(),
install_requires=["requests==2.2.1"],
)
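
# Example usage (a sketch): from the repository root, install the package
# with `pip install .`, or build a source distribution with
# `python setup.py sdist`.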
| shonenada/crawler | setup.py | Python | mit | 359 |
import unittest
import numpy as np
import theano
import theano.tensor as T
from tests.helpers import (SimpleTrainer, SimpleClf, SimpleTransformer,
simple_reg)
from theano_wrapper.layers import (BaseLayer, HiddenLayer, MultiLayerBase,
BaseEstimator, BaseTransformer,
LinearRegression, LogisticRegression,
MultiLayerPerceptron, MultiLayerRegression,
TiedAutoEncoder, AutoEncoder)
# BASE LAYERS ################################################################
class TestBaseLayer(unittest.TestCase):
""" Tests for layer.py module, which includes various types of layers
for theano-wrapper
"""
def test_base_layer_has_params(self):
base = BaseLayer(100, 10)
self.assertTrue(hasattr(base, 'params'),
msg="Class has no attribute 'parameters'")
def test_base_layer_params_not_empty(self):
base = BaseLayer(100, 10)
self.assertTrue(base.params, msg="Class 'parameters' are empty")
def test_base_layer_no_args(self):
# Test if BaseLayer initializes as expected when given no
        # extra arguments
try:
BaseLayer(100, 10)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_base_layer_params_are_theano_shared_variables(self):
base = BaseLayer(100, 10)
for p in base.params:
self.assertIsInstance(p, theano.compile.SharedVariable)
def test_base_layer_has_input(self):
base = BaseLayer(100, 10)
self.assertTrue(hasattr(base, 'X'))
def test_base_layer_input_is_theano_variable(self):
base = BaseLayer(100, 10)
self.assertIsInstance(base.X, theano.tensor.TensorVariable)
def test_base_layer_weights_shape(self):
base = BaseLayer(100, 10)
self.assertEqual(base.W.get_value().shape, (100, 10))
def test_base_layer_bias_shape(self):
base = BaseLayer(100, 10)
self.assertEqual(base.b.get_value().shape, (10,))
def test_base_layer_weights_shape_single_output(self):
base = BaseLayer(100, 1)
self.assertEqual(base.W.get_value().shape, (100,))
def test_base_layer_bias_shape_single_output(self):
base = BaseLayer(100, 1)
self.assertEqual(base.b.get_value().shape, ())
def test_base_layer_no_output(self):
base = BaseLayer(100, 10)
self.assertFalse(hasattr(base, 'y'))
def test_base_layer_int_output(self):
base = BaseLayer(100, 10, y='int')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'int32')
def test_base_layer_float_output(self):
base = BaseLayer(100, 10, y='float')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'float32')
def test_base_layer_custom_weights(self):
try:
BaseLayer(100, 10, weights=np.random.random_sample((100, 10)))
except TypeError:
self.fail("Class did not accept 'weights' arg")
class TestHiddenLayer(unittest.TestCase):
""" Tests for HiddenLayer class.
This class is used only by other classes, so mostly basic stuff here.
"""
def test_hidden_layer_has_params(self):
base = HiddenLayer(100, 10)
self.assertTrue(hasattr(base, 'params'),
msg="Class has no attribute 'parameters'")
def test_hidden_layer_params_not_empty(self):
base = HiddenLayer(100, 10)
self.assertTrue(base.params, msg="Class 'parameters' are empty")
def test_hidden_layer_no_args(self):
# Test if HiddenLayer initializes as expected when given no
        # extra arguments
try:
HiddenLayer(100, 10)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_hidden_layer_params_are_theano_shared_variables(self):
base = HiddenLayer(100, 10)
for p in base.params:
self.assertIsInstance(p, theano.compile.SharedVariable)
def test_hidden_layer_has_input(self):
base = HiddenLayer(100, 10)
self.assertTrue(hasattr(base, 'X'))
def test_hidden_layer_input_is_theano_variable(self):
base = HiddenLayer(100, 10)
self.assertIsInstance(base.X, theano.tensor.TensorVariable)
def test_hidden_layer_weights_shape(self):
base = HiddenLayer(100, 10)
self.assertEqual(base.W.get_value().shape, (100, 10))
def test_hidden_layer_bias_shape(self):
base = HiddenLayer(100, 10)
self.assertEqual(base.b.get_value().shape, (10,))
def test_hidden_layer_weights_shape_single_output(self):
base = HiddenLayer(100, 1)
self.assertEqual(base.W.get_value().shape, (100,))
def test_hidden_layer_bias_shape_single_output(self):
base = HiddenLayer(100, 1)
self.assertEqual(base.b.get_value().shape, ())
def test_hidden_layer_no_output(self):
base = HiddenLayer(100, 10)
self.assertFalse(hasattr(base, 'y'))
def test_hidden_layer_int_output(self):
base = HiddenLayer(100, 10, y='int')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'int32')
def test_hidden_layer_float_output(self):
base = HiddenLayer(100, 10, y='float')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'float32')
class TestMultiLayerBase(unittest.TestCase):
""" Tests for MultiLayerBase class """
def test_multi_layer_base_has_params(self):
base = MultiLayerBase(100, 50, 10, SimpleClf)
self.assertTrue(hasattr(base, 'params'),
msg="Class has no attribute 'parameters'")
def test_multi_layer_base_params_not_empty(self):
base = MultiLayerBase(100, 50, 10, SimpleClf)
self.assertTrue(base.params, msg="Class 'parameters' are empty")
def test_multi_layer_base_no_args(self):
# Test if MultiLayerBase initializes as expected when given no
        # extra arguments
try:
MultiLayerBase(100, 50, 10, SimpleClf)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_multi_layer_base_single_layer(self):
        # Test if MultiLayerBase initializes as expected when given a
        # single hidden layer
try:
MultiLayerBase(100, 50, 10, SimpleClf)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_multi_layer_base_multi_layer_single_activation(self):
        # Test if MultiLayerBase initializes as expected when given
        # multiple hidden layers and a single activation
try:
MultiLayerBase(100, [100, 30, 50], 10, SimpleClf, lambda x: x)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_multi_layer_base_multi_layer_multi_activation(self):
        # Test if MultiLayerBase initializes as expected when given
        # multiple hidden layers and one activation per layer
try:
MultiLayerBase(100, [100, 30, 50], 10, SimpleClf,
[lambda x: x for i in range(3)])
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
class BaseEstimatorTransformerTests:
def test_has_trainers(self):
clf = self.Clf()
for t in ['epoch', 'sgd']:
self.assertIn(t, clf.trainer_aliases)
def test_builtin_sgd_trainer(self):
clf = self.Clf()
try:
clf.fit(*self.fit_args, 'sgd', max_iter=1)
except Exception as e:
self.fail("Fitting failed: %s" % str(e))
def test_builtin_sgd_trainer_all_args(self):
clf = self.Clf()
try:
clf.fit(*self.fit_args, 'sgd', alpha=0.1, batch_size=20,
max_iter=1, patience=100, p_inc=3, imp_thresh=0.9,
random=10, verbose=1000)
except Exception as e:
self.fail("Fitting failed: %s" % str(e))
def test_builtin_trainer_regularizer(self):
clf = self.Clf()
reg = simple_reg(clf)
try:
clf.fit(*self.fit_args, reg=reg, max_iter=2)
except Exception as e:
self.fail("Fitting failed: %s" % str(e))
class TestBaseEstimator(unittest.TestCase, BaseEstimatorTransformerTests):
TheBase = BaseEstimator
TheClf = SimpleClf
X = np.random.standard_normal((500, 100)).astype(np.float32)
y = np.random.randint(0, 9, (500,)).astype(np.int32)
fit_args = (X, y,)
def setUp(self):
class Clf(self.TheClf, self.TheBase):
def __init__(*args, **kwargs):
SimpleClf.__init__(*args, **kwargs)
self.Clf = Clf
class TestBaseTransformer(unittest.TestCase, BaseEstimatorTransformerTests):
TheBase = BaseTransformer
TheClf = SimpleTransformer
X = np.random.standard_normal((500, 100)).astype(np.float32)
fit_args = (X,)
    def setUp(self):
        class Clf(self.TheClf, self.TheBase):
            def __init__(*args, **kwargs):
                # `self` here is the enclosing TestCase instance, captured
                # by closure; the signature deliberately omits it.
                self.TheClf.__init__(*args, **kwargs)
        self.Clf = Clf
# ESTIMATORS #################################################################
class EstimatorTests:
X = np.random.standard_normal((500, 100)).astype(np.float32)
def test_estimator_has_params(self):
clf = self.estimator(*self.args)
self.assertTrue(hasattr(clf, 'params'))
self.assertIsNotNone(clf.params)
def test_estimator_has_output(self):
clf = self.estimator(*self.args)
self.assertIsInstance(clf.output, theano.tensor.TensorVariable)
def test_estimator_has_cost(self):
clf = self.estimator(*self.args)
self.assertIsInstance(clf.cost, theano.tensor.TensorVariable)
def test_estimator_fit(self):
trn = SimpleTrainer(self.estimator(*self.args))
try:
trn.fit(self.X, self.y)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_estimator_with_regularization(self):
clf = self.estimator(*self.args)
reg = simple_reg(clf)
try:
trn = SimpleTrainer(clf, reg)
trn.fit(self.X, self.y)
except Exception as e:
self.fail("Estimator failed: %s" % str(e))
def test_estimator_builtin_fit(self):
clf = self.estimator(*self.args)
try:
clf.fit(self.X, self.y, max_iter=1)
except Exception as e:
self.fail("Estimator failed: %s" % str(e))
def test_estimator_builtin_predict(self):
clf = self.estimator(*self.args)
clf.fit(self.X, self.y, max_iter=1)
pred = clf.predict(self.X)
self.assertEqual(pred.shape, (self.X.shape[0],))
class MultiLayerEstimatorMixin:
def test_estimator_fit_three_hidden_single_activation(self):
args = list(self.args)
# set n_hidden arg to an array of n_nodes for three layers
args[1] = [args[0], int(args[0]/2), int(args[0]/3)]
trn = SimpleTrainer(self.estimator(*args))
try:
trn.fit(self.X, self.y)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_estimator_random_arguement_int_seed(self):
        # The estimator should accept a random argument for initialization
# of weights. Here we test an integer seed.
trn = SimpleTrainer(self.estimator(*self.args, random=42))
try:
trn.fit(self.X, self.y)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_estimator_random_arguement_rng(self):
        # The estimator should accept a random argument for initialization
# of weights. Here we test a random state generator
trn = SimpleTrainer(self.estimator(*self.args,
random=np.random.RandomState(42)))
try:
trn.fit(self.X, self.y)
except Exception as e:
self.fail("Training failed: %s" % str(e))
class ClassificationTest(EstimatorTests):
y = np.random.randint(0, 9, (500,)).astype(np.int32)
class RegressionTest(EstimatorTests):
y = np.random.random((500,)).astype(np.float32)
def test_estimator_fit_multivariate(self):
args = list(self.args)
args[-1] = 5
y = np.random.random((500, 5)).astype(np.float32)
trn = SimpleTrainer(self.estimator(*args))
try:
trn.fit(self.X, y)
except Exception as e:
self.fail("Training failed: %s" % str(e))
class TestLinearRegression(unittest.TestCase, RegressionTest):
estimator = LinearRegression
args = (100, 1)
class TestLogisticRegression(unittest.TestCase, ClassificationTest):
estimator = LogisticRegression
args = (100, 10)
class TestMultiLayerPerceptron(unittest.TestCase,
ClassificationTest, MultiLayerEstimatorMixin):
estimator = MultiLayerPerceptron
args = (100, 100, 10)
class TestMultiLayerRegression(unittest.TestCase,
RegressionTest, MultiLayerEstimatorMixin):
estimator = MultiLayerRegression
args = (100, 100, 1)
# TRANSFORMERS ###############################################################
class TransformerTests:
X = np.random.standard_normal((500, 100)).astype(np.float32)
def test_transformer_has_params(self):
clf = self.transformer(*self.args)
self.assertTrue(hasattr(clf, 'params'))
self.assertIsNotNone(clf.params)
def test_transformer_has_encode(self):
clf = self.transformer(*self.args)
self.assertIsInstance(clf.encode, theano.tensor.TensorVariable)
def test_transformer_has_cost(self):
clf = self.transformer(*self.args)
self.assertIsInstance(clf.cost, theano.tensor.TensorVariable)
def test_transformer_fit(self):
trn = SimpleTrainer(self.transformer(*self.args))
try:
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_transformer_with_regularization(self):
clf = self.transformer(*self.args)
reg = simple_reg(clf)
try:
trn = SimpleTrainer(clf, reg)
trn.fit(self.X)
except Exception as e:
self.fail("Estimator failed: %s" % str(e))
    def test_transformer_float_n_hidden(self):
args = list(self.args)
args[-1] = 0.5
trn = SimpleTrainer(self.transformer(*args))
try:
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_transformer_builtin_fit(self):
clf = self.transformer(*self.args)
try:
clf.fit(self.X, max_iter=1)
except Exception as e:
self.fail("Estimator failed: %s" % str(e))
def test_transformer_builtin_predict(self):
clf = self.transformer(*self.args)
clf.fit(self.X, max_iter=1)
pred = clf.predict(self.X)
        self.assertEqual(pred.shape, self.X.shape)
def test_transformer_builtin_transform(self):
clf = self.transformer(*self.args)
clf.fit(self.X, max_iter=1)
pred = clf.transform(self.X)
self.assertEqual(pred.shape, (self.X.shape[0], self.args[-1]))
class MultiLayerTransformerMixin:
def test_transformer_fit_three_hidden_single_activation(self):
args = list(self.args)
# set n_hidden arg to an array of n_nodes for three layers
args[1] = [args[0], int(args[0]/2), int(args[0]/3)]
trn = SimpleTrainer(self.transformer(*args))
try:
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_transformer_fit_three_hidden_all_activations(self):
args = list(self.args)
# set n_hidden arg to an array of n_nodes for three layers
args[1] = [args[0], int(args[0]/2), int(args[0]/3)]
activation = [T.nnet.sigmoid, T.nnet.softplus, T.nnet.softmax,
T.nnet.sigmoid]
trn = SimpleTrainer(self.transformer(*args, activation))
try:
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_transformer_random_arguement_int_seed(self):
        # The transformer should accept a random argument for initialization
# of weights. Here we test an integer seed.
trn = SimpleTrainer(self.transformer(*self.args, random=42))
try:
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_transformer_random_arguement_rng(self):
        # The transformer should accept a random argument for initialization
# of weights. Here we test a random state generator
trn = SimpleTrainer(self.transformer(*self.args,
random=np.random.RandomState(42)))
try:
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
class TestTiedAutoEncoder(unittest.TestCase, TransformerTests):
transformer = TiedAutoEncoder
args = (100, 50)
class TestAutoEncoder(unittest.TestCase, TransformerTests,
MultiLayerTransformerMixin):
transformer = AutoEncoder
args = (100, 50)
def test_cost_cross_entropy(self):
try:
trn = SimpleTrainer(self.transformer(*self.args,
cost='cross_entropy'))
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
def test_denoising_mode(self):
try:
trn = SimpleTrainer(self.transformer(*self.args,
corrupt=0.1))
trn.fit(self.X)
except Exception as e:
self.fail("Training failed: %s" % str(e))
| sotlampr/theano-wrapper | tests/test_layers.py | Python | mit | 18,340 |
# Past examples are programmatically insecure
# You require arguments to be passed in but what if the wrong arguments are provided?
# Look at the timestable solution which changes numbers to text - what happens if you provide the number 30?
#
# One way of controlling these things uses conditions
# These enable specific operations to be carried out "if" something is the case or "else" something else is the case
a = 5
# first condition trial
if a > 5:
    print("Value is greater than 5")
else:
    print("Value is 5 or less")
# second condition trial
if a > 5:
    print("Value is greater than 5")
elif a < 5:
    print("Value is less than 5")
else:
    print("Value is 5")
# if and (2 conditions)
a = 3
b = 5
if (a==3) and (b==5):
print("a and b are as expected - great :)")
else:
print("a and b not as expected - not great :(") | Chris35Wills/Chris35Wills.github.io | courses/examples/Beginners_python/conditions.py | Python | mit | 826 |
import py
from rpython.annotator import model as annmodel
from rpython.rtyper.llannotation import SomePtr, lltype_to_annotation
from rpython.conftest import option
from rpython.rtyper.annlowlevel import (annotate_lowlevel_helper,
MixLevelHelperAnnotator, PseudoHighLevelCallable, llhelper,
cast_instance_to_base_ptr, cast_base_ptr_to_instance)
from rpython.rtyper.llinterp import LLInterpreter
from rpython.rtyper.lltypesystem.lltype import *
from rpython.rtyper.rclass import fishllattr, OBJECTPTR
from rpython.rtyper.test.test_llinterp import interpret
from rpython.translator.translator import TranslationContext
# helpers
def annotated_calls(ann, ops=('simple_call',)):
for block in ann.annotated:
for op in block.operations:
if op.opname in ops:
yield op
def derived(op, orig):
if op.args[0].value.__name__.startswith(orig):
return op.args[0].value
else:
return None
class TestLowLevelAnnotateTestCase:
from rpython.annotator.annrpython import RPythonAnnotator
def annotate(self, ll_function, argtypes):
self.a = self.RPythonAnnotator()
graph = annotate_lowlevel_helper(self.a, ll_function, argtypes)
if option.view:
self.a.translator.view()
return self.a.binding(graph.getreturnvar())
def test_simple(self):
S = GcStruct("s", ('v', Signed))
def llf():
s = malloc(S)
return s.v
s = self.annotate(llf, [])
assert s.knowntype == int
def test_simple2(self):
S = Struct("s", ('v', Signed))
S2 = GcStruct("s2", ('a', S), ('b', S))
def llf():
s = malloc(S2)
return s.a.v+s.b.v
s = self.annotate(llf, [])
assert s.knowntype == int
def test_array(self):
A = GcArray(('v', Signed))
def llf():
a = malloc(A, 1)
return a[0].v
s = self.annotate(llf, [])
assert s.knowntype == int
def test_array_longlong(self):
from rpython.rlib.rarithmetic import r_longlong
A = GcArray(('v', Signed))
one = r_longlong(1)
def llf():
a = malloc(A, one)
return a[0].v
s = self.annotate(llf, [])
assert s.knowntype == int
def test_prim_array(self):
A = GcArray(Signed)
def llf():
a = malloc(A, 1)
return a[0]
s = self.annotate(llf, [])
assert s.knowntype == int
def test_prim_array_setitem(self):
A = GcArray(Signed)
def llf():
a = malloc(A, 1)
a[0] = 3
return a[0]
s = self.annotate(llf, [])
assert s.knowntype == int
def test_cast_simple_widening(self):
S2 = Struct("s2", ('a', Signed))
S1 = Struct("s1", ('sub1', S2), ('sub2', S2))
PS1 = Ptr(S1)
PS2 = Ptr(S2)
def llf(p1):
p2 = p1.sub1
p3 = cast_pointer(PS1, p2)
return p3
s = self.annotate(llf, [SomePtr(PS1)])
assert isinstance(s, SomePtr)
assert s.ll_ptrtype == PS1
def test_cast_simple_widening_from_gc(self):
S2 = GcStruct("s2", ('a', Signed))
S1 = GcStruct("s1", ('sub1', S2), ('x', Signed))
PS1 = Ptr(S1)
def llf():
p1 = malloc(S1)
p2 = p1.sub1
p3 = cast_pointer(PS1, p2)
return p3
s = self.annotate(llf, [])
assert isinstance(s, SomePtr)
assert s.ll_ptrtype == PS1
def test_cast_pointer(self):
S3 = GcStruct("s3", ('a', Signed))
S2 = GcStruct("s3", ('sub', S3))
S1 = GcStruct("s1", ('sub', S2))
PS1 = Ptr(S1)
PS2 = Ptr(S2)
PS3 = Ptr(S3)
def llf():
p1 = malloc(S1)
p2 = p1.sub
p3 = p2.sub
p12 = cast_pointer(PS1, p2)
p13 = cast_pointer(PS1, p3)
p21 = cast_pointer(PS2, p1)
p23 = cast_pointer(PS2, p3)
p31 = cast_pointer(PS3, p1)
p32 = cast_pointer(PS3, p2)
return p12, p13, p21, p23, p31, p32
s = self.annotate(llf, [])
assert [x.ll_ptrtype for x in s.items] == [PS1, PS1, PS2, PS2, PS3, PS3]
def test_array_length(self):
A = GcArray(('v', Signed))
def llf():
a = malloc(A, 1)
return len(a)
s = self.annotate(llf, [])
assert s.knowntype == int
def test_funcptr(self):
F = FuncType((Signed,), Signed)
PF = Ptr(F)
def llf(p):
return p(0)
s = self.annotate(llf, [SomePtr(PF)])
assert s.knowntype == int
def test_ll_calling_ll(self):
A = GcArray(Float)
B = GcArray(Signed)
def ll_make(T, n):
x = malloc(T, n)
return x
def ll_get(T, x, i):
return x[i]
def llf():
a = ll_make(A, 3)
b = ll_make(B, 2)
a[0] = 1.0
b[1] = 3
y0 = ll_get(A, a, 1)
y1 = ll_get(B, b, 1)
#
a2 = ll_make(A, 4)
a2[0] = 2.0
return ll_get(A, a2, 1)
s = self.annotate(llf, [])
a = self.a
assert s == annmodel.SomeFloat()
seen = {}
ngraphs = len(a.translator.graphs)
vTs = []
for call in annotated_calls(a):
if derived(call, "ll_"):
func, T = [x.value for x in call.args[0:2]]
if (func, T) in seen:
continue
seen[func, T] = True
desc = a.bookkeeper.getdesc(func)
g = desc.specialize([a.binding(x) for x in call.args[1:]])
args = g.getargs()
rv = g.getreturnvar()
if func is ll_get:
vT, vp, vi = args
assert a.binding(vT) == a.bookkeeper.immutablevalue(T)
assert a.binding(vi).knowntype == int
assert a.binding(vp).ll_ptrtype.TO == T
assert a.binding(rv) == lltype_to_annotation(T.OF)
elif func is ll_make:
vT, vn = args
assert a.binding(vT) == a.bookkeeper.immutablevalue(T)
assert a.binding(vn).knowntype == int
assert a.binding(rv).ll_ptrtype.TO == T
else:
assert False, func
vTs.append(vT)
assert len(seen) == 4
return a, vTs # reused by a test in test_rtyper
def test_ll_calling_ll2(self):
A = GcArray(Float)
B = GcArray(Signed)
def ll_make(T, n):
x = malloc(T, n)
return x
def ll_get(x, i):
return x[i]
def makelen4(T):
return ll_make(T, 4)
def llf():
a = ll_make(A, 3)
b = ll_make(B, 2)
a[0] = 1.0
b[1] = 3
y0 = ll_get(a, 1)
y1 = ll_get(b, 1)
#
a2 = makelen4(A)
a2[0] = 2.0
return ll_get(a2, 1)
s = self.annotate(llf, [])
a = self.a
assert s == annmodel.SomeFloat()
seen = {}
def q(v):
s = a.binding(v)
if s.is_constant():
return s.const
else:
return s.ll_ptrtype
vTs = []
for call in annotated_calls(a):
if derived(call, "ll_") or derived(call, "makelen4"):
func, T = [q(x) for x in call.args[0:2]]
if (func, T) in seen:
continue
seen[func, T] = True
desc = a.bookkeeper.getdesc(func)
g = desc.specialize([a.binding(x) for x in call.args[1:]])
args = g.getargs()
rv = g.getreturnvar()
if func is ll_make:
vT, vn = args
assert a.binding(vT) == a.bookkeeper.immutablevalue(T)
assert a.binding(vn).knowntype == int
assert a.binding(rv).ll_ptrtype.TO == T
vTs.append(vT)
elif func is makelen4:
vT, = args
assert a.binding(vT) == a.bookkeeper.immutablevalue(T)
assert a.binding(rv).ll_ptrtype.TO == T
vTs.append(vT)
elif func is ll_get:
vp, vi = args
assert a.binding(vi).knowntype == int
assert a.binding(vp).ll_ptrtype == T
assert a.binding(rv) == lltype_to_annotation(
T.TO.OF)
else:
assert False, func
assert len(seen) == 5
return a, vTs # reused by a test in test_rtyper
def test_ll_stararg(self):
A = GcArray(Float)
B = GcArray(Signed)
def ll_sum(*args):
result = 0
if len(args) > 0:
result += args[0]
if len(args) > 1:
result += args[1]
if len(args) > 2:
result += args[2]
if len(args) > 3:
result += args[3]
return result
def llf():
a = ll_sum()
b = ll_sum(4, 5)
c = ll_sum(2.5)
d = ll_sum(4, 5.25)
e = ll_sum(1000, 200, 30, 4)
f = ll_sum(1000, 200, 30, 5)
return a, b, c, d, e, f
s = self.annotate(llf, [])
assert isinstance(s, annmodel.SomeTuple)
assert s.items[0].knowntype is int
assert s.items[0].const == 0
assert s.items[1].knowntype is int
assert s.items[2].knowntype is float
assert s.items[3].knowntype is float
assert s.items[4].knowntype is int
assert s.items[5].knowntype is int
def test_str_vs_ptr(self):
S = GcStruct('s', ('x', Signed))
def ll_stuff(x):
if x is None or isinstance(x, str):
return 2
else:
return 3
def llf():
x = ll_stuff("hello")
y = ll_stuff(nullptr(S))
return x, y
s = self.annotate(llf, [])
assert isinstance(s, annmodel.SomeTuple)
assert s.items[0].is_constant()
assert s.items[0].const == 2
assert s.items[1].is_constant()
assert s.items[1].const == 3
def test_getRuntimeTypeInfo(self):
S = GcStruct('s', ('x', Signed), rtti=True)
def llf():
return getRuntimeTypeInfo(S)
s = self.annotate(llf, [])
assert isinstance(s, SomePtr)
assert s.ll_ptrtype == Ptr(RuntimeTypeInfo)
assert s.const == getRuntimeTypeInfo(S)
def test_runtime_type_info(self):
S = GcStruct('s', ('x', Signed), rtti=True)
def llf(p):
return runtime_type_info(p)
s = self.annotate(llf, [SomePtr(Ptr(S))])
assert isinstance(s, SomePtr)
assert s.ll_ptrtype == Ptr(RuntimeTypeInfo)
def test_cast_primitive(self):
def llf(u):
return cast_primitive(Signed, u)
s = self.annotate(llf, [annmodel.SomeInteger(unsigned=True)])
assert s.knowntype == int
def llf(s):
return cast_primitive(Unsigned, s)
s = self.annotate(llf, [annmodel.SomeInteger()])
assert s.unsigned == True
def test_pbctype(self):
TYPE = Void
TYPE2 = Signed
def g(lst):
n = lst[0]
if isinstance(TYPE, Number):
result = 123
else:
result = 456
if isinstance(TYPE2, Number):
result += 1
return result + n
def llf():
lst = [5]
g(lst)
lst.append(6)
self.annotate(llf, [])
def test_adtmeths(self):
def h_length(s):
return s.foo
S = GcStruct("S", ('foo', Signed),
adtmeths={"h_length": h_length,
"stuff": 12})
def llf():
s = malloc(S)
s.foo = 321
return s.h_length()
s = self.annotate(llf, [])
assert s.knowntype == int and not s.is_constant()
def llf():
s = malloc(S)
return s.stuff
s = self.annotate(llf, [])
assert s.is_constant() and s.const == 12
def test_pseudohighlevelcallable():
t = TranslationContext()
t.buildannotator()
rtyper = t.buildrtyper()
rtyper.specialize()
a = MixLevelHelperAnnotator(rtyper)
class A:
value = 5
def double(self):
return self.value * 2
def fn1(a):
a2 = A()
a2.value = a.double()
return a2
s_A, r_A = a.s_r_instanceof(A)
fn1ptr = a.delayedfunction(fn1, [s_A], s_A)
pseudo = PseudoHighLevelCallable(fn1ptr, [s_A], s_A)
def fn2(n):
a = A()
a.value = n
a2 = pseudo(a)
return a2.value
graph = a.getgraph(fn2, [annmodel.SomeInteger()], annmodel.SomeInteger())
a.finish()
llinterp = LLInterpreter(rtyper)
res = llinterp.eval_graph(graph, [21])
assert res == 42
def test_llhelper():
S = GcStruct('S', ('x', Signed), ('y', Signed))
def f(s,z):
return s.x*s.y+z
def g(s):
return s.x+s.y
F = Ptr(FuncType([Ptr(S), Signed], Signed))
G = Ptr(FuncType([Ptr(S)], Signed))
def h(x, y, z):
s = malloc(S)
s.x = x
s.y = y
fptr = llhelper(F, f)
gptr = llhelper(G, g)
assert typeOf(fptr) == F
return fptr(s, z)+fptr(s, z*2)+gptr(s)
res = interpret(h, [8, 5, 2])
assert res == 99
def test_llhelper_multiple_functions():
S = GcStruct('S', ('x', Signed), ('y', Signed))
def f(s):
return s.x - s.y
def g(s):
return s.x + s.y
F = Ptr(FuncType([Ptr(S)], Signed))
myfuncs = [f, g]
def h(x, y, z):
s = malloc(S)
s.x = x
s.y = y
fptr = llhelper(F, myfuncs[z])
assert typeOf(fptr) == F
return fptr(s)
res = interpret(h, [80, 5, 0])
assert res == 75
res = interpret(h, [80, 5, 1])
assert res == 85
def test_cast_instance_to_base_ptr():
class A:
def __init__(self, x, y):
self.x = x
self.y = y
def f(x, y):
if x > 20:
a = None
else:
a = A(x, y)
a1 = cast_instance_to_base_ptr(a)
return a1
res = interpret(f, [5, 10])
assert typeOf(res) == OBJECTPTR
assert fishllattr(res, 'x') == 5
assert fishllattr(res, 'y') == 10
res = interpret(f, [25, 10])
assert res == nullptr(OBJECTPTR.TO)
def test_cast_base_ptr_to_instance():
class A:
def __init__(self, x, y):
self.x = x
self.y = y
def f(x, y):
if x > 20:
a = None
else:
a = A(x, y)
a1 = cast_instance_to_base_ptr(a)
b = cast_base_ptr_to_instance(A, a1)
return a is b
assert f(5, 10) is True
assert f(25, 10) is True
res = interpret(f, [5, 10])
assert res is True
res = interpret(f, [25, 10])
assert res is True
| oblique-labs/pyVM | rpython/rtyper/test/test_llann.py | Python | mit | 15,371 |
#Import Libraries
import eventmaster as EM
from time import sleep
import random
import sys
""" Create new Instance of EventMasterSwitcher and turn off logging """
s3 = EM.EventMasterSwitcher()
s3.setVerbose(0)
with open('example_settings_.xml', 'r') as content_file:
content = content_file.read()
s3.loadFromXML(content)
""" Enumerate all Inputs and print known information for each """
print("# Inputs")
for input_id, input_inst in s3.getInputs().items():
input_name = input_inst.getName()
frozen_string = "is Frozen" if input_inst.getFreeze() else "is not Frozen"
print(" ({0!s}) {1!s} {2!s}".format(input_id, input_name, frozen_string))
""" Enumerate all Outputs and print known information for each """
print("\r\n# Outputs")
for output_id, output_inst in s3.getOutputs().items():
output_name = output_inst.getName()
print(" ({0!s}) {1!s}".format(output_id, output_name))
""" Enumerate all Presets and print known information for each """
print("\r\n# Presets")
for preset_id, preset_inst in s3.getPresets().items():
preset_name = preset_inst.getName()
print(" ({0!s}) {1!s}".format(preset_id, preset_name))
""" Enumerate all Destinations and print known information for each """
print("\r\n# Destinations:")
for dest_id, dest_inst in s3.getScreenDests().items():
dest_numoflayers = len(dest_inst.getLayers())
dest_name = dest_inst.getName()
dest_size = dest_inst.getSize()
print("\n {1!s} is {2!s} x {3!s} & has {4!s} layer(s)".format( dest_id,
dest_name,
dest_size["HSize"],
dest_size["VSize"],
dest_numoflayers))
""" Enumerate all Layers for Destination and print known information for each """
for layer_number, layer_inst in dest_inst.getLayers().items():
if "Pvw" in layer_inst:
layer_name_pvw = layer_inst["Pvw"].getName()
layer_owin_pvw = layer_inst["Pvw"].getOWIN()
layer_hpos_pvw = layer_owin_pvw["HPos"]
layer_hsize_pvw = layer_owin_pvw["HSize"]
layer_vpos_pvw = layer_owin_pvw["VPos"]
layer_vsize_pvw = layer_owin_pvw["VSize"]
if layer_inst["Pvw"].getSource() is not None:
layer_source_name_pvw = layer_inst["Pvw"].getSource().getName()
else:
layer_source_name_pvw = "(Unknown)"
size_string_pvw = " {4!s} is on PVW - {0!s}x{1!s} at {2!s},{3!s}".format(layer_hsize_pvw, layer_vsize_pvw, layer_hpos_pvw, layer_vpos_pvw, layer_name_pvw)
source_string_pvw = " current source is {0!s}".format(layer_source_name_pvw)
else:
size_string_pvw = " Layer is not on PVW "
source_string_pvw = ""
if "Pgm" in layer_inst:
layer_name_pgm = layer_inst["Pgm"].getName()
layer_owin_pgm = layer_inst["Pgm"].getOWIN()
layer_hpos_pgm = layer_owin_pgm["HPos"]
layer_hsize_pgm = layer_owin_pgm["HSize"]
layer_vpos_pgm = layer_owin_pgm["VPos"]
layer_vsize_pgm = layer_owin_pgm["VSize"]
if layer_inst["Pgm"].getSource() is not None:
layer_source_name_pgm = layer_inst["Pgm"].getSource().getName()
else:
layer_source_name_pgm = "(Unknown)"
size_string_pgm = " {4!s} is on PGM - {0!s}x{1!s} at {2!s},{3!s}".format(layer_hsize_pgm, layer_vsize_pgm, layer_hpos_pgm, layer_vpos_pgm, layer_name_pgm)
source_string_pgm = " current source is {0!s}".format(layer_source_name_pgm)
else:
size_string_pgm = " Layer is not on PGM "
source_string_pgm = ""
size_string = " {4!s} is on PGM - {0!s}x{1!s} at {2!s},{3!s}".format(layer_hsize_pgm, layer_vsize_pgm, layer_hpos_pgm, layer_vpos_pgm, layer_name_pgm)
source_string = " current source is {0!s}".format(layer_source_name_pgm)
print(" ({0!s}) {1!s}\n {2!s}\n {3!s}\n {4!s}".format(layer_number+1, size_string_pgm, source_string_pgm, size_string_pvw, source_string_pvw))
sys.exit()
| kyelewisstgc/EventMaster-Python | tests/test_offline.py | Python | mit | 4,308 |
#!/usr/bin/python3
# This code is available for use under CC0 (Creative Commons 0 - universal).
# You can copy, modify, distribute and perform the work, even for commercial
# purposes, all without asking permission. For more information, see LICENSE.md or
# https://creativecommons.org/publicdomain/zero/1.0/
# usage:
# opts = Picker(
# title = 'Delete all files',
# options = ["Yes", "No"]
# ).getSelected()
# returns a simple list
# cancel returns False
import curses
import shutil
import signal
from curses import wrapper
class Picker:
"""Allows you to select from a list with curses"""
stdscr = None
win = None
title = ""
arrow = ""
footer = ""
more = ""
c_selected = ""
c_empty = ""
cursor = 0
offset = 0
selected = 0
selcount = 0
aborted = False
window_height = shutil.get_terminal_size().lines - 10
window_width = shutil.get_terminal_size().columns - 20
all_options = []
length = 0
def curses_start(self):
self.stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
self.win = curses.newwin(
5 + self.window_height,
self.window_width,
2,
4
)
def sigwinch_handler(self, n, frame):
self.window_height = shutil.get_terminal_size().lines - 10
self.window_width = shutil.get_terminal_size().columns - 20
curses.endwin()
self.stdscr.clear()
self.stdscr = curses.initscr()
self.win = curses.newwin(
5 + self.window_height,
self.window_width,
2,
4
)
def curses_stop(self):
curses.nocbreak()
self.stdscr.keypad(0)
curses.echo()
curses.endwin()
def getSelected(self):
if self.aborted == True:
return( False )
ret_s = [x for x in self.all_options if x["selected"]]
ret = [x["label"] for x in ret_s]
return(ret)
def redraw(self):
self.win.clear()
self.win.border(
self.border[0], self.border[1],
self.border[2], self.border[3],
self.border[4], self.border[5],
self.border[6], self.border[7]
)
self.win.addstr(
self.window_height + 4, 5, " " + self.footer + " "
)
position = 0
range = self.all_options[self.offset:self.offset+self.window_height+1]
for option in range:
if option["selected"] == True:
line_label = self.c_selected + " "
else:
line_label = self.c_empty + " "
if len(option["label"]) > (self.window_width - 20):
reduced = option["label"][:self.window_width - 20] + "..."
else:
reduced = option["label"]
self.win.addstr(position + 2, 5, line_label + reduced)
position = position + 1
# hint for more content above
if self.offset > 0:
self.win.addstr(1, 5, self.more)
# hint for more content below
if self.offset + self.window_height <= self.length - 2:
self.win.addstr(self.window_height + 3, 5, self.more)
self.win.addstr(0, 5, " " + self.title + " ")
self.win.addstr(
0, self.window_width - 8,
" " + str(self.selcount) + "/" + str(self.length) + " "
)
self.win.addstr(self.cursor + 2,1, self.arrow)
self.win.refresh()
def check_cursor_up(self):
if self.cursor < 0:
self.cursor = 0
if self.offset > 0:
self.offset = self.offset - 1
def check_cursor_down(self):
if self.cursor >= self.length:
self.cursor = self.cursor - 1
if self.cursor > self.window_height:
self.cursor = self.window_height
self.offset = self.offset + 1
if self.offset + self.cursor >= self.length:
self.offset = self.offset - 1
def curses_loop(self, stdscr):
while 1:
self.redraw()
c = stdscr.getch()
if c == ord('q') or c == ord('Q'):
self.aborted = True
break
elif c == curses.KEY_UP:
self.cursor = self.cursor - 1
elif c == curses.KEY_DOWN:
self.cursor = self.cursor + 1
#elif c == curses.KEY_PPAGE:
#elif c == curses.KEY_NPAGE:
elif c == ord(' '):
self.all_options[self.selected]["selected"] = \
not self.all_options[self.selected]["selected"]
elif c == 10:
break
# deal with interaction limits
self.check_cursor_up()
self.check_cursor_down()
# compute selected position only after dealing with limits
self.selected = self.cursor + self.offset
temp = self.getSelected()
self.selcount = len(temp)
def __init__(self, options, title='Select', arrow="-->",
footer="Space = toggle, Enter = accept, q = cancel",
more="...", border="||--++++", c_selected="[X]", c_empty="[ ]", checked="[ ]"):
self.title = title
self.arrow = arrow
self.footer = footer
self.more = more
self.border = border
self.c_selected = c_selected
self.c_empty = c_empty
self.all_options = []
for option in options:
self.all_options.append({
"label": option,
"selected": True if (option in checked) else False
})
self.length = len(self.all_options)
self.curses_start()
signal.signal(signal.SIGWINCH, self.sigwinch_handler)
curses.wrapper( self.curses_loop )
self.curses_stop()
| MSchuwalow/StudDP | studdp/picker.py | Python | mit | 5,867 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "remakery.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| symroe/remakery | manage.py | Python | mit | 251 |
#!/usr/bin/env python
import dateutil.parser
import dateutil.tz
import feedparser
import re
from datetime import datetime, timedelta
from joblist import JobList
class FilterException(Exception):
pass
class IndeedJobList(JobList):
'''Joblist class for Indeed
This joblist is for the indeed.com rss feed. Indeed has an API,
but it requires registration and is more suited to companies repackaging
their data. The RSS feed works just fine for the kind of search I'm
interested in.
'''
base_url = ('http://www.indeed.{domain}/rss?q={keywords}&l={location}'
'&sort=date&start={offset}')
page_size = 20
def collect_results(self, keywords, location, radius, filter_location=(),
filter_title=(), country='us',
max_results=1000, oldest=None):
'''Collect results for indeed.com (.ca, etc)
The feeds site is "indeed.com/rss?" plus these parameters:
* q: a set of keywords, combined with "+"
* l: the location (a zip code, "city, state", "remote", or just a state)
* sort: "date" or "relevance", I guess
* offset: The rss returns up to 20 results, you can page through them
using this parameter
:param keywords: str A space-separated list of keywords, arguments to
the "q" operator
:param location: str a zip code, "city, state" combination, "remote",
or state code. Argument to "l"
:param radius: int radius around a location. Argument to "r". May use 0
to limit to the location exactly.
:param filter_location: str an iterable of locations to be removed
from results. Any location that contains any of the strings
will be ignored.
:param filter_title: str an iterable of strings to filter titles. A
title will be ignored if it contains any of the strings.
:param country: str A two-letter country code. Defaults to "us", which
will try indeed.com; will try any other code if provided, but there
is no guarantee other codes will be handled well.
:param max_results: int A maximum number of results. The
results may be less than this, but the function will stop
querying if this number is reached.
:param oldest: timedelta Anything older than today - oldest
will be ignored.
:returns: A generator which when called will yield a dict of
the following format:
{
'date': The reported date of the entry,
'id': 'indeed$' + indeed's id for the job entry,
'link': a link to indeed's page about the entry,
'location': the entry's reported location,
'source': the reported author of the post,
'title': the reported title
}
'''
domain = 'com'
if country is not 'us':
domain = country
if oldest is None:
oldest = timedelta(weeks=52)
oldest_cutoff = datetime.now(tz=dateutil.tz.tzlocal()) - oldest
pages = 0
found = 0
cutoff = False
previous = ()
while found < max_results:
# Get a page of feed results (sorted by date), and process
# it until either a date older than *oldest_cutoff*
# appears or all the entries have been processed
offset = pages * self.page_size
feed = feedparser.parse(
self.base_url.format(domain=domain,
keywords=keywords,
location=location,
radius=radius,
offset=offset)
)
new = []
for entry in feed['entries']:
# We've seen this before, skip it.
if entry['id'] in previous:
continue
new.append(entry['id'])
entry_date = dateutil.parser.parse(entry['published'])
if oldest_cutoff > entry_date:
return None
entry_title = entry['title']
entry_location = 'Unspecified'
try:
entry_location = entry_title.split(' - ')[-1]
except IndexError:
pass
try:
for location_filter in filter_location:
if re.search(location_filter, entry_location,
re.IGNORECASE):
raise FilterException
for title_filter in filter_title:
if re.search(title_filter, entry_title,
re.IGNORECASE):
raise FilterException
except FilterException:
continue
found += 1
yield {
'date': entry_date,
'id': 'indeed$' + entry['id'],
'link': entry['link'],
'location': entry_location,
'source': entry['source']['title'],
'title': entry_title,
}
if not new:
# The assumption is that if none of the entries are new,
# indeed is just repeating and the current group
# of jobs is ended
return None
previous = tuple(new)
pages += 1
| keisetsu/joblist | indeed.py | Python | mit | 5,528 |
from .config import Config, HentaiHavenConfig, HAnimeConfig, Section
from .last_entry import LastEntry
| NightShadeNeko/HentaiBot | data/__init__.py | Python | mit | 103 |
from Tkinter import *
from ScrolledText import ScrolledText
from unicodedata import lookup
import os
class Diacritical:
"""Mix-in class that adds keyboard bindings for accented characters, plus
other common functionality.
An inheriting class must define a select_all method that will respond
to Ctrl-A."""
accents = (('acute', "'"), ('grave', '`'), ('circumflex', '^'),
('tilde', '='), ('diaeresis', '"'), ('cedilla', ','),
('stroke', '/'), ('ring above', ';'))
def __init__(self):
# Fix some key bindings
self.bind("<Control-Key-a>", self.select_all)
# We will need Ctrl-/ for the "stroke", but it cannot be unbound, so
# let's prevent it from being passed to the standard handler
self.bind("<Control-Key-/>", lambda event: "break")
# Diacritical bindings
for a, k in self.accents:
# Little-known feature of Tk, it allows to bind an event to
# multiple keystrokes
self.bind("<Control-Key-%s><Key>" % k,
lambda event, a=a: self.insert_accented(event.char, a))
def insert_accented(self, c, accent):
if c.isalpha():
if c.isupper():
cap = 'capital'
else:
cap = 'small'
try:
c = lookup("latin %s letter %c with %s" % (cap, c, accent))
self.insert(INSERT, c)
# Prevent plain letter from being inserted too, tell Tk to
# stop handling this event
return "break"
            except KeyError:
                pass
class DiacriticalEntry(Entry, Diacritical):
"""Tkinter Entry widget with some extra key bindings for
entering typical Unicode characters - with umlauts, accents, etc."""
def __init__(self, master=None, **kwargs):
        Entry.__init__(self, master, **kwargs)
Diacritical.__init__(self)
def select_all(self, event=None):
self.selection_range(0, END)
return "break"
class DiacriticalText(ScrolledText, Diacritical):
"""Tkinter ScrolledText widget with some extra key bindings for
entering typical Unicode characters - with umlauts, accents, etc."""
def __init__(self, master=None, **kwargs):
        ScrolledText.__init__(self, master, **kwargs)
Diacritical.__init__(self)
def select_all(self, event=None):
self.tag_add(SEL, "1.0", "end-1c")
self.mark_set(INSERT, "1.0")
self.see(INSERT)
return "break"
def test():
frame = Frame()
frame.pack(fill=BOTH, expand=YES)
if os.name == "nt":
# Set default font for all widgets; use Windows typical default
frame.option_add("*font", "Tahoma 8")
# The editors
entry = DiacriticalEntry(frame)
entry.pack(fill=BOTH, expand=YES)
text = DiacriticalText(frame, width=76, height=25, wrap=WORD)
if os.name == "nt":
# But this looks better than the default set above
text.config(font="Arial 10")
text.pack(fill=BOTH, expand=YES)
text.focus()
frame.master.title("Diacritical Editor")
frame.mainloop()
if __name__ == "__main__":
test()
| ActiveState/code | recipes/Python/286155_Entering_accented_characters_Tkinter/recipe-286155.py | Python | mit | 3,210 |
import base64
import json
import requests
def call_vision_api(image_filename, api_keys):
api_key = api_keys['microsoft']
post_url = "https://api.projectoxford.ai/vision/v1.0/analyze?visualFeatures=Categories,Tags,Description,Faces,ImageType,Color,Adult&subscription-key=" + api_key
image_data = open(image_filename, 'rb').read()
result = requests.post(post_url, data=image_data, headers={'Content-Type': 'application/octet-stream'})
result.raise_for_status()
return result.text
# Return a dictionary of features to their scored values (represented as lists of tuples).
# Scored values must be sorted in descending order.
#
# {
# 'feature_1' : [(element, score), ...],
# 'feature_2' : ...
# }
#
# E.g.,
#
# {
# 'tags' : [('throne', 0.95), ('swords', 0.84)],
# 'description' : [('A throne made of pointy objects', 0.73)]
# }
#
def get_standardized_result(api_result):
output = {
'tags' : [],
'captions' : [],
# 'categories' : [],
# 'adult' : [],
# 'image_types' : []
# 'tags_without_score' : {}
}
for tag_data in api_result['tags']:
output['tags'].append((tag_data['name'], tag_data['confidence']))
for caption in api_result['description']['captions']:
output['captions'].append((caption['text'], caption['confidence']))
# for category in api_result['categories']:
# output['categories'].append(([category['name'], category['score']))
# output['adult'] = api_result['adult']
# for tag in api_result['description']['tags']:
# output['tags_without_score'][tag] = 'n/a'
# output['image_types'] = api_result['imageType']
return output
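
# Example usage (a sketch; both file names are hypothetical):
#
#   with open('api_keys.json') as f:  # e.g. {"microsoft": "<your-key>"}
#       api_keys = json.load(f)
#   raw = call_vision_api('example.jpg', api_keys)
#   standardized = get_standardized_result(json.loads(raw))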
| SlashDK/OpenCV-simplestuff | vendors/microsoft.py | Python | mit | 1,684 |
from django.db import models
from django.contrib.auth.models import User, Group
from django.utils.translation import ugettext_lazy as _
from django.core.validators import RegexValidator
from django.conf import settings
class Repository(models.Model):
"""
Git repository
"""
# basic info
name = models.CharField(
max_length=64,
validators=[RegexValidator(regex=r'^[^\x00-\x2c\x2f\x3a-\x40\x5b-\x5e\x60\x7b-\x7f\s]+$')],
verbose_name=_('name'),
help_text=_('Name of the repository, cannot contain special characters other than hyphens.'),
)
description = models.TextField(blank=True, verbose_name=_('description'))
# owner
user = models.ForeignKey(
User,
blank=True,
null=True,
related_name='repositories',
on_delete=models.SET_NULL,
verbose_name=_('user'),
help_text=_('Owner of the repository. Repository path will be prefixed by owner\'s username.'),
)
# access control
users = models.ManyToManyField(
User,
blank=True,
verbose_name=_('users'),
help_text=_('These users have right access to the repository.'),
)
groups = models.ManyToManyField(
Group,
blank=True,
verbose_name=_('groups'),
help_text=_('Users in these groups have right access to the repository.'),
)
is_private = models.BooleanField(
default=True,
verbose_name=_('is private'),
help_text=_('Restrict read access to specified users and groups.'),
)
# meta
created = models.DateTimeField(auto_now_add=True, verbose_name=_('created'))
modified = models.DateTimeField(auto_now=True, verbose_name=_('modified'))
class Meta:
verbose_name = _('repository')
verbose_name_plural = _('repositories')
ordering = ['user', 'name']
unique_together = ['user', 'name']
def __unicode__(self):
if self.user:
return u'%s/%s' % (self.user.username, self.name)
return u'./%s' % (self.name)
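
    # Access semantics in brief: a public repository is readable by anyone
    # (unless settings.PROTECTED requires a logged-in user), while a private
    # one is readable only by those with write access, namely the owner, the
    # explicitly listed users, and members of the listed groups.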
def can_read(self, user):
if not user and settings.PROTECTED:
return False
if not self.is_private:
return True
return self.can_write(user)
def can_write(self, user):
if not user:
return False
if user.id == self.user_id:
return True
if self.users.filter(pk=user.id).exists():
return True
if self.groups.filter(user__pk=user.id).exists():
return True
return False
| gitmill/gitmill | django/repository/models.py | Python | mit | 2,579 |
import re
def read_lua():
PATTERN = r'\s*\[(?P<id>\d+)\] = {\s*unidentifiedDisplayName = ' \
r'"(?P<unidentifiedDisplayName>[^"]+)",\s*unidentifie' \
r'dResourceName = "(?P<unidentifiedResourceName>[^"]+' \
r')",\s*unidentifiedDescriptionName = {\s*"(?P<uniden' \
r'tifiedDescriptionName>[^=]+)"\s*},\s*identifiedDisp' \
r'layName = "(?P<identifiedDisplayName>[\S ]+)",\s*id' \
r'entifiedResourceName = "(?P<identifiedResourceName>' \
r'[\S ]+)",\s*identifiedDescriptionName = {\s*"(?P<id' \
r'entifiedDescriptionName>[^=]+)"\s*},\s*slotCount = ' \
r'(?P<slotCount>\d{1}),\s*ClassNum = (?P<ClassNum>\d{' \
r'1})\s*}'
PATTERN = re.compile(PATTERN)
with open('testcase.txt', encoding='utf8', errors='ignore') as file:
test = PATTERN.findall(file.read())
for item in test:
if item[0] == '502':
print(item)
print(len(test))
return 0
"""
for group in test.groupdict():
for k, v in group.items():
print(k + ' : ' + v)
print()
"""
read_lua()
| vakhet/rathena-utils | dev/ragna.py | Python | mit | 1183 |
from django.apps import AppConfig
class JcvrbaseappConfig(AppConfig):
name = 'jcvrbaseapp'
| jucapoco/baseSiteGanttChart | jcvrbaseapp/apps.py | Python | mit | 97 |
# coding=utf8
import time
import random
import unittest
from qiniuManager.progress import *
class Pro(object):
def __init__(self):
self.progressed = 0
self.total = 100
self.title = 'test'
self.chunked = False
self.chunk_recved = 0
self.start = time.time()
@bar(100, '=')
def loader(self):
self._normal_loader()
self.title = "固定长度"
@bar(fill='x')
def loader_x(self):
self._normal_loader()
self.title = "x"
@bar()
def auto_loader(self):
self._normal_loader()
self.title = "长度占满宽度"
def _normal_loader(self):
time.sleep(0.01)
self.progressed += 1
def _chunked_loader(self):
self.chunked = True
time.sleep(0.01)
self.chunk_recved += random.randrange(3, 1000000)
if time.time() - self.start > 5:
self.progressed = self.total
@bar()
def chunked_loader(self):
self._chunked_loader()
self.title = "full width"
@bar(100)
def fixed_chunked_loader(self):
self._chunked_loader()
self.title = "fixed width"
class ProgressTester(unittest.TestCase):
def test_100_progress(self):
print("进度条换行")
Pro().loader()
Pro().loader_x()
def test_auto_width_progress(self):
print("进度条换行")
Pro().auto_loader()
def test_disable_progress(self):
pro = Pro()
pro.disable_progress = True
pro.title = "无进度条,也就是说应该看不到这串字符才对"
pro.loader()
def test_chunked_progress(self):
print("进度条换行")
Pro().chunked_loader()
def test_fixed_chunked_progress(self):
print("进度条换行")
Pro().fixed_chunked_loader()
if __name__ == '__main__':
unittest.main(verbosity=2)
| hellflame/qiniu_manager | qiniuManager/test/progress_test.py | Python | mit | 1,900 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-18 18:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.forms.widgets
class Migration(migrations.Migration):
dependencies = [
('sshcomm', '0002_auto_20170118_1702'),
]
operations = [
migrations.AlterField(
model_name='userdata',
name='user_name',
field=models.CharField(max_length=128),
),
migrations.AlterField(
model_name='userdata',
name='user_password',
field=models.CharField(max_length=128, verbose_name=django.forms.widgets.PasswordInput),
),
]
| t-mertz/slurmCompanion | django-web/sshcomm/migrations/0003_auto_20170118_1901.py | Python | mit | 697 |
from bson.objectid import ObjectId
import json
class Room():
def __init__(self, players_num, objectid, table, current_color='purple'):
if players_num:
self.players_num = players_num
else:
self.players_num = 0
for el in ['p', 'b', 'g', 'r']:
if el in table:
self.players_num += 1
self.objectid = objectid
self.current_color = current_color
self.players_dict = {}
self.alredy_ex = []
self.colors = []
self.winner = None
for col in ['p', 'b', 'g', 'r']:
if col in table:
self.colors.append(
{'p': 'purple',
'b': 'blue',
'g': 'green',
'r': 'red'}[col])
if current_color in self.colors:
self.current_color = current_color
else:
self.current_color = self.colors[0]
self.users_nicks = {}
self.color_player_dict = {'purple': None, 'blue': None, 'green': None, 'red': None}
self.player_color_dict = {}
self.status = 'waiting'
def get_player_by_color(self, color):
if color in self.color_player_dict:
return self.players_dict[self.color_player_dict[color]]
return None
def get_color_by_player(self, player_id):
if player_id in self.player_color_dict:
return self.player_color_dict[player_id]
return None
def add_player(self, player_id, name):
self.players_dict[player_id] = False
self.users_nicks[player_id] = name
for color in self.colors:
if not self.color_player_dict[color]:
self.color_player_dict[color] = player_id
self.player_color_dict[player_id] = color
break
def dell_player(self, player_id):
self.players_dict[player_id] = False
return self
def change_row(self, row, i, to):
return row[:i] + to + row[i + 1:]
def update_table(self, move, table):
print('Table updating')
pymove = json.loads(move)
pytable = json.loads(table)
print('Old table:')
for row in pytable:
print(' ', row)
x0, y0 = int(pymove['X0']), int(pymove['Y0'])
x1, y1 = int(pymove['X1']), int(pymove['Y1'])
print('Move from ({}, {}) to ({}, {})'.format(x0, y0, x1, y1))
if ((abs(x1 - x0) > 1) or (abs(y1 - y0) > 1)):
pytable[x0] = self.change_row(pytable[x0], y0, 'e')
for i in range(-1, 2):
for j in range(-1, 2):
if (x1 + i < len(pytable)) and (x1 + i > -1):
if (y1 + j < len(pytable[x1])) and (y1 + j > -1):
if pytable[x1 + i][y1 + j] != 'e':
pytable[x1 + i] = self.change_row(pytable[x1 + i], y1 + j, self.current_color[0].lower())
pytable[x1] = self.change_row(pytable[x1], y1, self.current_color[0].lower())
res = json.dumps(pytable)
if 'e' not in res:
r_count = (res.count('r'), 'red')
b_count = (res.count('b'), 'blue')
g_count = (res.count('g'), 'green')
p_count = (res.count('p'), 'purple')
sort_list = [r_count, b_count, p_count, g_count]
sort_list.sort()
self.winner = sort_list[-1][1]
print('New table:')
for row in pytable:
print(' ', row)
return res
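
    # Worked example (illustrative): a move of distance 1 clones the piece,
    # so the origin keeps its color, while a jump of distance 2 vacates the
    # origin ('e'); in both cases every occupied cell in the 3x3
    # neighborhood of the destination is converted to the moving color, as
    # in Ataxx-style "infection" games.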
def can_move(self, table):
pytable = json.loads(table)
for row_id, row in enumerate(pytable):
for char_id in range(len(row)):
char = row[char_id]
if char == self.current_color[0].lower():
for i in range(-2, 3):
for j in range(-2, 3):
if (row_id + i < len(pytable)) and (row_id + i > -1):
if (char_id + j < len(row)) and (char_id + j > -1):
if pytable[row_id + i][char_id + j] == 'e':
return True
return False
def change_color(self, table):
        print('Color changing')
colors = self.colors
self.current_color = colors[
(colors.index(self.current_color) + 1) % self.players_num]
i = 1
while ((not self.players_dict[self.color_player_dict[self.current_color]]) or (not self.can_move(table))) and (i <= 5):
self.current_color = colors[
(colors.index(self.current_color) + 1) % self.players_num]
i += 1
if not self.can_move(table):
return None
return self.current_color
class RoomsManager():
def __init__(self, db):
# dict of rooms by their obj_id
self.db = db
self.rooms_dict = {}
def get_room(self, objectid):
if objectid not in self.rooms_dict:
rid = objectid
room_in_db = self.db.rooms.find_one({'_id': ObjectId(rid)})
if room_in_db:
print('Room', objectid, 'extrapolated from db')
new_room = Room(
int(room_in_db['players_num']), rid, room_in_db['table'])
new_room.current_color = room_in_db['current_color']
for user_id in room_in_db['players']:
player = room_in_db['players'][user_id]
new_room.color_player_dict[player['color']] = user_id
new_room.player_color_dict[user_id] = player['color']
new_room.users_nicks[user_id] = player['nick']
new_room.players_dict[user_id] = None
self.rooms_dict[rid] = new_room
else:
return None
return self.rooms_dict[objectid]
def add_room(self, room):
self.rooms_dict[room.objectid] = room
def rooms(self):
for objectid in self.rooms_dict:
yield self.rooms_dict[objectid]
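# A minimal usage sketch (illustrative only; assumes a pymongo-style `db`
# handle with a `rooms` collection, as used by get_room above):
#
# manager = RoomsManager(db)
# room = manager.get_room(room_id)
# if room is not None:
#     new_table = room.update_table(move_json, table_json)
#     room.change_color(new_table)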
| Andrey-Tkachev/infection | models/room.py | Python | mit | 6,066 |
class Solution:
def countSegments(self, s: 'str') -> 'int':
return len(s.split())
| jiadaizhao/LeetCode | 0401-0500/0434-Number of Segments in a String/0434-Number of Segments in a String.py | Python | mit | 94 |
import datetime
from sqlalchemy import create_engine, ForeignKey, Column, Integer, String, Text, Date, Table, Boolean
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base
from . import app
from flask_login import UserMixin
engine = create_engine(app.config["SQLALCHEMY_DATABASE_URI"])
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()
class Fighter(Base):
__tablename__ = "fighters"
id = Column(Integer, primary_key=True)
first_name = Column(String(1024), nullable=False)
last_name = Column(String(1024), nullable=False)
nickname = Column(String(1024))
gender = Column(String(128), nullable=False)
dob = Column(Date)
age = Column(Integer)
promotion = Column(String(1024), nullable=False)
profile_image = Column(String(1024))
right_full = Column(String(1024))
left_full = Column(String(1024))
height = Column(Integer)
weight = Column(String(128), nullable=False)
win = Column(Integer, nullable=False)
loss = Column(Integer, nullable=False)
draw = Column(Integer)
no_contest = Column(Integer)
def as_dictionary(self):
fighter = {
"id": self.id,
"first_name": self.first_name,
"last_name": self.last_name,
"nickname": self.nickname,
"gender": self.gender,
"age": self.age,
"promotion": self.promotion,
"profile_image": self.profile_image,
"right_full": self.right_full,
"left_full": self.left_full,
"height": self.height,
"weight": self.weight,
"win": self.win,
"loss": self.loss,
"draw": self.draw,
"no_contest": self.no_contest,
}
return fighter
class User(Base, UserMixin):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
email = Column(String(1024), unique=True, nullable=False)
password = Column(String(128), nullable=False)
user_history = relationship("History", backref="user")
class History(Base):
__tablename__ = "history"
id = Column(Integer, primary_key=True)
fight_date = Column(String, nullable=False)
has_occured = Column(Boolean, nullable=False)
red_corner = Column(String(1024), nullable=False)
blue_corner = Column(String(1024), nullable=False)
winner = Column(String(1024))
end_round = Column(String, nullable=False)
end_time = Column(String, nullable=False)
method = Column(String, nullable=False)
visible = Column(Boolean, nullable=False)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
def as_dictionary(self):
results = {
"id": self.id,
"fight_date": self.fight_date,
"has_occured": self.has_occured,
"red_corner": self.red_corner,
"blue_corner": self.blue_corner,
"winner": self.winner,
"end_round": self.end_round,
"end_time": self.end_time,
"method": self.method,
"user_id": self.user_id,
}
return results
class Event(Base):
__tablename__ = "events"
id = Column(Integer, primary_key=True)
event_date = Column(String(256))
base_title = Column(String(1024), nullable=False)
title_tag_line = Column(String(1024))
#feature_image = Column(String(1024))
arena = Column(String(1024))
location = Column(String(1024))
event_id = Column(Integer)
def as_dictionary(self):
event = {
"id": self.id,
"event_date": self.event_date,
"base_title": self.base_title,
"title_tag_line": self.title_tag_line,
#"feature_image": self.feature_image,
"arena": self.arena,
"location": self.location,
"event_id": self.event_id
}
return event
Base.metadata.create_all(engine)
| tydonk/fight_simulator | fight_simulator/database.py | Python | mit | 3,978 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Pyramidal bidirectional LSTM encoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
class PyramidBLSTMEncoder(object):
"""Pyramidal bidirectional LSTM Encoder.
Args:
num_units (int): the number of units in each layer
num_layers (int): the number of layers
lstm_impl (string, optional): BasicLSTMCell or LSTMCell or
LSTMBlockCell or LSTMBlockFusedCell or CudnnLSTM.
            Chooses the backend implementation in TensorFlow.
Default is LSTMBlockCell.
use_peephole (bool, optional): if True, use peephole
parameter_init (float, optional): the range of uniform distribution to
initialize weight parameters (>= 0)
clip_activation (float, optional): the range of activation clipping (> 0)
# num_proj (int, optional): the number of nodes in the projection layer
concat (bool, optional):
name (string, optional): the name of encoder
"""
def __init__(self,
num_units,
num_layers,
lstm_impl,
use_peephole,
parameter_init,
clip_activation,
num_proj,
concat=False,
name='pblstm_encoder'):
assert num_proj != 0
assert num_units % 2 == 0, 'num_unit should be even number.'
self.num_units = num_units
        self.num_proj = None  # NOTE: the num_proj argument is accepted but not used here
self.num_layers = num_layers
self.lstm_impl = lstm_impl
self.use_peephole = use_peephole
self.parameter_init = parameter_init
self.clip_activation = clip_activation
self.name = name
def _build(self, inputs, inputs_seq_len, keep_prob, is_training):
"""Construct Pyramidal Bidirectional LSTM encoder.
Args:
inputs (placeholder): A tensor of size`[B, T, input_size]`
inputs_seq_len (placeholder): A tensor of size` [B]`
keep_prob (placeholder, float): A probability to keep nodes
in the hidden-hidden connection
is_training (bool):
Returns:
outputs: Encoder states, a tensor of size
`[T, B, num_units (num_proj)]`
final_state: A final hidden state of the encoder
"""
initializer = tf.random_uniform_initializer(
minval=-self.parameter_init, maxval=self.parameter_init)
# Hidden layers
outputs = inputs
for i_layer in range(1, self.num_layers + 1, 1):
with tf.variable_scope('pblstm_hidden' + str(i_layer),
initializer=initializer) as scope:
lstm_fw = tf.contrib.rnn.LSTMCell(
self.num_units,
use_peepholes=self.use_peephole,
cell_clip=self.clip_activation,
initializer=initializer,
num_proj=None,
forget_bias=1.0,
state_is_tuple=True)
lstm_bw = tf.contrib.rnn.LSTMCell(
self.num_units,
use_peepholes=self.use_peephole,
cell_clip=self.clip_activation,
initializer=initializer,
num_proj=self.num_proj,
forget_bias=1.0,
state_is_tuple=True)
# Dropout for the hidden-hidden connections
lstm_fw = tf.contrib.rnn.DropoutWrapper(
lstm_fw, output_keep_prob=keep_prob)
lstm_bw = tf.contrib.rnn.DropoutWrapper(
lstm_bw, output_keep_prob=keep_prob)
                if i_layer > 0:  # NOTE: always true, since the loop starts at 1
# Convert to time-major: `[T, B, input_size]`
outputs = tf.transpose(outputs, (1, 0, 2))
max_time = tf.shape(outputs)[0]
max_time_half = tf.floor(max_time / 2) + 1
# Apply concat_fn to each tensor in outputs along
# dimension 0 (times-axis)
i_time = tf.constant(0)
final_time, outputs, tensor_list = tf.while_loop(
cond=lambda t, hidden, tensor_list: t < max_time,
body=lambda t, hidden, tensor_list: self._concat_fn(
t, hidden, tensor_list),
loop_vars=[i_time, outputs, tf.Variable([])],
shape_invariants=[i_time.get_shape(),
outputs.get_shape(),
tf.TensorShape([None])])
outputs = tf.stack(tensor_list, axis=0)
inputs_seq_len = tf.cast(tf.floor(
tf.cast(inputs_seq_len, tf.float32) / 2),
tf.int32)
# Transpose to `[batch_size, time, input_size]`
outputs = tf.transpose(outputs, (1, 0, 2))
(outputs_fw, outputs_bw), final_state = tf.nn.bidirectional_dynamic_rnn(
cell_fw=lstm_fw,
cell_bw=lstm_bw,
inputs=outputs,
sequence_length=inputs_seq_len,
dtype=tf.float32,
time_major=False,
scope=scope)
# NOTE: initial states are zero states by default
# Concatenate each direction
outputs = tf.concat(axis=2, values=[outputs_fw, outputs_bw])
return outputs, final_state
def _concat_fn(self, current_time, x, tensor_list):
"""Concatenate each 2 time steps to reduce time resolution.
Args:
current_time: The current timestep
x: A tensor of size `[max_time, batch_size, feature_dim]`
            tensor_list: A tensor of size `[t, batch_size, feature_dim * 2]`
Returns:
current_time: current_time + 2
x: A tensor of size `[max_time, batch_size, feature_dim]`
            tensor_list: A tensor of size `[t + 1, batch_size, feature_dim * 2]`
"""
print(tensor_list)
print(current_time)
print('-----')
batch_size = tf.shape(x)[1]
feature_dim = x.get_shape().as_list()[2]
# Concat features in 2 timesteps
concat_x = tf.concat(
axis=0,
values=[tf.reshape(x[current_time],
shape=[1, batch_size, feature_dim]),
tf.reshape(x[current_time + 1],
shape=[1, batch_size, feature_dim])])
# Reshape to `[1, batch_size, feature_dim * 2]`
concat_x = tf.reshape(concat_x,
shape=[1, batch_size, feature_dim * 2])
tensor_list = tf.concat(axis=0, values=[tensor_list, [concat_x]])
# Skip 2 timesteps
current_time += 2
return current_time, x, tensor_list
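# A minimal usage sketch (illustrative; the hyperparameter values below are
# assumptions, not values taken from this repository):
#
# encoder = PyramidBLSTMEncoder(num_units=256, num_layers=3,
#                               lstm_impl='LSTMBlockCell', use_peephole=True,
#                               parameter_init=0.1, clip_activation=5.0,
#                               num_proj=None)
# outputs, final_state = encoder._build(inputs, inputs_seq_len,
#                                       keep_prob, is_training)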
| hirofumi0810/tensorflow_end2end_speech_recognition | models/encoders/core/pyramidal_blstm.py | Python | mit | 7,080 |
import numpy as np
import pandas as pd
# NOTE: the artworks data must be loaded here; the source is not shown in this
# snippet, so something like arts = pd.read_csv("arts.csv") is assumed.
arts = pd.DataFrame()
# 1. Clean the dates so you only see numbers by using string manipulations
arts["execution_date"] = arts["execution_date"].str.findall(r"([0-9]+)").str[0]
arts["execution_date"] = arts["execution_date"].astype(float)
arts.head()
# 2. If a year is lower than 100, it refers to the 1900s. For example, 78 is actually 1978, and that needs to be fixed too.
arts["execution_date"] = arts["execution_date"].apply(lambda x: 1900 + x if x < 100 else x)
arts.head()
# 3. Get the average execution year per artist.
arts.groupby("artist_name").mean().head()
# 4. Get the average execution year per category.
arts.groupby("category").mean().head()
# 5. Get the number of artworks per artist. Which artist is the most prolific?
artworks_by_artist = arts.groupby("artist_name")[["title"]].aggregate(np.count_nonzero)
artworks_by_artist.sort_values("title", ascending=False).head()
# 6. Get the number of artworks per category. Which category has the highest number?
artworks_by_category = arts.groupby("category")[["title"]].aggregate(np.count_nonzero)
artworks_by_category.sort_values("title", ascending=False).head()
# 7. Get the average length of artworks titles per category and artist.
arts['title_length'] = arts['title'].str.len()
length_by_category = arts.groupby("category")[["title_length"]].aggregate(np.mean)
length_by_category.sort_values("title_length", ascending=False).head()
# 8. Get the year with the highest production.
artworks_by_year = arts.groupby("execution_date")[["title"]].aggregate(np.count_nonzero)
artworks_by_year.sort_values("title", ascending=False).head()
# 9. Get the approximate period of production for each artist. If an artist painted from 1970 to 1990, the period is 20.
period_min = arts.groupby("artist_name")[['execution_date']].aggregate(np.min)
period_max = arts.groupby("artist_name")[['execution_date']].aggregate(np.max)
(period_max - period_min).sort_values("execution_date", ascending=False).head()
| versae/DH2304 | data/arts2.py | Python | mit | 1,974 |
import os
from .base import Output
class AppleSay(Output):
"""Speech output supporting the Apple Say subsystem."""
name = 'Apple Say'
def __init__(self, voice = 'Alex', rate = '300'):
self.voice = voice
self.rate = rate
super(AppleSay, self).__init__()
def is_active(self):
return not os.system('which say')
def speak(self, text, interrupt = 0):
if interrupt:
self.silence()
os.system('say -v %s -r %s "%s" &' % (self.voice, self.rate, text))
def silence(self):
os.system('killall say')
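# A minimal usage sketch (assumes macOS with the `say` command on PATH):
#
# o = AppleSay()
# if o.is_active():
#     o.speak('hello world', interrupt=1)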
output_class = AppleSay
| frastlin/PyAudioGame | pyaudiogame/accessible_output2/outputs/say.py | Python | mit | 535 |
from jupyter_server.utils import url_path_join as ujoin
from .config import Lmod as LmodConfig
from .handler import default_handlers, PinsHandler
def _jupyter_server_extension_paths():
return [{"module": "jupyterlmod"}]
# Jupyter Extension points
def _jupyter_nbextension_paths():
return [
dict(
section="tree", src="static", dest="jupyterlmod", require="jupyterlmod/main"
)
]
def load_jupyter_server_extension(nbapp):
"""
Called when the extension is loaded.
Args:
nbapp : handle to the Notebook webserver instance.
"""
nbapp.log.info("Loading lmod extension")
lmod_config = LmodConfig(parent=nbapp)
launcher_pins = lmod_config.launcher_pins
web_app = nbapp.web_app
base_url = web_app.settings["base_url"]
for path, class_ in default_handlers:
web_app.add_handlers(".*$", [(ujoin(base_url, path), class_)])
web_app.add_handlers(".*$", [
(ujoin(base_url, 'lmod/launcher-pins'), PinsHandler, {'launcher_pins': launcher_pins}),
])
| cmd-ntrf/jupyter-lmod | jupyterlmod/__init__.py | Python | mit | 1,051 |
from models import Event
from django.views.generic import DetailView, ListView
class EventListView(ListView):
template_name = 'agenda/event_list.html'
queryset = Event.objects.upcoming()
paginate_by = 20
class EventArchiveView(EventListView):
queryset = Event.objects.past()
class EventDetailView(DetailView):
model = Event
template_name = 'agenda/event_detail.html'
| feinheit/feincms-elephantagenda | elephantagenda/views.py | Python | mit | 403 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Prueba',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nombre', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
]
| HenryGBC/landing_company | landing/migrations/0001_initial.py | Python | mit | 556 |
#!/usr/bin/env python
"""
Download NLTK data
"""
__author__ = "Manan Kalra"
__email__ = "[email protected]"
import nltk
nltk.download()
| manankalra/Twitter-Sentiment-Analysis | demo/download.py | Python | mit | 145 |
import numpy as np
import ctypes
import struct
import time
# relative imports in Python3 must be explicit
from .ioctl_numbers import _IOR, _IOW
from fcntl import ioctl
SPI_IOC_MAGIC = ord("k")
SPI_IOC_RD_MODE = _IOR(SPI_IOC_MAGIC, 1, "=B")
SPI_IOC_WR_MODE = _IOW(SPI_IOC_MAGIC, 1, "=B")
SPI_IOC_RD_LSB_FIRST = _IOR(SPI_IOC_MAGIC, 2, "=B")
SPI_IOC_WR_LSB_FIRST = _IOW(SPI_IOC_MAGIC, 2, "=B")
SPI_IOC_RD_BITS_PER_WORD = _IOR(SPI_IOC_MAGIC, 3, "=B")
SPI_IOC_WR_BITS_PER_WORD = _IOW(SPI_IOC_MAGIC, 3, "=B")
SPI_IOC_RD_MAX_SPEED_HZ = _IOR(SPI_IOC_MAGIC, 4, "=I")
SPI_IOC_WR_MAX_SPEED_HZ = _IOW(SPI_IOC_MAGIC, 4, "=I")
SPI_CPHA = 0x01 # /* clock phase */
SPI_CPOL = 0x02 # /* clock polarity */
SPI_MODE_0 = (0|0) # /* (original MicroWire) */
SPI_MODE_1 = (0|SPI_CPHA)
SPI_MODE_2 = (SPI_CPOL|0)
SPI_MODE_3 = (SPI_CPOL|SPI_CPHA)
class Lepton(object):
"""Communication class for FLIR Lepton module on SPI
Args:
spi_dev (str): Location of SPI device node. Default '/dev/spidev0.0'.
"""
ROWS = 60
COLS = 80
VOSPI_FRAME_SIZE = COLS + 2
VOSPI_FRAME_SIZE_BYTES = VOSPI_FRAME_SIZE * 2
MODE = SPI_MODE_3
BITS = 8
SPEED = 18000000
SPIDEV_MESSAGE_LIMIT = 24
def __init__(self, spi_dev = "/dev/spidev0.0"):
self.__spi_dev = spi_dev
self.__txbuf = np.zeros(Lepton.VOSPI_FRAME_SIZE, dtype=np.uint16)
# struct spi_ioc_transfer {
# __u64 tx_buf;
# __u64 rx_buf;
# __u32 len;
# __u32 speed_hz;
# __u16 delay_usecs;
# __u8 bits_per_word;
# __u8 cs_change;
# __u32 pad;
# };
self.__xmit_struct = struct.Struct("=QQIIHBBI")
self.__msg_size = self.__xmit_struct.size
self.__xmit_buf = np.zeros((self.__msg_size * Lepton.ROWS), dtype=np.uint8)
self.__msg = _IOW(SPI_IOC_MAGIC, 0, self.__xmit_struct.format)
self.__capture_buf = np.zeros((Lepton.ROWS, Lepton.VOSPI_FRAME_SIZE, 1), dtype=np.uint16)
for i in range(Lepton.ROWS):
self.__xmit_struct.pack_into(self.__xmit_buf, i * self.__msg_size,
self.__txbuf.ctypes.data, # __u64 tx_buf;
self.__capture_buf.ctypes.data + Lepton.VOSPI_FRAME_SIZE_BYTES * i, # __u64 rx_buf;
Lepton.VOSPI_FRAME_SIZE_BYTES, # __u32 len;
Lepton.SPEED, # __u32 speed_hz;
0, # __u16 delay_usecs;
Lepton.BITS, # __u8 bits_per_word;
1, # __u8 cs_change;
0) # __u32 pad;
def __enter__(self):
# "In Python 3 the only way to open /dev/tty under Linux appears to be 1) in binary mode and 2) with buffering disabled."
self.__handle = open(self.__spi_dev, "wb+", buffering=0)
ioctl(self.__handle, SPI_IOC_RD_MODE, struct.pack("=B", Lepton.MODE))
ioctl(self.__handle, SPI_IOC_WR_MODE, struct.pack("=B", Lepton.MODE))
ioctl(self.__handle, SPI_IOC_RD_BITS_PER_WORD, struct.pack("=B", Lepton.BITS))
ioctl(self.__handle, SPI_IOC_WR_BITS_PER_WORD, struct.pack("=B", Lepton.BITS))
ioctl(self.__handle, SPI_IOC_RD_MAX_SPEED_HZ, struct.pack("=I", Lepton.SPEED))
ioctl(self.__handle, SPI_IOC_WR_MAX_SPEED_HZ, struct.pack("=I", Lepton.SPEED))
return self
def __exit__(self, type, value, tb):
self.__handle.close()
@staticmethod
def capture_segment(handle, xs_buf, xs_size, capture_buf):
messages = Lepton.ROWS
iow = _IOW(SPI_IOC_MAGIC, 0, xs_size)
ioctl(handle, iow, xs_buf, True)
while (capture_buf[0] & 0x000f) == 0x000f: # byteswapped 0x0f00
ioctl(handle, iow, xs_buf, True)
messages -= 1
# NB: the default spidev bufsiz is 4096 bytes so that's where the 24 message limit comes from: 4096 / Lepton.VOSPI_FRAME_SIZE_BYTES = 24.97...
# This 24 message limit works OK, but if you really need to optimize the read speed here, this hack is for you:
# The limit can be changed when spidev is loaded, but since it is compiled statically into newer raspbian kernels, that means
# modifying the kernel boot args to pass this option. This works too:
# $ sudo chmod 666 /sys/module/spidev/parameters/bufsiz
# $ echo 65536 > /sys/module/spidev/parameters/bufsiz
# Then Lepton.SPIDEV_MESSAGE_LIMIT of 24 can be raised to 59
while messages > 0:
if messages > Lepton.SPIDEV_MESSAGE_LIMIT:
count = Lepton.SPIDEV_MESSAGE_LIMIT
else:
count = messages
iow = _IOW(SPI_IOC_MAGIC, 0, xs_size * count)
ret = ioctl(handle, iow, xs_buf[xs_size * (60 - messages):], True)
if ret < 1:
raise IOError("can't send {0} spi messages ({1})".format(60, ret))
messages -= count
def capture(self, data_buffer = None, log_time = False, debug_print = False, retry_reset = True):
"""Capture a frame of data.
Captures 80x60 uint16 array of non-normalized (raw 12-bit) data. Returns that frame and a frame_id (which
is currently just the sum of all pixels). The Lepton will return multiple, identical frames at a rate of up
to ~27 Hz, with unique frames at only ~9 Hz, so the frame_id can help you from doing additional work
processing duplicate frames.
Args:
data_buffer (numpy.ndarray): Optional. If specified, should be ``(60,80,1)`` with `dtype`=``numpy.uint16``.
Returns:
tuple consisting of (data_buffer, frame_id)
"""
start = time.time()
if data_buffer is None:
data_buffer = np.ndarray((Lepton.ROWS, Lepton.COLS, 1), dtype=np.uint16)
elif data_buffer.ndim < 2 or data_buffer.shape[0] < Lepton.ROWS or data_buffer.shape[1] < Lepton.COLS or data_buffer.itemsize < 2:
raise Exception("Provided input array not large enough")
while True:
Lepton.capture_segment(self.__handle, self.__xmit_buf, self.__msg_size, self.__capture_buf[0])
if retry_reset and (self.__capture_buf[20, 0] & 0xFF0F) != 0x1400: # make sure that this is a well-formed frame, should find line 20 here
# Leave chip select deasserted for at least 185 ms to reset
if debug_print:
print("Garbage frame number reset waiting...")
time.sleep(0.185)
else:
break
self.__capture_buf.byteswap(True)
data_buffer[:,:] = self.__capture_buf[:,2:]
end = time.time()
if debug_print:
print("---")
for i in range(Lepton.ROWS):
fid = self.__capture_buf[i, 0, 0]
crc = self.__capture_buf[i, 1, 0]
fnum = fid & 0xFFF
print("0x{0:04x} 0x{1:04x} : Row {2:2} : crc={1}".format(fid, crc, fnum))
print("---")
if log_time:
print("frame processed int {0}s, {1}hz".format(end-start, 1.0/(end-start)))
# TODO: turn on telemetry to get real frame id, sum on this array is fast enough though (< 500us)
return data_buffer, data_buffer.sum()
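# A minimal usage sketch (assumes a FLIR Lepton module wired to /dev/spidev0.0):
#
# with Lepton() as lepton:
#     frame, frame_id = lepton.capture()
#     # frames repeat at up to ~27 Hz; compare frame_id values to skip duplicates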
| varunsuresh2912/SafeRanger | Python PiCode/Lepton.py | Python | mit | 7,177 |
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from scipy.interpolate import interp1d,splev,splrep
def extractSpectrum(filename):
"""
NAME:
extractSpectrum
PURPOSE:
To open an input fits file from SDSS and extract the relevant
components, namely the flux and corresponding wavelength.
INPUTS:
filename The path and filename (including the extension) to the
file to be read in.
OUTPUTS:
lam The wavelengths, in angstrom, of the flux values
flux The actual flux, in arbitrary units
EXAMPLE:
        lam, flux = extractSpectrum('path/to/file/filename.fits')
"""
hdu = fits.open(filename) #Open the file using astropy
data = hdu[1].data #Data is in 2nd component of HDU
flux = data['flux'] #Get flux from read in dict
lam = 10**(data['loglam']) #Get wavelength, make it not log10
hdu.close() #Close the file, we're done with it
return lam, flux #Return the values as numpy arrays
def interpolate(points, lam, flux, method):
"""
NAME:
interpolate
PURPOSE:
General purpose function that can call and use various scipy.interpolate
        methods. Defined for convenience.
INPUTS:
points Set of new points to get interpolated values for.
lam The wavelengths of the data points
flux The fluxes of the data points
        method The method of interpolation to use. Valid values include
'interp1d:linear', 'interp1d:quadratic', and 'splrep'.
OUTPUTS:
Interpolated set of values for each corresponding input point.
EXAMPLE:
        interpFlux = interpolate(interpLam, lam, flux, 'splrep')
"""
if method == 'interp1d:linear':
f = interp1d(lam, flux, assume_sorted = True)
return f(points)
if method == 'interp1d:quadratic':
f = interp1d(lam, flux, kind = 'quadratic', assume_sorted = True)
return f(points)
if method == 'splrep':
return splev(points, splrep(lam, flux))
raise Exception("You didn't choose a proper interpolating method")
#First extract the flux and corresponding wavelength
fileName = 'spec-4053-55591-0938.fits'
lam, flux = extractSpectrum(fileName)
#Now let's plot it, without any processing
plt.figure(1)
plt.plot(lam, flux, '-o', lw = 1.5, c = (0.694,0.906,0.561),
mec = 'none', ms = 4, label = 'Original data')
plt.xlabel('Wavelength', fontsize = 16)
plt.ylabel('Flux', fontsize = 16)
plt.ylim(0,1.1*max(flux))
#Now let's interpolate and plot that up
interpLam = np.arange(4000,10000,1)
interpFlux = interpolate(interpLam, lam, flux, 'splrep') #This is my own method
plt.plot(interpLam, interpFlux, '-k', label = 'Interpolated')
plt.legend(loc = 0)
plt.show(block = False)
print('Done...')
| BU-PyCon/Meeting-3 | Programs/interpolate.py | Python | mit | 3,010 |
import unittest
import random
from pygraph.classes.graph import graph
class SWIM(object):
def __init__(self, graph):
self.graph = graph
def edge_alive(self, nodeA, nodeB, alive):
'''
edge_alive(A, B, True|False)
'''
edge = (nodeA, nodeB)
if alive:
self.graph.add_edge(edge)
else:
self.graph.del_edge(edge)
def node_alive(self, node, alive):
'''
node_alive(A, True|False)
'''
if alive:
self.graph.node_attributes(node).clear()
else:
self.graph.node_attributes(node).append("dead")
def ping(self, nodeStart, nodeEnd, k):
'''
NodeStart to ping NodeEnd directly or indirectly through
K random neighbors. Return True if nodeEnd receives ping,
or False otherwise
'''
g = self.graph
# Check if direct ping works
if g.has_edge((nodeStart, nodeEnd)) and \
"dead" not in g.node_attributes(nodeEnd):
return True
# Pick k random neighbors and let them ping end node
for neighbor in self._random_neighbors(nodeStart, k):
if self.ping(neighbor, nodeEnd, 0):
return True
# All pings have failed
return False
def _random_neighbors(self, node, b):
neighbors = self.graph.neighbors(node)
if len(neighbors) <= b:
return neighbors
else:
return random.sample(neighbors, b)
class SWIMTest(unittest.TestCase):
def setUp(self):
g = graph()
        g.add_nodes(range(10))
g.complete()
self.graph = g
self.swim = SWIM(g)
def test_good_ping(self):
swim = self.swim
self.assertTrue(swim.ping(0, 1, 0))
self.assertTrue(swim.ping(1, 3, 0))
def test_dead_edge_ping(self):
swim = self.swim
swim.edge_alive(0, 1, False)
self.assertFalse(swim.ping(0, 1, 0))
self.assertTrue(swim.ping(0, 1, 1))
def test_dead_node_ping(self):
swim = self.swim
swim.node_alive(2, False)
self.assertFalse(swim.ping(0, 2, 0))
self.assertFalse(swim.ping(0, 2, 3))
if __name__ == '__main__':
unittest.main()
| achoi007/CloudComputing | Concepts/SWIM.py | Python | mit | 2,292 |
import numpy as np
from astropy.coordinates import EarthLocation, SkyCoord
__all__ = ['MWA_LOC', 'MWA_FIELD_EOR0', 'MWA_FIELD_EOR1', 'MWA_FIELD_EOR2',
'MWA_FREQ_EOR_ALL_40KHZ', 'MWA_FREQ_EOR_ALL_80KHZ',
'MWA_FREQ_EOR_HI_40KHZ', 'MWA_FREQ_EOR_HI_80KHZ',
'MWA_FREQ_EOR_LOW_40KHZ', 'MWA_FREQ_EOR_LOW_80KHZ',
'HERA_ANT_DICT', 'F21']
F21 = 1420.405751786e6
MWA_LOC = EarthLocation(lat='-26d42m11.94986s', lon='116d40m14.93485s',
height=377.827)
MWA_FIELD_EOR0 = SkyCoord(ra='0.0h', dec='-30.0d')
MWA_FIELD_EOR1 = SkyCoord(ra='4.0h', dec='-30.0d')
MWA_FIELD_EOR2 = SkyCoord(ra='10.33h', dec='-10.0d')
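# The EoR band frequencies below are in MHz; the 40KHZ/80KHZ name suffixes
# refer to the channel width (0.04 MHz and 0.08 MHz steps, respectively).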
MWA_FREQ_EOR_LOW_40KHZ = np.arange(138.895, 167.055, 0.04)
MWA_FREQ_EOR_HI_40KHZ = np.arange(167.055, 195.255, 0.04)
MWA_FREQ_EOR_ALL_40KHZ = np.arange(138.895, 195.255, 0.04)
MWA_FREQ_EOR_LOW_80KHZ = np.arange(138.915, 167.075, 0.08)
MWA_FREQ_EOR_HI_80KHZ = np.arange(167.075, 195.275, 0.08)
MWA_FREQ_EOR_ALL_80KHZ = np.arange(138.915, 195.275, 0.08)
HERA_ANT_DICT = {'hera19': 3, 'hera37': 4, 'hera61': 5, 'hera91': 6,
'hera127': 7, 'hera169': 8, 'hera217': 9, 'hera271': 10,
                 'hera331': 11}
| piyanatk/sim | opstats/utils/settings.py | Python | mit | 1,196 |
__all__ = ['LEAGUE_PROPERTIES']
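# Assumed key meanings (not documented in this file): "rl" = relegation
# places, "cl" = Champions League qualification places, "el" = Europa League
# places; each value appears to be an inclusive [first, last] range of
# league-table positions, with [0, 0] meaning none.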
LEAGUE_PROPERTIES = {
"PL": {
"rl": [18, 20],
"cl": [1, 4],
"el": [5, 5],
},
"EL1": {
"rl": [21, 24],
"cl": [1, 2],
"el": [3, 6]
},
"EL2": {
"rl": [21, 24],
"cl": [1, 2],
"el": [3, 6]
},
"ELC": {
"rl": [22, 24],
"cl": [1,2],
"el": [3,6]
},
"BL1": {
"rl": [16, 18],
"cl": [1, 4],
"el": [5, 6]
},
"BL2": {
"rl": [16, 18],
"cl": [1, 2],
"el": [3, 3]
},
"BL3": {
"rl": [18, 20],
"cl": [1, 2],
"el": [3, 3]
},
"PD": {
"rl": [18,20],
"cl": [1,3],
"el": [4,6]
},
"SD": {
"rl": [19, 22],
"cl": [1, 2],
"el": [3, 6]
},
"SA": {
"rl": [18, 20],
"cl": [1, 3],
"el": [4, 5]
},
"PPL": {
"rl": [17, 18],
"cl": [1, 3],
"el": [4, 5]
},
"DED": {
"rl": [17, 18],
"cl": [1, 3],
"el": [4, 5]
},
"FL1": {
"rl": [19, 20],
"cl": [1, 3],
"el": [4, 4]
},
"FL2": {
"rl": [18, 20],
"cl": [1, 3],
"el": [0, 0]
},
"SB": {
"rl": [19, 22],
"cl": [1, 2],
"el": [3, 6]
},
"ENL": {
"rl": [22, 24],
"cl": [1,2],
"el": [3,6]
},
}
| RayYu03/pysoccer | soccer/data/leagueproperties.py | Python | mit | 1,439 |
'''
This script demonstrates how to create a periodic Gaussian process
using the *gpiso* function.
'''
import numpy as np
import matplotlib.pyplot as plt
from sympy import sin, exp, pi
from rbf.basis import get_r, get_eps, RBF
from rbf.gproc import gpiso
np.random.seed(1)
period = 5.0
cls = 0.5 # characteristic length scale
var = 1.0 # variance
r = get_r() # get symbolic variables
eps = get_eps()
# create a symbolic expression of the periodic covariance function
expr = exp(-sin(r*pi/period)**2/eps**2)
# define a periodic RBF using the symbolic expression
basis = RBF(expr)
# define a Gaussian process using the periodic RBF
gp = gpiso(basis, eps=cls, var=var)
t = np.linspace(-10, 10, 1000)[:,None]
sample = gp.sample(t) # draw a sample
mu,sigma = gp(t) # evaluate mean and std. dev.
# plot the results
fig,ax = plt.subplots(figsize=(6,4))
ax.grid(True)
ax.plot(t[:,0], mu, 'b-', label='mean')
ax.fill_between(
t[:,0], mu - sigma, mu + sigma,
color='b', alpha=0.2, edgecolor='none', label='std. dev.')
ax.plot(t, sample, 'k', label='sample')
ax.set_xlim((-10.0, 10.0))
ax.set_ylim((-2.5*var, 2.5*var))
ax.legend(loc=4, fontsize=10)
ax.tick_params(labelsize=10)
ax.set_xlabel('time', fontsize=10)
ax.set_title('periodic Gaussian process', fontsize=10)
fig.tight_layout()
plt.savefig('../figures/gproc.e.png')
plt.show()
| treverhines/RBF | docs/scripts/gproc.e.py | Python | mit | 1,343 |
# 1417. Weighing Problem
# Given n coins, each weighing 10g, except for one coin that weighs 11g. There
# is a balance scale that compares weights accurately. Find the minimum number
# of weighings needed in the worst case to guarantee finding the 11g coin.
#
# Example
# Given n = 3, return 1.
#
# Explanation:
# Place one coin on each side of the balance. If the two sides are level,
# the third coin is the 11g one; otherwise the heavier coin is the 11g one.
# Given n = 4, return 2.
#
# Explanation:
# Divide the four coins into two groups of two and place one group on each
# side of the scale. Take the heavier group and weigh its two coins against
# each other in a second weighing; the coin on the heavier side is the 11g one.
# class Solution:
# """
# @param n: The number of coins
# @return: The minimum number of weighings in the worst case
# """
# def minimumtimes(self, n):
# # Write your code here
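# A possible implementation sketch (an assumption, not part of the original
# file): each weighing of two equal-sized groups has three outcomes (left
# heavier, right heavier, balanced), so k weighings can distinguish at most
# 3**k coins; the answer is the smallest k with 3**k >= n. This gives 1 for
# n = 3 and 2 for n = 4, matching the examples above.
class Solution:
    """
    @param n: The number of coins
    @return: The minimum number of weighings in the worst case
    """
    def minimumtimes(self, n):
        times = 0
        while 3 ** times < n:
            times += 1
        return times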
| kingdaa/LC-python | lintc/1417_Weighing_Problem.py | Python | mit | 958 |
#!/usr/bin/python
import petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
Print = PETSc.Sys.Print
# from MatrixOperations import *
from dolfin import *
import numpy as np
import matplotlib.pylab as plt
import scipy.sparse as sps
import scipy.sparse.linalg as slinalg
import os
import scipy.io
import PETScIO as IO
import MatrixOperations as MO
def StoreMatrix(A,name):
test ="".join([name,".mat"])
scipy.io.savemat( test, {name: A},oned_as='row')
parameters['num_threads'] = 10
m = 6
errL2b =np.zeros((m-1,1))
errCurlb =np.zeros((m-1,1))
errL2r =np.zeros((m-1,1))
errH1r =np.zeros((m-1,1))
l2border = np.zeros((m-1,1))
Curlborder =np.zeros((m-1,1))
# set_log_level(DEBUG)
NN = np.zeros((m-1,1))
DoF = np.zeros((m-1,1))
Vdim = np.zeros((m-1,1))
Qdim = np.zeros((m-1,1))
Wdim = np.zeros((m-1,1))
iterations = np.zeros((m-1,1))
SolTime = np.zeros((m-1,1))
udiv = np.zeros((m-1,1))
MU = np.zeros((m-1,1))
nn = 2
dim = 2
Solving = 'Direct'
ShowResultPlots = 'yes'
ShowErrorPlots = 'no'
EigenProblem = 'no'
SavePrecond = 'no'
CheckMu = 'no'
case = 4
parameters['linear_algebra_backend'] = 'uBLAS'
MU[0]= 1e0
for xx in xrange(1,m):
print xx
nn = 2**(xx)/2
if (CheckMu == 'yes'):
if (xx != 1):
MU[xx-1] = MU[xx-2]/10
else:
if (xx != 1):
MU[xx-1] = MU[xx-2]
# Create mesh and define function space
nn = int(nn)
NN[xx-1] = nn
parameters["form_compiler"]["quadrature_degree"] = 3
parameters["form_compiler"]["optimize"] = True
parameters["form_compiler"]["representation"] = 'quadrature'
# mesh = BoxMesh(-1,-1,-1,1, 1, 1, nn, nn, nn)
mesh = UnitCubeMesh(nn,nn,nn)
parameters['reorder_dofs_serial'] = False
V = FunctionSpace(mesh, "N1curl",2)
Q = FunctionSpace(mesh, "CG",2)
Vdim[xx-1] = V.dim()
print "\n\n\n V-dim", V.dim()
def boundary(x, on_boundary):
return on_boundary
if case == 1:
u0 = Expression(("x[1]*x[1]*(x[1]-1)","x[0]*x[0]*(x[0]-1)","0"))
elif case == 2:
u0 = Expression(("sin(2*pi*x[1])*cos(2*pi*x[0])","-sin(2*pi*x[0])*cos(2*pi*x[1])"))
elif case == 3:
u0 = Expression(("x[0]*x[0]*(x[0]-1)","x[1]*x[1]*(x[1]-1)","0"))
elif case == 4:
u0 = Expression(("x[0]*x[1]*x[2]*(x[0]-1)","x[0]*x[1]*x[2]*(x[1]-1)","x[0]*x[1]*x[2]*(x[2]-1)"))
bcs = DirichletBC(V,u0, boundary)
# (u1) = TrialFunctions(V)
# (v1) = TestFunctions(V)
c = .5
if case == 1:
# f= Expression(("(8*pow(pi,2)-C)*sin(2*pi*x[1])*cos(2*pi*x[0])","-(8*pow(pi,2)-C)*sin(2*pi*x[0])*cos(2*pi*x[1])"),C = c)
f = Expression(("-6*x[1]+2","-6*x[0]+2"))+c*u0
elif case == 2:
f = 8*pow(pi,2)*u0+c*u0
elif case == 3:
f = Expression(("0","0","0"),C = c)
f = c*u0
elif case == 4:
f = Expression(("x[2]*(2*x[1]-1)+x[1]*(2*x[2]-1)","x[0]*(2*x[2]-1)+x[2]*(2*x[0]-1)","x[1]*(2*x[0]-1)+x[0]*(2*x[1]-1)"))+c*u0
(u) = TrialFunction(V)
(v) = TestFunction(V)
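    # Shifted curl-curl bilinear form: (curl u, curl v) + c*(u, v)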
a = dot(curl(u),curl(v))*dx+c*inner(u, v)*dx
L1 = inner(v, f)*dx
tic()
AA, bb = assemble_system(a, L1, bcs)
As = AA.sparray()
StoreMatrix(As,'A')
A = PETSc.Mat().createAIJ(size=As.shape,csr=(As.indptr, As.indices, As.data))
# exit
# A = as_backend_type(AA).mat()
print toc()
b = bb.array()
zeros = 0*b
x = IO.arrayToVec(zeros)
bb = IO.arrayToVec(b)
if (Solving == 'Direct'):
ksp = PETSc.KSP().create()
ksp.setOperators(A)
ksp.setFromOptions()
ksp.setType(ksp.Type.PREONLY)
ksp.pc.setType(ksp.pc.Type.LU)
# print 'Solving with:', ksp.getType()
# Solve!
tic()
ksp.solve(bb, x)
SolTime[xx-1] = toc()
print "time to solve: ",SolTime[xx-1]
del AA
if (Solving == 'Iterative' or Solving == 'Direct'):
if case == 1:
ue = Expression(("x[1]*x[1]*(x[1]-1)","x[0]*x[0]*(x[0]-1)"))
elif case == 2:
ue = Expression(("sin(2*pi*x[1])*cos(2*pi*x[0])","-sin(2*pi*x[0])*cos(2*pi*x[1])"))
elif case == 3:
ue=u0
elif case == 4:
ue=u0
Ve = FunctionSpace(mesh, "N1curl",4)
u = interpolate(ue,Ve)
Nv = u.vector().array().shape
X = IO.vecToArray(x)
x = X[0:Nv[0]]
ua = Function(V)
ua.vector()[:] = x
parameters["form_compiler"]["quadrature_degree"] = 4
parameters["form_compiler"]["optimize"] = True
ErrorB = Function(V)
ErrorB.vector()[:] = interpolate(ue,V).vector().array()-ua.vector().array()
errL2b[xx-1] = sqrt(assemble(inner(ErrorB, ErrorB)*dx))
errCurlb[xx-1] = sqrt(assemble(inner(curl(ErrorB), curl(ErrorB))*dx))
if xx == 1:
a = 1
else:
l2border[xx-1] = np.abs(np.log2(errL2b[xx-2]/errL2b[xx-1]))
Curlborder[xx-1] = np.abs(np.log2(errCurlb[xx-2]/errCurlb[xx-1]))
print errL2b[xx-1]
print errCurlb[xx-1]
import pandas as pd
print "\n\n Magnetic convergence"
MagneticTitles = ["Total DoF","Soln Time","B-L2","B-order","B-Curl","Curl-order"]
MagneticValues = np.concatenate((Vdim,SolTime,errL2b,l2border,errCurlb,Curlborder),axis=1)
MagneticTable= pd.DataFrame(MagneticValues, columns = MagneticTitles)
pd.set_option('precision',3)
MagneticTable = MO.PandasFormat(MagneticTable,"B-Curl","%2.4e")
MagneticTable = MO.PandasFormat(MagneticTable,'B-L2',"%2.4e")
print MagneticTable
if (SavePrecond == 'yes'):
scipy.io.savemat('eigenvalues/Wdim.mat', {'Wdim':Wdim-1},oned_as = 'row')
if (ShowResultPlots == 'yes'):
plot(ua)
plot(interpolate(ue,V))
interactive()
| wathen/PhD | MHD/FEniCS/ShiftCurlCurl/saddle.py | Python | mit | 5,740 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-08-01 07:59
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import proso.django.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('proso_user', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('proso_common', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.DateTimeField(default=datetime.datetime.now)),
('response_time', models.IntegerField()),
('guess', models.FloatField(default=0)),
('type', models.CharField(max_length=10)),
('lang', models.CharField(blank=True, default=None, max_length=2, null=True)),
('config', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_common.Config')),
],
),
migrations.CreateModel(
name='AnswerMeta',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('content_hash', models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name='Audit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(max_length=50)),
('value', models.FloatField()),
('time', models.DateTimeField(default=datetime.datetime.now)),
('answer', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.Answer')),
],
),
migrations.CreateModel(
name='EnvironmentInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.IntegerField(choices=[(0, 'disabled'), (1, 'loading'), (2, 'enabled'), (3, 'active')], default=1)),
('revision', models.IntegerField()),
('load_progress', models.IntegerField(default=0)),
('updated', models.DateTimeField(auto_now=True)),
('created', models.DateTimeField(auto_now_add=True)),
('config', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='proso_common.Config')),
],
),
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('active', models.BooleanField(default=True)),
],
bases=(models.Model, proso.django.models.ModelDiffMixin),
),
migrations.CreateModel(
name='ItemRelation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('visible', models.BooleanField(default=True)),
('active', models.BooleanField(default=True)),
('child', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='child_relations', to='proso_models.Item')),
('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parent_relations', to='proso_models.Item')),
],
bases=(models.Model, proso.django.models.ModelDiffMixin),
),
migrations.CreateModel(
name='ItemType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('model', models.CharField(max_length=100)),
('table', models.CharField(max_length=100)),
('foreign_key', models.CharField(max_length=100)),
('language', models.CharField(blank=True, default=None, max_length=100, null=True)),
('valid', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='PracticeContext',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('content_hash', models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name='PracticeSet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('finished', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Variable',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('permanent', models.BooleanField(default=False)),
('key', models.CharField(max_length=50)),
('value', models.FloatField()),
('audit', models.BooleanField(default=True)),
('updated', models.DateTimeField(default=datetime.datetime.now)),
('answer', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.Answer')),
('info', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.EnvironmentInfo')),
('item_primary', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_primary_variables', to='proso_models.Item')),
('item_secondary', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_secondary_variables', to='proso_models.Item')),
('user', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterUniqueTogether(
name='itemtype',
unique_together=set([('model', 'foreign_key'), ('table', 'foreign_key')]),
),
migrations.AddField(
model_name='item',
name='children',
field=models.ManyToManyField(related_name='parents', through='proso_models.ItemRelation', to='proso_models.Item'),
),
migrations.AddField(
model_name='item',
name='item_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.ItemType'),
),
migrations.AddField(
model_name='audit',
name='info',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.EnvironmentInfo'),
),
migrations.AddField(
model_name='audit',
name='item_primary',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_primary_audits', to='proso_models.Item'),
),
migrations.AddField(
model_name='audit',
name='item_secondary',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_secondary_audits', to='proso_models.Item'),
),
migrations.AddField(
model_name='audit',
name='user',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='answer',
name='context',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.PracticeContext'),
),
migrations.AddField(
model_name='answer',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item_answers', to='proso_models.Item'),
),
migrations.AddField(
model_name='answer',
name='item_answered',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_answered_answers', to='proso_models.Item'),
),
migrations.AddField(
model_name='answer',
name='item_asked',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item_asked_answers', to='proso_models.Item'),
),
migrations.AddField(
model_name='answer',
name='metainfo',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.AnswerMeta'),
),
migrations.AddField(
model_name='answer',
name='practice_set',
field=models.ForeignKey(blank=None, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.PracticeSet'),
),
migrations.AddField(
model_name='answer',
name='session',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_user.Session'),
),
migrations.AddField(
model_name='answer',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='variable',
unique_together=set([('info', 'key', 'user', 'item_primary', 'item_secondary')]),
),
migrations.AlterIndexTogether(
name='variable',
index_together=set([('info', 'key', 'user'), ('info', 'key', 'user', 'item_primary'), ('info', 'key', 'user', 'item_primary', 'item_secondary'), ('info', 'key', 'item_primary'), ('info', 'key')]),
),
migrations.AlterUniqueTogether(
name='environmentinfo',
unique_together=set([('config', 'revision')]),
),
migrations.AlterIndexTogether(
name='audit',
index_together=set([('info', 'key', 'user'), ('info', 'key', 'user', 'item_primary'), ('info', 'key', 'user', 'item_primary', 'item_secondary'), ('info', 'key', 'item_primary'), ('info', 'key')]),
),
migrations.AlterIndexTogether(
name='answer',
index_together=set([('user', 'context')]),
),
]
| adaptive-learning/proso-apps | proso_models/migrations/0001_initial.py | Python | mit | 11,335 |
#!/usr/bin/env python
# Usage parse_shear sequences.fna a2t.txt emb_output.b6
import sys
import csv
from collections import Counter, defaultdict
sequences = sys.argv[1]
accession2taxonomy = sys.argv[2]
alignment = sys.argv[3]
with open(accession2taxonomy) as inf:
next(inf)
csv_inf = csv.reader(inf, delimiter="\t")
a2t = dict(('_'.join(row[0].split()[0].split('_')[:-1]).split('.')[0], row[-1]) for row in csv_inf)
print("Loaded accession2taxonomy.")
reads_counter = Counter()
with open(sequences) as inf:
for i, line in enumerate(inf):
if i % 100000 == 0:
print("Processed %d lines" % i)
print(line)
if line.startswith('>'):
name = '_'.join(line.split()[0][1:].split('_')[:-1]).split('.')[0]
if name in a2t:
species = a2t[name]
reads_counter.update([species])
print("Loaded read counter")
counts_dict = defaultdict(Counter)
with open(alignment) as inf:
csv_inf = csv.reader(inf, delimiter="\t")
for i, row in enumerate(csv_inf):
if i % 100000 == 0:
print("Processed %d records" % i)
print(row)
if row[-1].startswith('k'):
read = row[0]
read = "_".join(read.split('_')[:-1]).split('.')[0]
if read in a2t:
species = a2t[read]
tax = row[-1]
counts_dict[species].update([tax])
print("Loaded counts_dict.")
with open("sheared_bayes.txt", "w") as outf:
for i, species in enumerate(counts_dict.keys()):
row = [0] * 10
row[-1] = reads_counter[species]
row[0] = species
counts = counts_dict[species]
if i % 10000 == 0:
print("Processed %d records" % i)
print(counts)
for j in counts.keys():
c = j.count(';')
row[c+1] = counts[j]
row = list(map(str, row))
outf.write("\t".join(row) + "\n")
| knights-lab/analysis_SHOGUN | scripts/parse_shear.py | Python | mit | 2,010 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from .config import Config
| kaka19ace/kkutil | kkutil/config/__init__.py | Python | mit | 77 |
import seaborn as sns
import matplotlib.pyplot as plt
def plot_corrmatrix(df, square=True, linewidths=0.1, annot=True,
size=None, figsize=(12, 9), *args, **kwargs):
"""
Plot correlation matrix of the dataset
see doc at https://stanford.edu/~mwaskom/software/seaborn/generated/seaborn.heatmap.html#seaborn.heatmap
"""
sns.set(context="paper", font="monospace")
f, ax = plt.subplots(figsize=figsize)
sns.heatmap(df.corr(), vmax=1, square=square, linewidths=linewidths,
annot=annot, annot_kws={"size": size}, *args, **kwargs)
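# A minimal usage sketch (illustrative data, not from this repository):
#
# import pandas as pd
# df = pd.DataFrame({'a': [1, 2, 3, 4], 'b': [2, 1, 4, 3], 'c': [4, 3, 2, 1]})
# plot_corrmatrix(df)
# plt.show()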
| ericfourrier/auto-clean | autoc/utils/corrplot.py | Python | mit | 590 |
#--------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#--------------------------------------------------------------------------
import base64
import json
import pickle
import re
from utils import HTTP_REQUESTS
from azure.core.pipeline._tools import is_rest
import types
import unittest
try:
from unittest import mock
except ImportError:
import mock
import pytest
from requests import Request, Response
from msrest import Deserializer
from azure.core.polling import async_poller, AsyncLROPoller
from azure.core.exceptions import DecodeError, HttpResponseError
from azure.core import AsyncPipelineClient
from azure.core.pipeline import PipelineResponse, AsyncPipeline, PipelineContext
from azure.core.pipeline.transport import AsyncioRequestsTransportResponse, AsyncHttpTransport
from azure.core.polling.async_base_polling import (
AsyncLROBasePolling,
)
from utils import ASYNCIO_REQUESTS_TRANSPORT_RESPONSES, request_and_responses_product, create_transport_response
from rest_client_async import AsyncTestRestClient
class SimpleResource:
"""An implementation of Python 3 SimpleNamespace.
Used to deserialize resource objects from response bodies where
no particular object type has been specified.
"""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
def __eq__(self, other):
return self.__dict__ == other.__dict__
class BadEndpointError(Exception):
pass
TEST_NAME = 'foo'
RESPONSE_BODY = {'properties':{'provisioningState': 'InProgress'}}
ASYNC_BODY = json.dumps({ 'status': 'Succeeded' })
ASYNC_URL = 'http://dummyurlFromAzureAsyncOPHeader_Return200'
LOCATION_BODY = json.dumps({ 'name': TEST_NAME })
LOCATION_URL = 'http://dummyurlurlFromLocationHeader_Return200'
RESOURCE_BODY = json.dumps({ 'name': TEST_NAME })
RESOURCE_URL = 'http://subscriptions/sub1/resourcegroups/g1/resourcetype1/resource1'
ERROR = 'http://dummyurl_ReturnError'
POLLING_STATUS = 200
CLIENT = AsyncPipelineClient("http://example.org")
CLIENT.http_request_type = None
CLIENT.http_response_type = None
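# Patch the pipeline's run method so polling requests are answered by
# TestBasePolling.mock_update instead of going over the network.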
async def mock_run(client_self, request, **kwargs):
return TestBasePolling.mock_update(client_self.http_request_type, client_self.http_response_type, request.url)
CLIENT._pipeline.run = types.MethodType(mock_run, CLIENT)
@pytest.fixture
def client():
    # The poller itself doesn't use it, so it doesn't need to be functional
return AsyncPipelineClient("https://baseurl")
@pytest.fixture
def async_pipeline_client_builder():
"""Build a client that use the "send" callback as final transport layer
send will receive "request" and kwargs as any transport layer
"""
def create_client(send_cb):
class TestHttpTransport(AsyncHttpTransport):
async def open(self): pass
async def close(self): pass
async def __aexit__(self, *args, **kwargs): pass
async def send(self, request, **kwargs):
return await send_cb(request, **kwargs)
return AsyncPipelineClient(
'http://example.org/',
pipeline=AsyncPipeline(
transport=TestHttpTransport()
)
)
return create_client
@pytest.fixture
def deserialization_cb():
def cb(pipeline_response):
return json.loads(pipeline_response.http_response.text())
return cb
@pytest.fixture
def polling_response():
polling = AsyncLROBasePolling()
headers = {}
response = Response()
response.headers = headers
response.status_code = 200
polling._pipeline_response = PipelineResponse(
None,
AsyncioRequestsTransportResponse(
None,
response,
),
PipelineContext(None)
)
polling._initial_response = polling._pipeline_response
return polling, headers
def test_base_polling_continuation_token(client, polling_response):
polling, _ = polling_response
continuation_token = polling.get_continuation_token()
assert isinstance(continuation_token, str)
polling_args = AsyncLROBasePolling.from_continuation_token(
continuation_token,
deserialization_callback="deserialization_callback",
client=client,
)
new_polling = AsyncLROBasePolling()
new_polling.initialize(*polling_args)
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_post(async_pipeline_client_builder, deserialization_cb, http_request, http_response):
# Test POST LRO with both Location and Operation-Location
# The initial response contains both Location and Operation-Location, a 202 and no Body
initial_response = TestBasePolling.mock_send(
http_request,
http_response,
'POST',
202,
{
'location': 'http://example.org/location',
'operation-location': 'http://example.org/async_monitor',
},
''
)
async def send(request, **kwargs):
assert request.method == 'GET'
if request.url == 'http://example.org/location':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'location_result': True}
).http_response
elif request.url == 'http://example.org/async_monitor':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'status': 'Succeeded'}
).http_response
else:
pytest.fail("No other query allowed")
client = async_pipeline_client_builder(send)
# LRO options with Location final state
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0))
result = await poll
assert result['location_result'] == True
# Location has no body
async def send(request, **kwargs):
assert request.method == 'GET'
if request.url == 'http://example.org/location':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body=None
).http_response
elif request.url == 'http://example.org/async_monitor':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'status': 'Succeeded'}
).http_response
else:
pytest.fail("No other query allowed")
client = async_pipeline_client_builder(send)
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0))
result = await poll
assert result is None
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_post_resource_location(async_pipeline_client_builder, deserialization_cb, http_request, http_response):
# ResourceLocation
# The initial response contains both Location and Operation-Location, a 202 and no Body
initial_response = TestBasePolling.mock_send(
http_request,
http_response,
'POST',
202,
{
'operation-location': 'http://example.org/async_monitor',
},
''
)
async def send(request, **kwargs):
assert request.method == 'GET'
if request.url == 'http://example.org/resource_location':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'location_result': True}
).http_response
elif request.url == 'http://example.org/async_monitor':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'status': 'Succeeded', 'resourceLocation': 'http://example.org/resource_location'}
).http_response
else:
pytest.fail("No other query allowed")
client = async_pipeline_client_builder(send)
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0))
result = await poll
assert result['location_result'] == True
class TestBasePolling(object):
convert = re.compile('([a-z0-9])([A-Z])')
@staticmethod
def mock_send(http_request, http_response, method, status, headers=None, body=RESPONSE_BODY):
if headers is None:
headers = {}
response = Response()
response._content_consumed = True
response._content = json.dumps(body).encode('ascii') if body is not None else None
response.request = Request()
response.request.method = method
response.request.url = RESOURCE_URL
response.request.headers = {
'x-ms-client-request-id': '67f4dd4e-6262-45e1-8bed-5c45cf23b6d9'
}
response.status_code = status
response.headers = headers
response.headers.update({"content-type": "application/json; charset=utf8"})
response.reason = "OK"
if is_rest(http_request):
request = http_request(
response.request.method,
response.request.url,
headers=response.request.headers,
content=body,
)
else:
request = CLIENT._request(
response.request.method,
response.request.url,
None, # params
response.request.headers,
body,
None, # form_content
None # stream_content
)
response = create_transport_response(http_response, request, response)
if is_rest(http_response):
response.body()
return PipelineResponse(
request,
response,
None # context
)
@staticmethod
def mock_update(http_request, http_response, url, headers=None):
response = Response()
response._content_consumed = True
response.request = mock.create_autospec(Request)
response.request.method = 'GET'
response.headers = headers or {}
response.headers.update({"content-type": "application/json; charset=utf8"})
response.reason = "OK"
if url == ASYNC_URL:
response.request.url = url
response.status_code = POLLING_STATUS
response._content = ASYNC_BODY.encode('ascii')
response.randomFieldFromPollAsyncOpHeader = None
elif url == LOCATION_URL:
response.request.url = url
response.status_code = POLLING_STATUS
response._content = LOCATION_BODY.encode('ascii')
response.randomFieldFromPollLocationHeader = None
elif url == ERROR:
raise BadEndpointError("boom")
elif url == RESOURCE_URL:
response.request.url = url
response.status_code = POLLING_STATUS
response._content = RESOURCE_BODY.encode('ascii')
else:
raise Exception('URL does not match')
request = http_request(
response.request.method,
response.request.url,
)
response = create_transport_response(http_response, request, response)
if is_rest(http_response):
response.body()
return PipelineResponse(
request,
response,
None # context
)
@staticmethod
def mock_outputs(pipeline_response):
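        """Deserialize a polling response body into a SimpleResource.
        camelCase keys are converted to snake_case and the 'properties'
        dict is flattened into the top level; raises DecodeError when the
        payload cannot be deserialized.
        """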
response = pipeline_response.http_response
try:
body = json.loads(response.text())
except ValueError:
raise DecodeError("Impossible to deserialize")
body = {TestBasePolling.convert.sub(r'\1_\2', k).lower(): v
for k, v in body.items()}
properties = body.setdefault('properties', {})
if 'name' in body:
properties['name'] = body['name']
if properties:
properties = {TestBasePolling.convert.sub(r'\1_\2', k).lower(): v
for k, v in properties.items()}
del body['properties']
body.update(properties)
resource = SimpleResource(**body)
else:
raise DecodeError("Impossible to deserialize")
return resource
@staticmethod
def mock_deserialization_no_body(pipeline_response):
"""Use this mock when you don't expect a return (last body irrelevant)
"""
return None
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_long_running_put(http_request, http_response):
#TODO: Test custom header field
CLIENT.http_request_type = http_request
CLIENT.http_response_type = http_response
# Test throw on non LRO related status code
response = TestBasePolling.mock_send(
http_request, http_response, 'PUT', 1000, {}
)
with pytest.raises(HttpResponseError):
await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
# Test with no polling necessary
response_body = {
'properties':{'provisioningState': 'Succeeded'},
'name': TEST_NAME
}
response = TestBasePolling.mock_send(
http_request,
http_response,
'PUT', 201,
{}, response_body
)
def no_update_allowed(url, headers=None):
raise ValueError("Should not try to update")
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method
)
assert poll.name == TEST_NAME
assert not hasattr(polling_method._pipeline_response, 'randomFieldFromPollAsyncOpHeader')
# Test polling from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PUT', 201,
{'operation-location': ASYNC_URL})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert not hasattr(polling_method._pipeline_response, 'randomFieldFromPollAsyncOpHeader')
# Test polling location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PUT', 201,
{'location': LOCATION_URL})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollLocationHeader is None
# Test polling initial payload invalid (SQLDb)
response_body = {} # Empty will raise
response = TestBasePolling.mock_send(
http_request,
http_response,
'PUT', 201,
{'location': LOCATION_URL}, response_body)
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollLocationHeader is None
# Test fail to poll from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PUT', 201,
{'operation-location': ERROR})
with pytest.raises(BadEndpointError):
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
# Test fail to poll from location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PUT', 201,
{'location': ERROR})
with pytest.raises(BadEndpointError):
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_long_running_patch(http_request, http_response):
CLIENT.http_request_type = http_request
CLIENT.http_response_type = http_response
# Test polling from location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PATCH', 202,
{'location': LOCATION_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollLocationHeader is None
# Test polling from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PATCH', 202,
{'operation-location': ASYNC_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert not hasattr(polling_method._pipeline_response, 'randomFieldFromPollAsyncOpHeader')
# Test polling from location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PATCH', 200,
{'location': LOCATION_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollLocationHeader is None
# Test polling from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PATCH', 200,
{'operation-location': ASYNC_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert not hasattr(polling_method._pipeline_response, 'randomFieldFromPollAsyncOpHeader')
# Test fail to poll from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PATCH', 202,
{'operation-location': ERROR})
with pytest.raises(BadEndpointError):
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
# Test fail to poll from location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'PATCH', 202,
{'location': ERROR})
with pytest.raises(BadEndpointError):
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_long_running_delete(http_request, http_response):
# Test polling from operation-location header
CLIENT.http_request_type = http_request
CLIENT.http_response_type = http_response
response = TestBasePolling.mock_send(
http_request,
http_response,
'DELETE', 202,
{'operation-location': ASYNC_URL},
body=""
)
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_deserialization_no_body,
polling_method)
assert poll is None
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollAsyncOpHeader is None
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_long_running_post(http_request, http_response):
CLIENT.http_request_type = http_request
CLIENT.http_response_type = http_response
# Test polling from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 201,
{'operation-location': ASYNC_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_deserialization_no_body,
polling_method)
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollAsyncOpHeader is None
# Test polling from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'operation-location': ASYNC_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_deserialization_no_body,
polling_method)
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollAsyncOpHeader is None
# Test polling from location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'location': LOCATION_URL},
body={'properties':{'provisioningState': 'Succeeded'}})
polling_method = AsyncLROBasePolling(0)
poll = await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
polling_method)
assert poll.name == TEST_NAME
assert polling_method._pipeline_response.http_response.internal_response.randomFieldFromPollLocationHeader is None
# Test fail to poll from operation-location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'operation-location': ERROR})
with pytest.raises(BadEndpointError):
await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
# Test fail to poll from location header
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'location': ERROR})
with pytest.raises(BadEndpointError):
await async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_long_running_negative(http_request, http_response):
global LOCATION_BODY
global POLLING_STATUS
CLIENT.http_request_type = http_request
CLIENT.http_response_type = http_response
# Test LRO PUT throws for invalid json
LOCATION_BODY = '{'
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'location': LOCATION_URL})
poll = async_poller(
CLIENT,
response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0)
)
with pytest.raises(DecodeError):
await poll
LOCATION_BODY = '{\'"}'
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'location': LOCATION_URL})
poll = async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
with pytest.raises(DecodeError):
await poll
LOCATION_BODY = '{'
POLLING_STATUS = 203
response = TestBasePolling.mock_send(
http_request,
http_response,
'POST', 202,
{'location': LOCATION_URL})
poll = async_poller(CLIENT, response,
TestBasePolling.mock_outputs,
AsyncLROBasePolling(0))
with pytest.raises(HttpResponseError) as error: # TODO: Node.js raises on deserialization
await poll
assert error.value.continuation_token == base64.b64encode(pickle.dumps(response)).decode('ascii')
LOCATION_BODY = json.dumps({ 'name': TEST_NAME })
POLLING_STATUS = 200
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request,http_response", request_and_responses_product(ASYNCIO_REQUESTS_TRANSPORT_RESPONSES))
async def test_post_final_state_via(async_pipeline_client_builder, deserialization_cb, http_request, http_response):
# Test POST LRO with both Location and Operation-Location
CLIENT.http_request_type = http_request
CLIENT.http_response_type = http_response
# The initial response contains both Location and Operation-Location, a 202 and no Body
initial_response = TestBasePolling.mock_send(
http_request,
http_response,
'POST',
202,
{
'location': 'http://example.org/location',
'operation-location': 'http://example.org/async_monitor',
},
''
)
async def send(request, **kwargs):
assert request.method == 'GET'
if request.url == 'http://example.org/location':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'location_result': True}
).http_response
elif request.url == 'http://example.org/async_monitor':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'status': 'Succeeded'}
).http_response
else:
pytest.fail("No other query allowed")
client = async_pipeline_client_builder(send)
# Test 1, LRO options with Location final state
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0, lro_options={"final-state-via": "location"}))
result = await poll
assert result['location_result'] == True
# Test 2, LRO options with Operation-Location final state
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0, lro_options={"final-state-via": "operation-location"}))
result = await poll
assert result['status'] == 'Succeeded'
# Test 3, "do the right thing" and use Location by default
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0))
result = await poll
assert result['location_result'] == True
# Test 4, location has no body
async def send(request, **kwargs):
assert request.method == 'GET'
if request.url == 'http://example.org/location':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body=None
).http_response
elif request.url == 'http://example.org/async_monitor':
return TestBasePolling.mock_send(
http_request,
http_response,
'GET',
200,
body={'status': 'Succeeded'}
).http_response
else:
pytest.fail("No other query allowed")
client = async_pipeline_client_builder(send)
poll = async_poller(
client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0, lro_options={"final-state-via": "location"}))
result = await poll
assert result is None
@pytest.mark.asyncio
@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
async def test_final_get_via_location(port, http_request, deserialization_cb):
client = AsyncTestRestClient(port)
request = http_request(
"PUT",
"http://localhost:{}/polling/polling-with-options".format(port),
)
request.set_json_body({"hello": "world!"})
initial_response = await client._client._pipeline.run(request)
poller = AsyncLROPoller(
client._client,
initial_response,
deserialization_cb,
AsyncLROBasePolling(0, lro_options={"final-state-via": "location"}),
)
result = await poller.result()
assert result == {"returnedFrom": "locationHeaderUrl"}
| Azure/azure-sdk-for-python | sdk/core/azure-core/tests/async_tests/test_base_polling_async.py | Python | mit | 30,931 |
from organise import app
app.run()
| msanatan/organise | run.py | Python | mit | 36 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.gis.geos import GeometryCollection
def change_line_to_multiline(apps, schema_editor):
    # We can't import the Poi model directly as it may be a newer
    # version than this migration expects. We use the historical version.
Poi = apps.get_model("webmap", "Poi")
for poi in Poi.objects.all():
if poi.geom:
poi.geom_multi = GeometryCollection([poi.geom, ])
poi.save()
class Migration(migrations.Migration):
dependencies = [
('webmap', '0011_auto_20160101_0521'),
]
operations = [
migrations.RunPython(change_line_to_multiline),
]
| auto-mat/django-webmap-corpus | webmap/migrations/0012_line_to_multiline.py | Python | mit | 736 |
from eth_utils import (
is_hex,
is_string,
is_integer,
remove_0x_prefix,
force_text,
)
def is_predefined_block_number(value):
if not is_string(value):
return False
return force_text(value) in {"latest", "pending", "earliest"}
def is_hex_encoded_block_hash(value):
if not is_string(value):
return False
return len(remove_0x_prefix(value)) == 64 and is_hex(value)
def is_hex_encoded_block_number(value):
if not is_string(value):
return False
elif is_hex_encoded_block_hash(value):
return False
try:
value_as_int = int(value, 16)
except ValueError:
return False
return 0 <= value_as_int < 2**256
def select_method_for_block_identifier(value, if_hash, if_number, if_predefined):
if is_predefined_block_number(value):
return if_predefined
elif isinstance(value, bytes):
return if_hash
elif is_hex_encoded_block_hash(value):
return if_hash
elif is_integer(value) and (0 <= value < 2**256):
return if_number
elif is_hex_encoded_block_number(value):
return if_number
else:
raise ValueError(
"Value did not match any of the recognized block identifiers: {0}".format(value)
)
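# Illustrative dispatch (hypothetical handler names):
#   select_method_for_block_identifier('latest', by_hash, by_number, predefined)
#       -> predefined
#   select_method_for_block_identifier(123, by_hash, by_number, predefined)
#       -> by_number
#   select_method_for_block_identifier('0x' + '00' * 32, by_hash, by_number, predefined)
#       -> by_hash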
| pipermerriam/web3.py | web3/utils/blocks.py | Python | mit | 1,270 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^', include('ebets.urls')),
url(r'^admin/', include(admin.site.urls)),
)
| pisskidney/dota | dota/urls.py | Python | mit | 222 |
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
inspected_dict = {}
for i, num in enumerate(nums):
try:
j = inspected_dict[num]
return j+1, i+1
except KeyError:
inspected_dict[target-num] = i | chenjiancan/LeetCodeSolutions | src/two_sum/two_sum.py | Python | mit | 414 |
#! /usr/bin/env python
"""Unit tests for the image downloader."""
import unittest
import download
__author__ = "Nick Pascucci ([email protected])"
class DownloadTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_img_matcher(self):
html = """<html>
<body>
<b>Hi there!</b>
<img src="abcd-(myfile)[1].jpg">
</body>
</html>
"""
paths = download.get_image_paths(html)
assert paths == ["abcd-(myfile)[1].jpg"]
def test_img_matcher_http(self):
html = """<html>
<body>
<b>Hi there!</b>
<img src="http://www.def.com/abcd-(myfile)[1].jpg">
</body>
</html>
"""
paths = download.get_image_paths(html)
assert paths == ["http://www.def.com/abcd-(myfile)[1].jpg"]
def test_extension_matcher(self):
filename = "abcdef.jpg"
assert download.match_extension(filename)
filename = "abcdef.txt"
assert not download.match_extension(filename)
def test_sitename_matcher(self):
site = "http://www.xkcd.com/208/"
sitename = download.sitename(site)
assert "http://www.xkcd.com" == sitename
if __name__ == "__main__":
unittest.main()
| nickpascucci/AppDesign | download/download_test.py | Python | mit | 1,195 |
# -*- coding: utf-8 -*-
r"""
.. _SoftiMAX:
SoftiMAX at MAX IV
------------------
The images below are produced by scripts in
``\examples\withRaycing\14_SoftiMAX``.
The beamline will have two branches:
- STXM (Scanning Transmission X-ray Microscopy) and
- CXI (Coherent X-ray Imaging),
see the scheme provided by K. Thånell.
.. imagezoom:: _images/softiMAX_layout.*
STXM branch
~~~~~~~~~~~
.. rubric:: Rays vs. hybrid
The propagation through the first optical elements – from undulator to front
end (FE) slit, to M1, to M2 and to plane grating (PG) – is done with rays:
+------------+------------+------------+------------+
| FE | M1 | M2 | PG |
+============+============+============+============+
| |st_rFE| | |st_rM1| | |st_rM2| | |st_rPG| |
+------------+------------+------------+------------+
.. |st_rFE| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-00-FE.*
.. |st_rM1| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-01-M1local.*
.. |st_rM2| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-02-M2local.*
.. |st_rPG| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-02a-PGlocal.*
:loc: upper-right-corner
Starting from PG – to M3, to exit slit, to Fresnel zone plate (FZP) and to
variously positioned sample screen – the propagation is done by rays or waves,
as compared below. Although the M3 footprint is not perfect (not black at the
periphery), the field at the normal surfaces (exit slit, FZP (not shown) and
sample screen) is of perfect quality. At the best focus, rays and waves result
in a similar image. Notice the micron-sized depth of focus.
+-----------+---------------------+---------------------+
| | rays | wave |
+===========+=====================+=====================+
| M3 | |st_rM3| | |st_hM3| |
+-----------+---------------------+---------------------+
| exit slit | |st_rES| | |st_hES| |
+-----------+---------------------+---------------------+
| sample | |st_rS| | |st_hS| |
+-----------+---------------------+---------------------+
.. |st_rM3| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-03-M3local.*
.. |st_hM3| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |st_rES| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-04-ExitSlit.*
.. |st_hES| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-04-ExitSlit.*
:loc: upper-right-corner
.. |st_rS| animation:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Influence of emittance
Non-zero emittance radiation is treated in xrt by incoherent addition of single
electron intensities. The single electron (filament) fields are considered
fully coherent and result from filament trajectories (one per repeat)
that attain positional and angular shifts within the given emittance
distribution. The following images are calculated for the exit slit and the
focus screen for zero and non-zero emittance
(for MAX IV 3 GeV ring: ε\ :sub:`x`\ =263 pm·rad,
β\ :sub:`x`\ =9 m, ε\ :sub:`z`\ =8 pm·rad, β\ :sub:`z`\ =2 m). At the real
emittance, the horizontal focal size increases by ~75%. A finite energy band,
as determined by vertical size of the exit slit, results in somewhat bigger
broadening due to a chromatic dependence of the focal length.
+-----------+---------------------+---------------------+---------------------+
| | 0 emittance | real emittance | |refeb| |
+===========+=====================+=====================+=====================+
| exit slit | |st_hESb| | |st_hES2| | |st_hES3| |
+-----------+---------------------+---------------------+---------------------+
| sample | |st_hSb| | |st_hS2| | |st_hS3| |
+-----------+---------------------+---------------------+---------------------+
.. |refeb| replace:: real emittance, finite energy band
.. |st_hESb| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-04-ExitSlit.*
.. |st_hES2| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-04-ExitSlit.*
.. |st_hS2| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hES3| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-wideE-04-ExitSlit.*
:loc: upper-right-corner
.. |st_hSb| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS3| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-wideE-06i-ExpFocus-Is
:loc: upper-right-corner
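The underlying incoherent summation is simple; a minimal NumPy sketch (with
made-up field arrays, not the xrt API)::
    import numpy as np
    rng = np.random.default_rng(0)
    # fields[i]: complex field of the i-th filament electron on a 64x64 screen
    fields = rng.normal(size=(100, 64, 64)) + 1j * rng.normal(size=(100, 64, 64))
    intensity = (np.abs(fields)**2).sum(axis=0)  # incoherent sum over electrons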
.. rubric:: Correction of emittance effects
The increased focal size can be remedied by closing the exit slit. At a flux
loss of about 2/3, the focal size is almost restored.
+-----------+--------------------+--------------------+
| | 80 µm exit slit | 20 µm exit slit |
+===========+====================+====================+
| exit slit | |st_hES2b| | |st_hES4| |
+-----------+--------------------+--------------------+
| sample | |st_hS2b| | |st_hS4| |
+-----------+--------------------+--------------------+
.. |st_hES2b| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-04-ExitSlit.*
.. |st_hES4| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-025H-04-ExitSlit.*
:loc: upper-right-corner
.. |st_hS2b| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS4| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-025H-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Coherence signatures
The beam improvement can also be viewed via the coherence properties by the
four available methods (see :ref:`coh_signs`). As the horizontal exit slit
becomes smaller, one can observe the increase of the coherent fraction ζ and
the increase of the primary (coherent) mode weight. The width of the degree of
coherence (DoC) relative to the width of the intensity distribution determines
the coherent beam fraction. Both widths vary with varying screen position
around the focal point such that their ratio is not invariant, so that the
coherent fraction also varies, which is counter-intuitive. An important
advantage of the eigen-mode or PCA methods is a simple definition of the
coherent fraction as the eigenvalue of the zeroth mode (component); this
eigenvalue appears to be invariant around the focal point, see below. Note that
the methods 2 and 3 give equal results. The method 4 that gives the degree of
transverse coherence (DoTC) is also invariant around the focal point, see DoTC
values on the pictures of Principal Components.
+-----------+--------------------------+--------------------------+
| | 80 µm exit slit | 20 µm exit slit |
+===========+==========================+==========================+
| method 1 | |st_hS80m1| | |st_hS20m1| |
+-----------+--------------------------+--------------------------+
| method 2 | |st_hS80m3| | |st_hS20m3| |
+-----------+--------------------------+--------------------------+
| method 3, | |st_hS80m4| | |st_hS20m4| |
| method 4b | | |
+-----------+--------------------------+--------------------------+
.. |st_hS80m1| animation:: _images/stxm-IDOC-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m1| animation:: _images/stxm-IDOC-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
.. |st_hS80m3| animation:: _images/stxm-Modes-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m3| animation:: _images/stxm-Modes-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
.. |st_hS80m4| animation:: _images/stxm-PCA-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m4| animation:: _images/stxm-PCA-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
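The mode analysis of methods 2 and 3 reduces to an eigen-decomposition of the
mutual intensity built from the filament fields; a hedged NumPy sketch (with
hypothetical field data, not the xrt API)::
    import numpy as np
    rng = np.random.default_rng(0)
    # fields[i]: complex filament field sampled at 256 screen points
    fields = rng.normal(size=(200, 256)) + 1j * rng.normal(size=(200, 256))
    J = fields.conj().T @ fields / len(fields)  # mutual intensity matrix
    w = np.linalg.eigvalsh(J)[::-1]             # mode weights, descending
    coherent_fraction = w[0] / w.sum()          # weight of the zeroth mode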
CXI branch
~~~~~~~~~~
.. rubric:: 2D vs 1D
Although the sample screen images are of good quality (the dark field is almost
black), the mirror footprints may be noisy and not well converged at the
periphery. Compare the M3 footprint with that in the previous section (STXM
branch), where the difference is in the mirror area and thus in the sample
density. The 10\ :sup:`6` wave samples used (i.e. 10\ :sup:`12` possible paths)
are not enough for the slightly enlarged area in the present example. The
propagation is therefore performed separately in the horizontal and vertical
directions, which dramatically improves the quality of the footprints.
Disadvantages of the cuts are losses in visual representation and incorrect
evaluation of the flux.
+------+----------------------+-----------------------+-----------------------+
| | 2D | 1D horizontal cut | 1D vertical cut |
+======+======================+=======================+=======================+
| |M3| | |cxiM32D| | |cxiM31Dh| | |cxiM31Dv| |
+------+----------------------+-----------------------+-----------------------+
| |SS| | |cxiS2D| | |cxiS1Dh| | |cxiS1Dv| |
+------+----------------------+-----------------------+-----------------------+
.. |M3| replace:: M3 footprint
.. |SS| replace:: sample screen
.. |cxiM32D| imagezoom:: _images/cxi_2D-2-hybr-0emit-0enSpread-monoE-03-M3local.*
.. |cxiM31Dh| imagezoom:: _images/cxi_1D-2-hybr-1e6hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxiM31Dv| imagezoom:: _images/cxi_1D-2-hybr-1e6ver-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |cxiS2D| animation:: _images/cxi_S2D
.. |cxiS1Dh| animation:: _images/cxi_S1Dh
.. |cxiS1Dv| animation:: _images/cxi_S1Dv
:loc: upper-right-corner
.. _wavefronts:
.. rubric:: Flat screen vs normal-to-k screen (wave front)
The following images demonstrate the correctness of the directional
Kirchhoff-like integral (see :ref:`seq_prop`). Five diffraction integrals are
calculated on flat screens around the focus position: for two polarizations and
for three directional components. The latter ones define the wave fronts at
every flat screen position; these wave fronts are further used as new curved
screens. The calculated diffraction fields on these curved screens have narrow
phase distributions, as shown by the color histograms, which is indeed expected
for a wave front by its definition. In contrast, the *flat* screens at the same
positions have rapid phase variation over several Fresnel zones.
.. note::
In the process of wave propagation, wave fronts -- surfaces of
constant phase -- are not used in any way. We therefore call it “wave
propagation”, not “wave *front* propagation” as frequently called by
others. The wave fronts in this example were calculated to solely
demonstrate the correctness of the local propagation directions after
having calculated the diffracted field.
+------------------------------+------------------------------+
| flat screen | curved screen (wave front) |
+==============================+==============================+
| |cxiFlat| | |cxiFront| |
+------------------------------+------------------------------+
.. |cxiFlat| animation:: _images/cxi-S1DhFlat
.. |cxiFront| animation:: _images/cxi-S1DhFront
:loc: upper-right-corner
The curvature of the calculated wave fronts varies across the focus position.
The wave fronts become more flat as one approaches the focus, see the figure
below. This is in contrast to *ray* propagation, where the angular ray
distribution is invariant at any position between two optical elements.
.. imagezoom:: _images/cxi_waveFronts.*
.. rubric:: Rays, waves and hybrid
The following images are horizontal cuts at the footprints and sample screens
calculated by
- rays,
- rays + waves hybrid (rays up to PG and wave from PG) and
- purely by waves.
+-----------------+-------------------+-------------------+-------------------+
| | rays | hybrid | waves |
+=================+===================+===================+===================+
| front end slit | |cxi-hFE| | same as rays | |cxi-wFE| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M1 | |cxi-hM1| | same as rays | |cxi-wM1| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M2 | |cxi-hM2| | same as rays | |cxi-wM2| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on PG | |cxi-hPG| | same as rays | |cxi-wPG| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M3 | |cxi-rM3| | |cxi-hM3| | |cxi-wM3| |
+-----------------+-------------------+-------------------+-------------------+
| exit slit | |cxi-rES| | |cxi-hES| | |cxi-wES| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M4 | |cxi-rM4| | |cxi-hM4| | |cxi-wM4| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M5 | |cxi-rM5| | |cxi-hM5| | |cxi-wM5| |
+-----------------+-------------------+-------------------+-------------------+
| sample screen | |cxi-rS| | |cxi-hS| | |cxi-wS| |
+-----------------+-------------------+-------------------+-------------------+
.. |cxi-hFE| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-00-FE.*
.. |cxi-wFE| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-00-FE.*
:loc: upper-right-corner
.. |cxi-hM1| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-01-M1local.*
.. |cxi-wM1| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-01-M1local.*
:loc: upper-right-corner
.. |cxi-hM2| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-02-M2local.*
.. |cxi-wM2| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-02-M2local.*
:loc: upper-right-corner
.. |cxi-hPG| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-02-PGlocal.*
.. |cxi-wPG| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-02-PGlocal.*
:loc: upper-right-corner
.. |cxi-rM3| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxi-hM3| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxi-wM3| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |cxi-rES| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-04-ExitSlit.*
.. |cxi-hES| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-04-ExitSlit.*
.. |cxi-wES| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-04-ExitSlit.*
:loc: upper-right-corner
.. |cxi-rM4| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-05-M4local.*
.. |cxi-hM4| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-05-M4local.*
.. |cxi-wM4| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-05-M4local.*
:loc: upper-right-corner
.. |cxi-rM5| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-06-M5local.*
.. |cxi-hM5| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-06-M5local.*
.. |cxi-wM5| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-06-M5local.*
:loc: upper-right-corner
.. |cxi-rS| animation:: _images/cxi-rS
.. |cxi-hS| animation:: _images/cxi-hS
.. |cxi-wS| animation:: _images/cxi-wS
:loc: upper-right-corner
.. rubric:: Coherence signatures
This section demonstrates the methods 1 and 3 from :ref:`coh_signs`. Notice
again the difficulty in determining the width of DoC owing to its complex shape
(at real emittance) or the restricted field of view (the 0 emittance case). In
contrast, the eigen mode analysis yields an almost invariant well defined
coherent fraction.
+-----------+--------------------------+--------------------------+
| | 0 emittance | real emittance |
+===========+==========================+==========================+
| method 1 | |cxi-coh1-0emit| | |cxi-coh1-non0e| |
+-----------+--------------------------+--------------------------+
| method 3 | |cxi-coh3-0emit| | |cxi-coh3-non0e| |
+-----------+--------------------------+--------------------------+
.. |cxi-coh1-0emit| animation:: _images/cxi-coh1-0emit
.. |cxi-coh1-non0e| animation:: _images/cxi-coh1-non0e
.. |cxi-coh3-0emit| animation:: _images/cxi-coh3-0emit
.. |cxi-coh3-non0e| animation:: _images/cxi-coh3-non0e
:loc: upper-right-corner
"""
pass
| kklmn/xrt | examples/withRaycing/14_SoftiMAX/__init__.py | Python | mit | 16,930 |
import unittest
import pandas as pd
import nose.tools
from mia.features.blobs import detect_blobs
from mia.features.intensity import detect_intensity
from mia.utils import preprocess_image
from ..test_utils import get_file_path
class IntensityTests(unittest.TestCase):
@classmethod
    def setUpClass(cls):
img_path = get_file_path("mias/mdb154.png")
msk_path = get_file_path("mias/masks/mdb154_mask.png")
cls._img, cls._msk = preprocess_image(img_path, msk_path)
# def test_detect_intensity(self):
# blobs = detect_blobs(self._img, self._msk)
# intensity = detect_intensity(self._img, blobs)
#
# nose.tools.assert_true(isinstance(intensity, pd.DataFrame))
# nose.tools.assert_equal(intensity.shape[1], 10)
| samueljackson92/major-project | src/tests/regression_tests/intensity_regression_test.py | Python | mit | 780 |
#!usr/bin/python2.7
# coding: utf-8
# date: 16-September-2016
# author: B.Kozak
# Simple script giving the length of each sequence in a fasta file
import Bio
from Bio import SeqIO
import sys
import os.path
filename = sys.argv[-1]
outname = filename.split('.')
outname1 = '.'.join([outname[0], 'txt'])
FastaFile = open(filename, 'rU')
f = open(outname1, 'w')
for rec in SeqIO.parse(FastaFile, 'fasta'):
name = rec.id
seq = rec.seq
seqLen = len(rec)
print name, seqLen
f.write("%s\t" % name)
f.write("%s\n" % seqLen)
f.close()
FastaFile.close()
print 'Done'
| bartosz-kozak/Sample-script | python/seq_len.py | Python | mit | 544 |
"""
[2015-12-28] Challenge #247 [Easy] Secret Santa
https://www.reddit.com/r/dailyprogrammer/comments/3yiy2d/20151228_challenge_247_easy_secret_santa/
# Description
Every December my friends do a "Secret Santa" - the traditional gift exchange
where everybody is randomly assigned to give a gift to a friend. To make
things exciting, the matching is all random (you cannot pick your gift
recipient) and nobody knows who got assigned to who until the day when the
gifts are exchanged - hence, the "secret" in the name.
Since we're a big group with many couples and families, often a husband gets
his wife as secret santa (or vice-versa), or a father is assigned to one of
his children. This creates a series of issues:
* If you have a younger kid and he/she is assigned to you, you might end up
paying for your own gift and ruining the surprise.
* When your significant other asks "who did you get for Secret Santa", you
have to lie, hide gifts, etc.
* The inevitable "this game is rigged!" commentary on the day of revelation.
To fix this, you must design a program that randomly assigns the Secret Santa
gift exchange, but *prevents people from the same family to be assigned to
each other*.
# Input
A list of all Secret Santa participants. People who belong to the same family
are listed in the same line separated by spaces. Thus, "Jeff Jerry" represents
two people, Jeff and Jerry, who are family and should not be assigned to
each other.
Joe
Jeff Jerry
Johnson
# Output
The list of Secret Santa assignments. As Secret Santa is a random assignment,
output may vary.
Joe -> Jeff
Johnson -> Jerry
Jerry -> Joe
Jeff -> Johnson
But **not** `Jeff -> Jerry` or `Jerry -> Jeff`!
# Challenge Input
Sean
Winnie
Brian Amy
Samir
Joe Bethany
Bruno Anna Matthew Lucas
Gabriel Martha Philip
Andre
Danielle
Leo Cinthia
Paula
Mary Jane
Anderson
Priscilla
Regis Julianna Arthur
Mark Marina
Alex Andrea
# Bonus
The assignment list must avoid "closed loops" where smaller subgroups get
assigned to each other, breaking the overall loop.
Joe -> Jeff
Jeff -> Joe # Closed loop of 2
Jerry -> Johnson
Johnson -> Jerry # Closed loop of 2
# Challenge Credit
Thanks to /u/oprimo for his idea in /r/dailyprogrammer_ideas
"""
def main():
pass
if __name__ == "__main__":
main()
| DayGitH/Python-Challenges | DailyProgrammer/DP20151228A.py | Python | mit | 2,377 |
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import os, os.path
from matplotlib import pyplot as plt
from pylab import get_cmap
import SimpleCV as cv
from glob import glob
# <codecell>
def show_img(img, ax = None):
if ax is not None:
plt.sca(ax)
nimg = img.getNumpy()
return plt.imshow(nimg, aspect='equal')
# <codecell>
path = '/home/will/Dropbox/burnimages/*.jpg'
norm_files = sorted(f for f in glob(path) if '-e' not in f)
masked_files = sorted(f for f in glob(path) if '-e' in f)
fig, axs = plt.subplots(6,6, figsize = (10,10))
for f, ax in zip(norm_files, axs.flatten()):
img = cv.Image(f)
show_img(img, ax = ax)
ax.set_xticks([])
ax.set_yticks([])
fig.tight_layout()
# <codecell>
from itertools import islice, izip_longest
from dateutil.parser import parse
def make_wound_mask(norm_img, green_img, color,
minsize = None,
maxsize = None):
wmask = green_img.hueDistance(color).invert().threshold(200)
blobs = norm_img.findBlobsFromMask(wmask,
minsize = minsize,
maxsize = maxsize)
return wmask, blobs
fig, axs = plt.subplots(6,6, figsize = (10,10))
results = []
for fname, mf, of, ax in izip_longest(norm_files, masked_files, norm_files, axs.flatten()):
mask_img = cv.Image(mf)
norm_img = cv.Image(of)
dt = parse(fname.rsplit(os.sep,1)[1].replace('.jpg', '').replace('.',':'))
wound_mask, wound_blobs = make_wound_mask(norm_img, mask_img, cv.Color.GREEN,
minsize = 1000)
dime_mask, dime_blobs = make_wound_mask(norm_img, mask_img, cv.Color.BLUE,
minsize = 500)
layer = cv.DrawingLayer((norm_img.width, norm_img.height))
wound_blobs[-1].drawHull(color=cv.Color.BLUE, width = 100, layer = layer)
dime_blobs[-1].drawHull(color=cv.Color.RED, width = 100, layer = layer)
norm_img.addDrawingLayer(layer)
fnorm = norm_img.applyLayers()
ratio = wound_blobs[-1].area()/dime_blobs[-1].area()
results.append((dt, ratio))
if ax is not None:
show_img(fnorm, ax = ax)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(ratio)
fig.tight_layout()
# <codecell>
import pandas as pd
res_df = pd.DataFrame(sorted(results), columns = ['SampleTime', 'Ratio'])
dime_diameter = 18 #mm
dime_area = 3.141*(dime_diameter/2)**2
res_df['Area-mm2'] = dime_area*res_df['Ratio']
res_df.set_index('SampleTime', inplace=True)
res_df
# <codecell>
res_df['Area-mm2'].plot()
out = pd.ewma(res_df['Area-mm2'], freq='d', span = 1)
out.plot(lw = 10, alpha = 0.7)
plt.ylabel('Wound-Area-mm^2')
# <codecell>
| JudoWill/ResearchNotebooks | Woundy.py | Python | mit | 2,781 |
import game
import pygame
from pygame.locals import *
class Resources:
    def cambiar(self, imagen):
        sheet = game.load_image(imagen)
        rects = [pygame.Rect(112,2,26,40),
                 pygame.Rect(112,2,26,40),
                 pygame.Rect(112,2,26,40),
                 pygame.Rect(4,4,30,38),
                 pygame.Rect(4,4,30,38),
                 pygame.Rect(4,4,30,38)]
        caminando_der = game.load_sprites(sheet, rects, (0,0,0))
        caminando_izq = game.flip_sprites(caminando_der)
        rects = [pygame.Rect(76,2,26,40),
                 pygame.Rect(112,2,24,40)]
        quieto_der = game.load_sprites(sheet, rects, (0,0,0))
        quieto_izq = game.flip_sprites(quieto_der)
        rects = [pygame.Rect(4,4,30,38),
                 pygame.Rect(38,4,30,36)]
        saltando_der = game.load_sprites(sheet, rects, (0,0,0))
        saltando_izq = game.flip_sprites(saltando_der)
        player = [
            [quieto_der, quieto_izq],
            [caminando_der, caminando_izq],
            [saltando_der, saltando_izq]]
        return player
    def __init__(self, imagen='graphics/arc22.png'):
        # Load the player sprite sheet (the path is a parameter, with the
        # original arc22.png as default)
        self.imagen = imagen
        sheet = game.load_image(self.imagen)
#rects = [#pygame.Rect(514,8,24,34),
# pygame.Rect(550,8,30,34),
# pygame.Rect(582,8,28,34),
# pygame.Rect(550,8,30,34)]
rects = [pygame.Rect(112,2,26,40),
pygame.Rect(112,2,26,40),
pygame.Rect(112,2,26,40),
pygame.Rect(4,4,30,38),
pygame.Rect(4,4,30,38),
pygame.Rect(4,4,30,38)]
caminando_der = game.load_sprites(sheet, rects, (0,0,0))
caminando_izq = game.flip_sprites(caminando_der)
rects = [pygame.Rect(76,2,26,40),
pygame.Rect(112,2,24,40)]
quieto_der = game.load_sprites(sheet, rects, (0,0,0))
quieto_izq = game.flip_sprites(quieto_der)
rects = [pygame.Rect(4,4,30,38),
pygame.Rect(38,4,30,36)]
saltando_der = game.load_sprites(sheet, rects, (0,0,0))
saltando_izq = game.flip_sprites(saltando_der)
self.player = [
[quieto_der, quieto_izq],
[caminando_der,caminando_izq],
[saltando_der, saltando_izq]]
sheet = game.load_image('graphics/blocks11.png')
suelo = game.load_sprite(sheet, pygame.Rect(444,104,32,32))
subsuelo = game.load_sprite(sheet, pygame.Rect(172,138,32,32))
self.tiles = [suelo, subsuelo]
| cangothic/2D-Platformer | resources.py | Python | mit | 2,891 |
'''
author Lama Hamadeh
'''
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import assignment2_helper as helper
# Look pretty...
matplotlib.style.use('ggplot')
# Do * NOT * alter this line, until instructed!
scaleFeatures = True  # feature scaling (if False, no scaling is applied, which affects the 2D plot and the variance values)
# TODO: Load up the dataset and remove any and all
# Rows that have a nan. You should be a pro at this
# by now ;-)
#
# .. your code here ..
df=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module4/Datasets/kidney_disease.csv',index_col = 0)
df = df.reset_index(drop=True) #remove the index column
df=df.dropna(axis=0) #remove any and all Rows that have a nan
#print(df)
# Create some color coded labels; the actual label feature
# will be removed prior to executing PCA, since it's unsupervised.
# You're only labeling by color so you can see the effects of PCA
labels = ['red' if i=='ckd' else 'green' for i in df.classification]
# TODO: Use an indexer to select only the following columns:
# ['bgr','wc','rc']
#
# .. your code here ..
df=df[['bgr', 'rc','wc']] #select only the following columns: bgr, rc, and wc
# TODO: Print out and check your dataframe's dtypes. You'll probably
# want to call 'exit()' after you print it out so you can stop the
# program's execution.
#
# You can either take a look at the dataset webpage in the attribute info
# section: https://archive.ics.uci.edu/ml/datasets/Chronic_Kidney_Disease
# or you can actually peek through the dataframe by printing a few rows.
# What kind of data type should these three columns be? If Pandas didn't
# properly detect and convert them to that data type for you, then use
# an appropriate command to coerce these features into the right type.
#
# .. your code here ..
print(df.dtypes)
df.rc = pd.to_numeric(df.rc, errors='coerce')  # coerce object dtype to numeric
df.wc = pd.to_numeric(df.wc, errors='coerce')
# TODO: PCA Operates based on variance. The variable with the greatest
# variance will dominate. Go ahead and peek into your data using a
# command that will check the variance of every feature in your dataset.
# Print out the results. Also print out the results of running .describe
# on your dataset.
#
# Hint: If you don't see all three variables: 'bgr','wc' and 'rc', then
# you probably didn't complete the previous step properly.
#
# .. your code here ..
print(df.var())
print(df.describe())
# TODO: This method assumes your dataframe is called df. If it isn't,
# make the appropriate changes. Don't alter the code in scaleFeatures()
# just yet though!
#
# .. your code adjustment here ..
if scaleFeatures: df = helper.scaleFeatures(df)
# TODO: Run PCA on your dataset and reduce it to 2 components
# Ensure your PCA instance is saved in a variable called 'pca',
# and that the results of your transformation are saved in 'T'.
#
# .. your code here ..
from sklearn import decomposition
pca = decomposition.PCA(n_components=2)
pca.fit(df)
T = pca.transform(df)
# Plot the transformed data as a scatter plot. Recall that transforming
# the data will result in a NumPy NDArray. You can either use MatPlotLib
# to graph it directly, or you can convert it to DataFrame and have pandas
# do it for you.
#
# Since we've already demonstrated how to plot directly with MatPlotLib in
# Module4/assignment1.py, this time we'll convert to a Pandas Dataframe.
#
# Since we transformed via PCA, we no longer have column names. We know we
# are in P.C. space, so we'll just define the coordinates accordingly:
ax = helper.drawVectors(T, pca.components_, df.columns.values, plt, scaleFeatures)
T = pd.DataFrame(T)
T.columns = ['component1', 'component2']
T.plot.scatter(x='component1', y='component2', marker='o', c=labels, alpha=0.75, ax=ax)
plt.show()
| LamaHamadeh/Microsoft-DAT210x | Module-4/assignment2.py | Python | mit | 3,877 |
import unittest
import os
from sqltxt.table import Table
from sqltxt.column import Column, ColumnName, AmbiguousColumnNameError
from sqltxt.expression import Expression
class TableTest(unittest.TestCase):
def setUp(self):
self.data_path = os.path.join(os.path.dirname(__file__), '../data')
table_header = ["col_a", "col_b"]
table_contents = """1,1
2,3
3,2"""
self.table_a = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e "{0}"'.format(table_contents),
columns = table_header
)
table_header = ["col_a", "col_b"]
table_contents = """1,w
2,x
2,y
5,z"""
self.table_b = Table.from_cmd(
name = 'table_b',
cmd = 'echo -e "{0}"'.format(table_contents),
columns = table_header
)
def test_subset_rows(self):
conditions = [
[Expression('col_b', '==', '1'), 'or', Expression('col_a', '==', '2')]
]
self.table_a.subset_rows(conditions)
cmds_actual = self.table_a.cmds
cmds_expected = [
'echo -e "1,1\n2,3\n3,2"',
"awk -F',' 'OFS=\",\" { if (($2 == 1 || $1 == 2)) { print $1,$2 } }'"]
self.assertEqual(cmds_actual, cmds_expected)
def test_order_columns(self):
col_name_order = [ColumnName('col_b'), ColumnName('col_a')]
self.table_a.order_columns(col_name_order)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"', "awk -F',' 'OFS=\",\" { print $2,$1 }'"]
self.assertEqual(cmds_actual, cmds_expected)
def test_sort(self):
sort_by_col_names = [ColumnName('col_a'), ColumnName('col_b')]
self.table_a.sort(sort_by_col_names)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"', "sort -t, -k 1,1 -k 2,2"]
self.assertEqual(cmds_actual, cmds_expected)
sort_by_cols = [self.table_a.get_column_for_name(cn) for cn in sort_by_col_names]
self.assertEqual(self.table_a.sorted_by, sort_by_cols)
def test_is_sorted_by(self):
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['col_a', 'col_b'])
table_from_cmd.sorted_by = [Column('table_a.col_a'), Column('table_a.col_b')]
self.assertTrue(table_from_cmd.is_sorted_by([0]))
self.assertFalse(table_from_cmd.is_sorted_by([1]))
self.assertTrue(table_from_cmd.is_sorted_by([0,1]))
def test_get_column_for_name_raises_on_ambiguity(self):
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['col_a', 'col_a'])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['ta.col_a', 'tb.col_a'])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
first_column = Column('ta.col_a')
first_column.add_name('col_alpha')
second_column = Column('tb.col_a')
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = [first_column, second_column])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
def test_sample_rows(self):
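        # The expected awk program is a reservoir sample: keep the first n
        # rows, then replace a kept row with probability n/NR for each
        # subsequent row.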
self.table_a.sample_rows(1)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"',
"""awk -v seed=$RANDOM -v n={0} '
BEGIN {{ srand(seed) }}
NR <= n {{ reservoir[NR] = $0 }}
NR > n {{ M = int(rand() * NR) + 1; if (M <= n) {{ reservoir[M] = $0 }}}}
END {{ for (key in reservoir) {{ print reservoir[key] }}}}'""".format(1)
]
self.assertEqual(cmds_actual, cmds_expected)
def test_get_cmd_str(self):
table_from_file = Table.from_file_path(os.path.join(self.data_path, 'table_a.txt'))
# output from a file-backed Table to STDOUT
cmd_actual = table_from_file.get_cmd_str()
cmd_expected = 'tail -n+2 {}/table_a.txt'.format(self.data_path)
self.assertEqual(cmd_actual, cmd_expected)
table_from_cmd = Table.from_cmd(
'table_a',
cmd = 'echo -e "1,2,3,4"',
columns = ['col_a', 'col_b', 'col_c', 'col_d'])
# output from a command-backed Table to STDOUT
cmd_actual = table_from_cmd.get_cmd_str()
cmd_expected = 'echo -e "1,2,3,4"'
self.assertEqual(cmd_actual, cmd_expected)
# add a command, then output
table_from_cmd.cmds += ['sort']
# to STDOUT
cmd_actual = table_from_cmd.get_cmd_str()
cmd_expected = 'echo -e "1,2,3,4" | sort'
self.assertEqual(cmd_actual, cmd_expected)
| shahin/sqltxt | tests/unit/table_test.py | Python | mit | 5,179 |
#!/usr/bin/env python
# --------------------------------------------------------
# Test regression propagation on ImageNet VID video
# Modified by Kai KANG ([email protected])
# --------------------------------------------------------
"""Test a Fast R-CNN network on an image database."""
import argparse
import pprint
import time
import os
import os.path as osp
import sys
import cPickle
import numpy as np
this_dir = osp.dirname(__file__)
# add py-faster-rcnn paths
sys.path.insert(0, osp.join(this_dir, '../../external/py-faster-rcnn/lib'))
from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list
# add external libs
sys.path.insert(0, osp.join(this_dir, '../../external'))
from vdetlib.utils.protocol import proto_load, proto_dump
# add src libs
sys.path.insert(0, osp.join(this_dir, '../../src'))
from tpn.propagate import gt_motion_propagation
from tpn.target import add_track_targets
from tpn.data_io import save_track_proto_to_zip
def parse_args():
"""
Parse input arguments
"""
parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
parser.add_argument('vid_file')
parser.add_argument('box_file')
parser.add_argument('annot_file', default=None,
help='Ground truth annotation file. [None]')
parser.add_argument('save_file', help='Save zip file')
parser.add_argument('--job', dest='job_id', help='Job slot, GPU ID + 1. [1]',
default=1, type=int)
parser.add_argument('--length', type=int, default=20,
help='Propagation length. [20]')
parser.add_argument('--window', type=int, default=5,
help='Prediction window. [5]')
parser.add_argument('--sample_rate', type=int, default=1,
help='Temporal subsampling rate. [1]')
parser.add_argument('--offset', type=int, default=0,
help='Offset of sampling. [0]')
parser.add_argument('--overlap', type=float, default=0.5,
help='GT overlap threshold for tracking. [0.5]')
parser.add_argument('--wait', dest='wait',
help='wait until net file exists',
default=True, type=bool)
parser.set_defaults(vis=False, zip=False, keep_feat=False)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
print 'Called with args:'
print args
if osp.isfile(args.save_file):
print "{} already exists.".format(args.save_file)
sys.exit(1)
vid_proto = proto_load(args.vid_file)
box_proto = proto_load(args.box_file)
annot_proto = proto_load(args.annot_file)
track_proto = gt_motion_propagation(vid_proto, box_proto, annot_proto,
window=args.window, length=args.length,
sample_rate=args.sample_rate, overlap_thres=args.overlap)
    # add ground truth targets from the annotation file
add_track_targets(track_proto, annot_proto)
if args.zip:
save_track_proto_to_zip(track_proto, args.save_file)
else:
proto_dump(track_proto, args.save_file)
| myfavouritekk/TPN | tools/propagate/sequence_roi_gt_propagation.py | Python | mit | 3,197 |
# coding: utf-8
import django_filters
from django import forms
from django.utils.translation import ugettext_lazy as _
from courses.models import Course
from issues.models import Issue
from issues.model_issue_status import IssueStatus
class IssueFilterStudent(django_filters.FilterSet):
is_active = django_filters.ChoiceFilter(label=_('tip_kursa'), name='task__course__is_active')
years = django_filters.MultipleChoiceFilter(
label=_('god_kursa'),
name='task__course__year',
widget=forms.CheckboxSelectMultiple
)
courses = django_filters.MultipleChoiceFilter(label=_('kurs'), name='task__course', widget=forms.SelectMultiple)
responsible = django_filters.MultipleChoiceFilter(label=_('prepodavateli'), widget=forms.SelectMultiple)
status_field = django_filters.MultipleChoiceFilter(label=_('status'), widget=forms.SelectMultiple)
update_time = django_filters.DateRangeFilter(label=_('data_poslednego_izmenenija'))
def set_user(self, user):
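        """Populate the filter choices (courses, years, teachers, statuses)
        from the groups the given user belongs to; field labels are also
        wrapped in <strong> tags."""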
for field in self.filters:
self.filters[field].field.label = u'<strong>{0}</strong>'.format(self.filters[field].field.label)
groups = user.group_set.all()
courses = Course.objects.filter(groups__in=groups)
course_choices = set()
year_choices = set()
teacher_set = set()
status_set = set()
for course in courses:
course_choices.add((course.id, course.name))
year_choices.add((course.year.id, unicode(course.year)))
for teacher in course.get_teachers():
teacher_set.add(teacher)
for status in course.issue_status_system.statuses.all():
status_set.add(status)
self.filters['is_active'].field.choices = ((u'', _(u'luboj')),
(1, _(u'aktivnyj')),
(0, _(u'arhiv')))
self.filters['years'].field.choices = tuple(year_choices)
self.filters['courses'].field.choices = tuple(course_choices)
teacher_choices = [(teacher.id, teacher.get_full_name()) for teacher in teacher_set]
self.filters['responsible'].field.choices = tuple(teacher_choices)
lang = user.profile.language
status_choices = [(status.id, status.get_name(lang)) for status in status_set]
for status_id in sorted(IssueStatus.HIDDEN_STATUSES.values(), reverse=True):
status_field = IssueStatus.objects.get(pk=status_id)
status_choices.insert(0, (status_field.id, status_field.get_name(lang)))
self.filters['status_field'].field.choices = tuple(status_choices)
class Meta:
model = Issue
fields = ['is_active', 'years', 'courses', 'responsible', 'status_field', 'update_time']
| znick/anytask | anytask/issues/model_issue_student_filter.py | Python | mit | 2,807 |
from django.db import models
import datetime
def get_choices(lst):
return [(i, i) for i in lst]
#
# Person
#
pprint_pan = lambda pan: "%s %s %s" % (pan[:5], pan[5:9], pan[9:])
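# e.g. pprint_pan('ABCDE1234F') -> 'ABCDE 1234 F'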
class Person(models.Model):
name = models.CharField(max_length=255, db_index=True)
fathers_name = models.CharField(max_length=255, null=True, blank=True, db_index=True)
status = models.CharField(max_length=32, choices=get_choices([
'Individual',
'HUF',
'Partnership Firm',
'Domestic Company',
'LLP',
'Trust(ITR 7)',
]), default='Individual Salaried')
employer = models.CharField(max_length=64, null=True, blank=True)
self_occupied = models.BooleanField()
pan_number = models.CharField(max_length=32, unique=True)
user_id = models.CharField(max_length=32, null=True, blank=True)
password = models.CharField(max_length=32, null=True, blank=True)
bank_name = models.CharField(max_length=255, null=True, blank=True)
bank_branch = models.CharField(max_length=255, null=True, blank=True)
account_number = models.CharField(max_length=32, null=True, blank=True)
micr = models.CharField(max_length=32, blank=True, null=True)
ifsc_code = models.CharField(max_length=32, null=True, blank=True)
account_type = models.CharField(max_length=32, choices=get_choices(['SB', 'CA', 'CC']), default='SB')
contact_number = models.CharField(max_length=13, null=True, blank=True, db_index=True)
email = models.EmailField(null=True, blank=True, db_index=True)
    address = models.TextField(null=True, blank=True)  # max_length is ignored on TextField
city = models.CharField(max_length=64, null=True, blank=True, db_index=True)
pincode = models.CharField(max_length=10, null=True, blank=True, db_index=True)
date_of_birth_or_incarnation = models.DateField(null=True, blank=True)
def pan_number_pprint(self):
return pprint_pan(self.pan_number)
    pan_number_pprint.admin_order_field = 'pan_number'
pan_number_pprint.short_description = 'Pan Number'
def _trim(self, *args):
for field in args:
value = getattr(self, field)
setattr(self, field, value.replace(' ', ''))
    def save(self, *args, **kwargs):
        self._trim('pan_number')
        super(Person, self).save(*args, **kwargs)
def __unicode__(self):
return u'%s (%s)' % (self.name, self.pan_number)
class MetadataPerson(models.Model):
person = models.ForeignKey(Person)
key = models.CharField(max_length=250)
value = models.CharField(max_length=250)
#
# Report
#
class Report(models.Model):
finanyr = lambda yr: "%s - %s" % (yr, yr+1)
years = [(finanyr(i), finanyr(i)) for i in xrange(1980, 2020)]
person = models.ForeignKey(Person)
financial_year = models.CharField(max_length=11, choices=years, default=finanyr(datetime.datetime.now().year - 1))
assessment_year = models.CharField(max_length=11, choices=years, default=finanyr(datetime.datetime.now().year))
return_filed_on = models.DateField()
returned_income = models.DecimalField(max_digits=12, decimal_places=2)
#Advanced Tax
july = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
september = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
december = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
march = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
#Interest Detail
interest_234_a = models.DecimalField("Interest 234(a)", max_digits=12, decimal_places=2, null=True, blank=True)
interest_234_b = models.DecimalField("Interest 234(b)", max_digits=12, decimal_places=2, null=True, blank=True)
interest_234_c = models.DecimalField("Interest 234(c)", max_digits=12, decimal_places=2, null=True, blank=True)
#Tax detail
tds = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
self_assessment_tax = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
acknowledgement_number = models.CharField("Ack no.", max_length=64, null=True, blank=True)
#Bill Detail
bill_raised_on = models.DateField(null=True, blank=True)
bill_amount = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
bill_received = models.BooleanField("Bill received ?")
mode_of_payment = models.CharField(max_length=16, choices=get_choices(['Cash', 'Cheque', 'DD', 'Bank Transfer']), null=True, blank=True)
payment_detail = models.CharField(max_length=16, null=True, blank=True)
#Order 143(1)
order_received_on_143_1 = models.DateField("143(1) Order received on", null=True, blank=True)
assessed_income_143_1 = models.DecimalField("Assessed income", max_digits=12, decimal_places=2, null=True, blank=True)
assessed_tax_143_1 = models.DecimalField("Assessed tax", max_digits=12, decimal_places=2, null=True, blank=True)
refund_amount_143_1 = models.DecimalField("Refund amount", max_digits=12, decimal_places=2, null=True, blank=True)
demand_raised_amount_143_1 = models.DecimalField("Demand raised for ", max_digits=12, decimal_places=2, null=True, blank=True)
refund_received_on_143_1 = models.DateField("Refund received on", null=True, blank=True)
#Order 143(2)
order_received_on_143_2 = models.DateField("Notice received on", null=True, blank=True)
#Order 143(3)
order_received_on_143_3 = models.DateField("Order received on", null=True, blank=True)
assessed_income_143_3 = models.DecimalField("Assessed income", max_digits=12, decimal_places=2, null=True, blank=True)
assessed_tax_143_3 = models.DecimalField("Assessed tax", max_digits=12, decimal_places=2, null=True, blank=True)
refund_amount_143_3 = models.DecimalField("Refund amount", max_digits=12, decimal_places=2, null=True, blank=True)
demand_raised_amount_143_3 = models.DecimalField("Demand raised for", max_digits=12, decimal_places=2, null=True, blank=True)
refund_received_on_143_3 = models.DateField("Refund received on", null=True, blank=True)
#Appeal before cit
filed_on_cit = models.DateField("Filed on", null=True, blank=True)
order_received_on_cit = models.DateField("Order received on", null=True, blank=True)
assessed_income_cit = models.DecimalField("Assessed income", max_digits=12, decimal_places=2, null=True, blank=True)
assessed_tax_cit = models.DecimalField("Assessed tax", max_digits=12, decimal_places=2, null=True, blank=True)
#Appeal before tribunal
filed_on_tribunal = models.DateField("Filed on", null=True, blank=True)
order_received_on_tribunal = models.DateField("Order received on", null=True, blank=True)
filed_by_tribunal = models.CharField("Filed by", max_length=16, choices=get_choices(['assessee', 'department']), null=True, blank=True)
assessed_income_tribunal = models.DecimalField("Assessed income", max_digits=12, decimal_places=2, null=True, blank=True)
assessed_tax_tribunal = models.DecimalField("Assessed tax", max_digits=12, decimal_places=2, null=True, blank=True)
def got_reimbursement(self):
return self.refund_amount_143_1 > 0
    got_reimbursement.admin_order_field = 'refund_amount_143_1'
got_reimbursement.boolean = True
got_reimbursement.short_description = 'Got reimbursement ?'
def tax_paid(self):
tax = sum([i for i in (self.march, self.september, self.december, self.july) if i is not None])
if tax == 0 and self.tds is not None:
tax = self.tds
return tax
    # tax_paid is computed from several model fields, so there is no single
    # column that admin_order_field could point at; leave it unsorted.
tax_paid.boolean = False
tax_paid.short_description = 'Tax Paid'
class Meta:
unique_together = ('person', 'financial_year')
def __unicode__(self):
return u'%s - %s' % (self.person, self.financial_year)
class MetadataReport(models.Model):
report = models.ForeignKey(Report)
key = models.CharField(max_length=250)
value = models.CharField(max_length=250)
| annual-client-report/Annual-Report | report/models.py | Python | mit | 8,050 |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="serverless-wsgi",
version="3.0.0",
python_requires=">3.6",
author="Logan Raarup",
author_email="[email protected]",
description="Amazon AWS API Gateway WSGI wrapper",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/logandk/serverless-wsgi",
py_modules=["serverless_wsgi"],
install_requires=["werkzeug>2"],
classifiers=(
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
),
keywords="wsgi serverless aws lambda api gateway apigw flask django pyramid",
)
| logandk/serverless-wsgi | setup.py | Python | mit | 867 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('orders', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='order',
name='paid',
field=models.BooleanField(default=False),
),
]
| spectrumone/online-shop-template | myshop/orders/migrations/0002_auto_20160213_1225.py | Python | mit | 390 |
from click.testing import CliRunner
from sqlitebiter.__main__ import cmd
from sqlitebiter._const import ExitCode
from .common import print_traceback
class Test_version_subcommand:
def test_smoke(self):
runner = CliRunner()
result = runner.invoke(cmd, ["version"])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
| thombashi/sqlitebiter | test/test_version_subcommand.py | Python | mit | 373 |
# Patchless XMLRPC Service for Django
# Kind of hacky, and stolen from Crast on irc.freenode.net:#django
# Self documents as well, so if you call it from outside of an XML-RPC Client
# it tells you about itself and its methods
#
# Brendan W. McAdams <[email protected]>
# SimpleXMLRPCDispatcher lets us register xml-rpc calls w/o
# running a full XMLRPC Server. It's up to us to dispatch data
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from buildfarm.models import Package, Queue
from repository.models import Repository, PisiPackage
from source.models import SourcePackage
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
import xmlrpclib
from django.template.loader import render_to_string
from django.utils import simplejson
from django.template import Context, Template
from django import forms
from django.db import transaction
from django.shortcuts import redirect
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from buildfarm.tasks import build_all_in_queue
class NewQueueForm (forms.ModelForm):
class Meta:
model = Queue
fields = ( 'name', 'builder', 'source_repo', 'binman', 'sandboxed')
def site_index (request):
queues = Queue.objects.all ()
context = { 'queues': queues, 'navhint': 'queue', 'not_reload': 'true', 'form' : NewQueueForm() }
return render (request, "buildfarm/site_index.html", context)
def package_progress_json (request, queue_id):
rdict = {}
q = Queue.objects.get(id=queue_id)
packages = Package.objects.filter(queue=q)
    pct = float(q.current) / q.length * 100 if q.length else 0
rdict = { 'percent' : pct, 'total': q.length, 'current': q.current, 'name_current': q.current_package_name }
json = simplejson.dumps(rdict, ensure_ascii=False)
return HttpResponse( json, content_type='application/json')
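# The JSON payload returned above has this shape (values illustrative):
# {"percent": 50.0, "total": 10, "current": 5, "name_current": "nano"}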
@staff_member_required
def delete_from_queue (request, package_id):
pkg = get_object_or_404 (Package, id=package_id)
q_id = pkg.queue.id
pkg.delete ()
return redirect ('/buildfarm/queue/%d/' % q_id)
@staff_member_required
def delete_queue (request, queue_id):
queue = get_object_or_404 (Queue, id=queue_id)
queue.delete ()
return redirect ('/manage/')
@staff_member_required
def new_queue (request):
if request.method == 'POST':
# New submission
form = NewQueueForm (request.POST)
rdict = { 'html': "<b>Fail</b>", 'tags': 'fail' }
context = Context ({'form': form})
if form.is_valid ():
rdict = { 'html': "The new queue has been set up", 'tags': 'success' }
model = form.save (commit=False)
model.current = 0
model.length = 0
model.current_package_name = ""
model.save ()
else:
html = render_to_string ('buildfarm/new_queue.html', {'form_queue': form})
rdict = { 'html': html, 'tags': 'fail' }
json = simplejson.dumps(rdict, ensure_ascii=False)
print json
# And send it off.
return HttpResponse( json, content_type='application/json')
else:
form = NewQueueForm ()
context = {'form': form }
return render (request, 'buildfarm/new_queue.html', context)
def queue_index(request, queue_id=None):
q = get_object_or_404 (Queue, id=queue_id)
packages = Package.objects.filter(queue=q).order_by('build_status')
paginator = Paginator (packages, 15)
pkg_count = q.length
if (pkg_count > 0):
        pct = float(q.current) / q.length * 100
else:
pct = 0
page = request.GET.get("page")
try:
packages = paginator.page(page)
except PageNotAnInteger:
packages = paginator.page (1)
except EmptyPage:
packages = paginator.page (paginator.num_pages)
context = {'navhint': 'queue', 'queue': q, 'package_list': packages, 'total_packages': q.length, 'current_package': q.current, 'total_pct': pct, 'current_package_name': q.current_package_name}
return render (request, "buildfarm/index.html", context)
@staff_member_required
def build_queue (request, queue_id):
queue = Queue.objects.get (id=queue_id)
messages.info (request, "Starting build of \"%s\" queue" % queue.name)
build_all_in_queue.delay (queue_id)
return redirect ('/manage/')
@staff_member_required
def populate_queue (request, queue_id):
q = Queue.objects.get(id=queue_id)
packages = SourcePackage.objects.filter (repository=q.source_repo)
failList = list ()
for package in packages:
binaries = PisiPackage.objects.filter(source_name=package.name)
if len(binaries) == 0:
# We have no binaries
print "New package for source: %s" % (package.name)
failList.append (package)
else:
for package2 in binaries:
if package2.release != package.release:
print "Newer release for: %s" % package2.name
failList.append (package)
break
try:
binary = Package.objects.get(queue=q, name=package.name)
failList.remove (package)
        except Exception:
            # package not queued yet, or not present in failList; nothing to do
            pass
with transaction.commit_on_success():
for fail in failList:
pkg = Package ()
pkg.name = fail.name
pkg.version = fail.version
pkg.build_status = "pending"
pkg.queue = q
pkg.spec_uri = fail.source_uri
pkg.save ()
return redirect ("/buildfarm/queue/%d" % q.id)
| SolusOS-discontinued/RepoHub | buildfarm/views.py | Python | mit | 5,488 |
from django.core.management.base import BaseCommand, CommandError
from ship_data.models import GpggaGpsFix
import datetime
from main import utils
import csv
import os
from django.db.models import Q
import glob
from main.management.commands import findgpsgaps
gps_bridge_working_intervals = None
# This file is part of https://github.com/cpina/science-cruise-data-management
#
# This project was programmed in a hurry without any prior Django experience,
# while circumnavigating the Antarctic on the ACE expedition, without proper
# Internet access, with 150 scientists using the system and doing at the same
# cruise other data management and system administration tasks.
#
# Sadly there aren't unit tests and we didn't have time to refactor the code
# during the cruise, which is really needed.
#
# Carles Pina ([email protected]) and Jen Thomas ([email protected]), 2016-2017.
class Command(BaseCommand):
help = 'Outputs the track in CSV format.'
def add_arguments(self, parser):
parser.add_argument('output_directory', type=str, help="Will delete existing files that started on the same start date")
parser.add_argument('start', type=str, help="Start of the GPS data. Format: YYYYMMDD")
parser.add_argument('end', type=str, help="End of the GPS data. Format: YYYYMMDD or 'yesterday'")
def handle(self, *args, **options):
generate_all_tracks(options['output_directory'], options['start'], options['end'])
def generate_all_tracks(output_directory, start, end):
global gps_bridge_working_intervals
gps_gaps = findgpsgaps.FindDataGapsGps("GPS Bridge1", start, end)
gps_bridge_working_intervals = gps_gaps.find_gps_missings()
generate_fast(output_directory, 3600, "1hour", start, end)
generate_fast(output_directory, 300, "5min", start, end)
generate_fast(output_directory, 60, "1min", start, end)
generate_fast(output_directory, 1, "1second", start, end)
def generate_fast(output_directory, seconds, file_suffix, start, end):
"""
    This method uses MySQL substring matching on the datetime column instead of
    doing individual queries for each 'seconds'. It's faster but makes gaps in
    the data harder to find.
"""
first_date = datetime.datetime.strptime(start, "%Y%m%d")
first_date = utils.set_utc(first_date)
if end == "yesterday":
last_date = utils.last_midnight()
else:
last_date = datetime.datetime.strptime(end, "%Y%m%d")
last_date = utils.set_utc(last_date)
starts_file_format = first_date.strftime("%Y%m%d")
ends_file_format = last_date.strftime("%Y%m%d")
filename = "track_{}_{}_{}.csv".format(starts_file_format, ends_file_format, file_suffix)
files_to_delete = glob.glob(os.path.join(output_directory, "track_{}_*_{}.csv".format(starts_file_format,
file_suffix)))
print("Will start processing:", filename)
file_path = os.path.join(output_directory, filename)
if file_path in files_to_delete:
files_to_delete.remove(file_path) # In case that this script is re-generating the file
file = open(file_path + ".tmp", "w")
csv_writer = csv.writer(file)
csv_writer.writerow(["date_time", "latitude", "longitude"])
one_day = datetime.timedelta(days=1)
current_day = first_date
while current_day <= last_date:
process_day(current_day, seconds, csv_writer)
current_day += one_day
delete_files(files_to_delete)
file.close()
os.rename(file_path + ".tmp", file_path)
def process_day(date_time_process, seconds, csv_writer):
date_time_process_tomorrow = date_time_process + datetime.timedelta(days=1)
today_filter = Q(date_time__gte=date_time_process) & Q(date_time__lt=date_time_process_tomorrow)
if seconds == 1:
query_set = GpggaGpsFix.objects.filter(today_filter).order_by('date_time')
elif seconds == 60:
query_set = GpggaGpsFix.objects.filter(today_filter).filter(date_time__contains=':01.').order_by('date_time')
elif seconds == 300:
query_set = GpggaGpsFix.objects.filter(today_filter).filter(Q(date_time__contains=':00:01.') |
Q(date_time__contains=':05:01.') |
Q(date_time__contains=':10:01.') |
Q(date_time__contains=':15:01.') |
Q(date_time__contains=':20:01.') |
Q(date_time__contains=':25:01.') |
Q(date_time__contains=':30:01.') |
Q(date_time__contains=':35:01.') |
Q(date_time__contains=':40:01.') |
Q(date_time__contains=':45:01.') |
Q(date_time__contains=':50:01.') |
Q(date_time__contains=':55:01.')).order_by('date_time')
elif seconds == 3600:
query_set = GpggaGpsFix.objects.filter(today_filter).filter(date_time__contains=':00:01').order_by('date_time')
else:
        assert False, "unsupported interval: add a case for %d seconds" % seconds
# 64: GPS Bridge
# 63: GPS Trimble
query_set = query_set.filter(utils.filter_out_bad_values())
previous_date_time_string = ""
for gps_info in query_set.iterator():
date_time_string = gps_info.date_time.strftime("%Y-%m-%d %H:%M:%S")
if date_time_string == previous_date_time_string:
continue
if which_gps(date_time_string) == "GPS Bridge1":
if gps_info.device_id == 64:
l = [gps_info.date_time.strftime("%Y-%m-%d %H:%M:%S"),
"{:.4f}".format(gps_info.latitude),
"{:.4f}".format(gps_info.longitude)]
# print(l)
csv_writer.writerow(l)
previous_date_time_string = date_time_string
else:
if gps_info.device_id == 63:
l = [gps_info.date_time.strftime("%Y-%m-%d %H:%M:%S"),
"{:.4f}".format(gps_info.latitude),
"{:.4f}".format(gps_info.longitude)]
# print(l)
csv_writer.writerow(l)
previous_date_time_string = date_time_string
def delete_files(files):
for file in files:
print("Deleting file:", file)
os.remove(file)
def generate_method_1(output_directory, seconds, file_suffix):
"""
This method does a query every 'seconds'. Very slow, could be used to find gaps easily on the data.
As it is now it is difficult to decide which GPS the get comes from.
"""
time_delta = datetime.timedelta(seconds=seconds)
first_date = GpggaGpsFix.objects.earliest().date_time
last_date = GpggaGpsFix.objects.latest().date_time
filename = "track_{}_{}_{}.csv".format(first_date.strftime("%Y%m%d"), last_date.strftime("%Y%m%d"), file_suffix)
print("Will start processing:", filename)
file_path = os.path.join(output_directory, filename)
file = open(file_path, "w")
csv_writer = csv.writer(file)
csv_writer.writerow(["date_time", "latitude", "longitude"])
current_date = first_date
previous_date = current_date
while current_date < last_date:
location = utils.ship_location(current_date)
if location.date_time != previous_date:
if location.date_time is not None and location.latitude is not None and location.longitude is not None:
csv_writer.writerow([location.date_time.strftime("%Y-%m-%d %H:%M:%S"), "{:.4f}".format(location.latitude), "{:.4f}".format(location.longitude)])
if location.date_time is None:
print("No data for:", current_date)
if previous_date.day != current_date.day:
print("Generating CSV GPS info:", current_date)
previous_date = current_date
current_date = current_date + time_delta
def which_gps(date_time_str):
for interval in gps_bridge_working_intervals:
if interval['starts'] < date_time_str <= interval['stops']:
# if date_time_str > interval['starts'] and date_time_str <= interval['stops']:
return "GPS Bridge1"
return "Trimble GPS"
| cpina/science-cruise-data-management | ScienceCruiseDataManagement/main/management/commands/exportgpstracks.py | Python | mit | 8,461 |
from yaml import safe_load
from os import environ
from os.path import join, isfile
from ..module_ultra_repo import ModuleUltraRepo
from ..module_ultra_config import ModuleUltraConfig
class RepoDaemonConfig:
"""Represent a MU repo to the MU daemon."""
def __init__(self, **kwargs):
self.repo_name = kwargs['repo_name']
self.repo_path = kwargs['repo_path']
self.pipelines = kwargs['pipelines']
def get_repo(self):
"""Return the MU repo that this represents."""
return ModuleUltraRepo(self.repo_path)
def get_pipeline_list(self):
"""Return a list of (pipe_name, version)."""
return [(pipe['name'], pipe['version']) for pipe in self.pipelines]
def get_pipeline_tolerance(self, pipe_name):
"""Return tolerance for the pipeline."""
for pipe in self.pipelines:
if pipe['name'] == pipe_name:
return pipe.get('tolerance', 0)
def get_pipeline_endpts(self, pipe_name):
"""Return a list of endpts or None."""
return None
def get_pipeline_excluded_endpts(self, pipe_name):
"""Return a list of excluded endpts or None."""
return None
class DaemonConfig:
"""Store config information for the MU daemon."""
def __init__(self, repos, total_jobs=10, run_local=True, pipeline_configs={}):
self.repos = repos
self.total_jobs = int(total_jobs)
self.run_local = run_local
self.pipeline_configs = pipeline_configs
def list_repos(self):
"""Return a list of RepoDaemonConfigs."""
repo_configs = []
for repo_name, repo_path, pipelines in self.repos:
repo_configs.append(RepoDaemonConfig(**{
'repo_name': repo_name,
'repo_path': repo_path,
'pipelines': pipelines,
}))
return repo_configs
def get_pipeline_run_config(self, pipe_name, pipe_version):
"""Return a filepath for the config to be used or None."""
return None
@classmethod
def get_daemon_config_filename(ctype):
try:
return environ['MODULE_ULTRA_DAEMON_CONFIG']
except KeyError:
config_dir = ModuleUltraConfig.getConfigDir()
config_filename = join(config_dir, 'daemon_config.yaml')
if isfile(config_filename):
return config_filename
assert False, "No daemon config found"
@classmethod
def load_from_yaml(ctype, yaml_filename=None):
yaml_filename = yaml_filename if yaml_filename else ctype.get_daemon_config_filename()
        raw_config = safe_load(open(yaml_filename))  # safe_load avoids executing arbitrary YAML tags
raw_repos = raw_config['repos']
repo_list = [
(raw_repo['name'], raw_repo['path'], raw_repo['pipelines'])
for raw_repo in raw_repos
]
return DaemonConfig(
repo_list,
total_jobs=raw_config.get('num_jobs', 10),
run_local=raw_config.get('run_on_cluster', True),
pipeline_configs=raw_config.get('pipeline_configs', {})
)
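# A minimal daemon_config.yaml consumed by load_from_yaml above might look
# like this (editor's sketch; key names taken from the parsing code, values
# invented):
#
#   num_jobs: 10
#   run_on_cluster: false
#   pipeline_configs: {}
#   repos:
#     - name: my_repo
#       path: /path/to/repo
#       pipelines:
#         - name: some_pipeline
#           version: 0.1.0
#           tolerance: 0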
| MetaSUB/ModuleUltra | moduleultra/daemon/config.py | Python | mit | 3,078 |
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014, 2019 zordsdavini
# Copyright (c) 2014 Alexandr Kriptonov
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import imaplib
import re
from libqtile.log_utils import logger
from libqtile.widget import base
class GmailChecker(base.ThreadPoolText):
"""A simple gmail checker. If 'status_only_unseen' is True - set 'fmt' for one argument, ex. 'unseen: {0}'"""
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
("update_interval", 30, "Update time in seconds."),
("username", None, "username"),
("password", None, "password"),
("email_path", "INBOX", "email_path"),
("display_fmt", "inbox[{0}],unseen[{1}]", "Display format"),
("status_only_unseen", False, "Only show unseen messages"),
]
def __init__(self, **config):
base.ThreadPoolText.__init__(self, "", **config)
self.add_defaults(GmailChecker.defaults)
def poll(self):
self.gmail = imaplib.IMAP4_SSL('imap.gmail.com')
self.gmail.login(self.username, self.password)
answer, raw_data = self.gmail.status(self.email_path,
'(MESSAGES UNSEEN)')
if answer == "OK":
dec = raw_data[0].decode()
messages = int(re.search(r'MESSAGES\s+(\d+)', dec).group(1))
unseen = int(re.search(r'UNSEEN\s+(\d+)', dec).group(1))
            if self.status_only_unseen:
return self.display_fmt.format(unseen)
else:
return self.display_fmt.format(messages, unseen)
else:
logger.exception(
'GmailChecker UNKNOWN error, answer: %s, raw_data: %s',
answer, raw_data)
return "UNKNOWN ERROR"
| ramnes/qtile | libqtile/widget/gmail_checker.py | Python | mit | 2,825 |
def gpio_init(pin, output):
try:
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
f.write(b"out" if output else b"in")
except Exception as e:
print(f"Failed to set gpio {pin} direction: {e}")
def gpio_set(pin, high):
try:
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
f.write(b"1" if high else b"0")
except Exception as e:
print(f"Failed to set gpio {pin} value: {e}")
| commaai/openpilot | common/gpio.py | Python | mit | 432 |
# vim: fileencoding=utf-8
"""
AppHtml settings
@author Toshiya NISHIO(http://www.toshiya240.com)
"""
defaultTemplate = {
'1) 小さいボタン': '${badgeS}',
'2) 大きいボタン': '${badgeL}',
'3) テキストのみ': '${textonly}',
"4) アイコン付き(小)": u"""<span class="appIcon"><img class="appIconImg" height="60" src="${icon60url}" style="float:left;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeS" style="display:inline-block; margin:6px">${badgeS}</span><br style="clear:both;">
""",
"5) アイコン付き(大)": u"""<span class="appIcon"><img class="appIconImg" height="100" src="${icon100url}" style="float:left;;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeL" style="display:inline-block; margin:4px">${badgeL}</span><br style="clear:both;">
"""
}
settings = {
'phg': "",
'cnt': 8,
'scs': {
'iphone': 320,
'ipad': 320,
'mac': 480
},
'template': {
'software': defaultTemplate,
'iPadSoftware': defaultTemplate,
'macSoftware': defaultTemplate,
'song': defaultTemplate,
'album': defaultTemplate,
'movie': defaultTemplate,
'ebook': defaultTemplate
}
}
| connect1ngdots/AppHtmlME | AppHtmlME.workflow/Scripts/apphtml_settings.py | Python | mit | 1,540 |
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "houseofdota.production_settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| lucashanke/houseofdota | manage.py | Python | mit | 266 |
"""
Django settings for plasystem project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
from local_settings import *
import os  # used below for path handling; not guaranteed to be re-exported by local_settings
# Application definition
INSTALLED_APPS = [
'flat_responsive',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'productores',
'organizaciones',
'subsectores',
'lugar',
'resultados',
'reportes',
'smart_selects',
'multiselectfield',
#'nested_admin',
'nested_inline',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'plasystem.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'plasystem.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'es-ni'
TIME_ZONE = 'America/Managua'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files
MEDIA_ROOT = os.environ.get('MEDIA_ROOT', os.path.join(BASE_DIR, 'media'))
MEDIA_URL = '/media/'
STATIC_ROOT = os.environ.get('STATIC_ROOT', os.path.join(BASE_DIR, 'static'))
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static_media"),
)
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
| CARocha/plasystem | plasystem/settings.py | Python | mit | 3,017 |
"""Tests for the object departures module."""
import responses
# initialize package, and does not mix up names
import test as _test
import navitia_client
import requests
class DeparturesTest(_test.TestCase):
def setUp(self):
self.user = 'leo'
self.core_url = "https://api.navitia.io/v1/"
self.client = navitia_client.Client(self.user)
self.coords = '2.333333;48.866667'
def test_no_region_nor_coords(self):
# Should raise error if no region nor coords specified
pass
| leonardbinet/navitia_client | test/test_departures.py | Python | mit | 529 |
# http://github.com/timestocome
# use hidden markov model to predict changes in a stock market index fund
# http://cs229.stanford.edu/proj2009/ShinLee.pdf
# https://www.dartmouth.edu/~chance/teaching_aids/books_articles/probability_book/Chapter11.pdf
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
# pandas display options
pd.options.display.max_rows = 1000
pd.options.display.max_columns = 25
pd.options.display.width = 1000
######################################################################
# data
########################################################################
# read in datafile created in LoadAndMatchDates.py
data = pd.read_csv('StockDataWithVolume.csv', index_col='Date', parse_dates=True)
features = [data.columns.values]
# create target --- let's try Nasdaq value 1 day change
data['returns'] = (data['NASDAQ'] - data['NASDAQ'].shift(1)) / data['NASDAQ']
# remove nan row from target creation
data = data.dropna()
###################################################################
# Simple markov chain
###################################################################
# first pass only used 4 bins ( highGain, lowGain, lowLoss, highLoss )
# looks to be ~0.13, -.112, ~.25 diff between highest and lowest
# divide returns into bins
# round(2) gives 22 unique bins
# round(3) gives 157 bins
# round(4) gives 848 bins
round_values = 4
data['gainLoss'] = data['returns'].round(round_values)
total_samples = len(data)
n_bins = data['gainLoss'].nunique()
value_count = data['gainLoss'].value_counts()
value_count = value_count.sort_index()
b = value_count.index.tolist()
bins = ['%.4f' % z for z in b] # match to round value
#print(value_count)
# calculate probability of a return value on a random day
probability = value_count / total_samples
#print(probability)
# built transition matrix
transitions = np.zeros((n_bins, n_bins))
def map_transition(this_return, previous_return):
    # np.where already returns 0-based positions into the sorted bin index;
    # subtracting 1 here would wrap the first bin around to the last row
    current = np.where(probability.index == this_return)[0]
    previous = np.where(probability.index == previous_return)[0]
transitions[current, previous] += 1
total_transitions = 0
for i in range(len(data)-1):
total_transitions += 1
previous = data.iloc[i]['gainLoss']
current = data.iloc[i+1]['gainLoss']
map_transition(current, previous)
# normalize by total transitions, then make each column sum to 1
transitions /= total_transitions
transitions /= transitions.sum(axis=0)
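# Sanity check (editor's sketch): after the column normalization above each
# column of `transitions` should sum to 1 (columns index the previous-day
# bin). Bins never seen as a previous state divide by zero and leave NaNs:
bad_cols = np.flatnonzero(~np.isclose(np.nansum(transitions, axis=0), 1.0))
if len(bad_cols):
    print("warning: %d bins never occurred as a previous state" % len(bad_cols))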
#######################################################################################
# make a prediction
# n number of days into future
# s today's state hg, lg, ll, hl
# t transition matrix that was calculated
s = -.03 # today's gain or loss --- be sure it is a valid bin
n = 5
t = transitions
# n-step transition probabilities need a matrix power, not an element-wise one
prediction_probabilities = np.linalg.matrix_power(t, n)
state_idx = np.where(probability.index == s)[0]
# columns index the previous (known) state, so read the column for today's return
probabilities = prediction_probabilities[:, state_idx]
mostlikely = probabilities.argmax()
bin_value = float(bins[mostlikely])
print("%d days from now, the market return will be %.2f" % (n, bin_value))
######################################################################################
# plot predictions over time
# scale prediction for plotting
def convert_return_for_plot(r):
return bins[r]
days_ahead = 5
p = []
# the n-step matrix is constant, so compute it once outside the loop
prediction_probabilities = np.linalg.matrix_power(transitions, days_ahead)
for i in range(len(data)-1):
    s = data.iloc[i]['gainLoss']  # today's return bin from the market
    col_number = np.where(probability.index == s)[0]  # column matching today's return
    probabilities = prediction_probabilities[:, col_number]
    mostlikely = probabilities.argmax()
    bin_value = bins[mostlikely]
    p.append(bin_value)
# pad the beginning of p
p = ([0] * 1 + p)
data['predicted'] = p
plt.figure(figsize=(12,12))
plt.title("Nasdaq daily gain/loss using single chain markov 5 days out")
plt.plot(data['returns'], label='Actual')
plt.plot(data['predicted'], label='Predicted', alpha=0.5)
plt.legend(loc='best')
plt.savefig("SingleChainMarkov.png")
plt.show()
| timestocome/Test-stock-prediction-algorithms | Misc experiments/Stocks_SimpleMarkovChain.py | Python | mit | 4,218 |
"""
Django settings for ecommerce project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
#root of project
#BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'csqwlmc8s55o($rt6ozh7u+ui9zb-et00w$d90j8$^!nvj41_r'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*']
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = 'yourpassword'
EMAIL_PORT = 587
EMAIL_USE_TLS = True
'''
If using gmail, you will need to
unlock Captcha to enable Django
to send for you:
https://accounts.google.com/displayunlockcaptcha
'''
# Application definition
INSTALLED_APPS = (
#django app
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
#third party apps
'crispy_forms',
'registration',
#my apps
'answers',
'newsletter',
"products",
"carts",
"billing",
"django_filters",
"storages",
'gunicorn',
"djstripe",
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'ecommerce.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ecommerce.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'EST'
USE_I18N = True
USE_L10N = True
USE_TZ = True
'''Image storage Amazon S3'''
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'examplefy'
S3_URL = 'http://%s.s3.amazonaws.com/' % AWS_STORAGE_BUCKET_NAME
STATIC_URL = S3_URL
AWS_QUERYSTRING_AUTH = False
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
'''Static storage'''
# # Static files (CSS, JavaScript, Images)
# # https://docs.djangoproject.com/en/1.8/howto/static-files/
# STATIC_ROOT = 'staticfiles'
# STATIC_URL = '/static/'
# STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_in_env", "static_root")
# MEDIA_URL = '/media/'
# MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_in_env", "media_root")
# PROTECTED_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_in_env", "protected_root")
# STATICFILES_DIRS = (
# os.path.join(BASE_DIR, "static", "static_root"),
# #os.path.join(BASE_DIR, "static_in_env"),
# #'/var/www/static/',
# )
#Production Code
#Parse database configuration from $DATABASE_URL
#import dj_database_url
#DATABASES['default'] = dj_database_url.config()
# #BOTO S3 Storage for Production ONLY
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static', "static_root"),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
# STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static", "static_root")
MEDIA_URL = S3_URL
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static", "media_root")
PROTECTED_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static", "protected_root")
# TEMPLATE_DIRS = (
# os.path.join(BASE_DIR, "templates"),
# )
# here() gives us file paths from the root of the system to the directory
# holding the current file.
here = lambda * x: os.path.join(os.path.abspath(os.path.dirname(__file__)), *x)
PROJECT_ROOT = here("..")
# root() gives us file paths from the root of the system to whatever
# folder(s) we pass it starting at the parent directory of the current file.
root = lambda * x: os.path.join(os.path.abspath(PROJECT_ROOT), *x)
TEMPLATE_DIRS = (
root('templates'),
)
#Crispy FORM TAGs SETTINGS
CRISPY_TEMPLATE_PACK = 'bootstrap3'
#DJANGO REGISTRATION REDUX SETTINGS
ACCOUNT_ACTIVATION_DAYS = 7
REGISTRATION_AUTO_LOGIN = True
SITE_ID = 1
LOGIN_REDIRECT_URL = '/'
#Braintree
BRAINTREE_PUBLIC = "hsjhmqhy73rvpqbv"
BRAINTREE_PRIVATE = "37b06da7e2cdb493bf0e0ddb1c47cbcd"
BRAINTREE_MERCHANT = "bgd7scxjbcrz6dd2"
BRAINTREE_ENVIRONMENT = "Sandbox"
#Stripe
STRIPE_PUBLIC_KEY = os.environ.get("STRIPE_PUBLIC_KEY", "pk_test_lLFAbBOc7bHtpxq5QnIp94xh")
STRIPE_SECRET_KEY = os.environ.get("STRIPE_SECRET_KEY", "sk_test_hWkIxMrsvR3IGJIRKLRy1Rts")
# This module *is* the settings file, so django.conf.settings cannot be
# consulted here; assign the djstripe currency choices directly.
CURRENCIES = (
    ('usd', 'U.S. Dollars',),
    ('gbp', 'Pounds (GBP)',),
    ('eur', 'Euros',),
)
DJSTRIPE_PLANS = {
"one-time": {
"stripe_plan_id": "one-time",
"name": "Examplefy ($0.99)",
"description": "A one-time buy to Examplefy",
"price": 99, # $0.99
"currency": "usd",
"interval": "day"
},
"monthly": {
"stripe_plan_id": "pro-monthly",
"name": "Examplefy Pro ($4.99/month)",
"description": "The monthly subscription plan to Examplefy",
"price": 499, # $4.99
"currency": "usd",
"interval": "month",
"interval_count": 1
},
"yearly": {
"stripe_plan_id": "pro-yearly",
"name": "Examplefy Prime ($49/year)",
"description": "The annual subscription plan to Examplefy",
"price": 4900, # $49.00
"currency": "usd",
"interval": "year",
"interval_count": 1
}
}
| Maelstroms38/ecommerce | src/ecommerce/settings/local.py | Python | mit | 7,411 |
"""geo.py: Implementation of class AbstractTwitterGeoCommand
and its subclasses.
"""
from argparse import ArgumentParser
from . import (AbstractTwitterCommand, call_decorator)
from ..parsers import (filter_args, cache)
# GET geo/id/:place_id
# POST geo/place DEPRECATED
# GET geo/reverse_geocode
# GET geo/search
GEO_ID_PLACE_ID = ('geo/id/:place_id', 'id')
GEO_REVERSE_GEOCODE = ('geo/reverse_geocode', 'reverse')
GEO_SEARCH = ('geo/search', 'search')
# pylint: disable=abstract-method
class AbstractTwitterGeoCommand(AbstractTwitterCommand):
"""n/a"""
pass
class IdPlaceId(AbstractTwitterGeoCommand):
"""Output all the information about a known place."""
def create_parser(self, subparsers):
parser = subparsers.add_parser(
GEO_ID_PLACE_ID[0],
aliases=GEO_ID_PLACE_ID[1:],
help=self.__doc__)
parser.add_argument(
'place_id',
help='a place in the world where can be retrieved '
'from geo/reverse_geocode')
return parser
@call_decorator
def __call__(self):
"""Request GET geo/id/:place_id for Twitter."""
# pylint: disable=protected-access
kwargs = dict(_id=self.args.place_id)
return kwargs, self.twhandler.geo.id._id # hack?
class ReverseGeocode(AbstractTwitterGeoCommand):
"""Search for up to 20 places that can be used as a place_id."""
def create_parser(self, subparsers):
parser = subparsers.add_parser(
GEO_REVERSE_GEOCODE[0],
aliases=GEO_REVERSE_GEOCODE[1:],
parents=[parser_geo_common()],
help=self.__doc__)
parser.add_argument(
'long',
metavar='{-180.0..180.0}',
help='the longitude to search around')
parser.add_argument(
'lat',
metavar='{-90.0..90.0}',
help='the latitude to search around')
return parser
@call_decorator
def __call__(self):
"""Request GET geo/reverse_geocode for Twitter."""
kwargs = filter_args(
vars(self.args),
'lat', 'long', 'accuracy', 'granularity', 'max_results')
return kwargs, self.twhandler.geo.reverse_geocode
class Search(AbstractTwitterGeoCommand):
"""Search for places that can be attached to a statuses/update."""
def create_parser(self, subparsers):
parser = subparsers.add_parser(
GEO_SEARCH[0],
aliases=GEO_SEARCH[1:],
parents=[parser_geo_common()],
help=self.__doc__)
parser.add_argument(
'--long',
metavar='{-180.0..180.0}',
help='the longitude to search around')
parser.add_argument(
'--lat',
metavar='{-90.0..90.0}',
help='the latitude to search around')
parser.add_argument(
'-q', '--query',
metavar='<text>',
help='free-form text to match against '
'while executing a geo-based query')
parser.add_argument(
'-i', '--ip-address',
dest='ip',
metavar='<ip-address>',
help='an IP address')
parser.add_argument(
'-c', '--contained-within',
dest='contained_within',
metavar='<place_id>',
help='the place_id which you would like '
'to restrict the search results to')
parser.add_argument(
'-s', '--street-address',
dest='street_address',
metavar='<text>',
help='search for places which have this given street address')
return parser
@call_decorator
def __call__(self):
"""Request GET geo/search for Twitter."""
kwargs = filter_args(
vars(self.args),
'lat', 'long', 'accuracy', 'granularity', 'max_results',
'query', 'ip', 'contained_within', 'street_address')
return kwargs, self.twhandler.geo.search
def make_commands(manager):
"""Prototype"""
# pylint: disable=no-member
return (cmd_t(manager) for cmd_t in
AbstractTwitterGeoCommand.__subclasses__())
CHOICES = ('poi', 'neighborhood', 'city', 'admin', 'country')
@cache
def parser_geo_common():
"""Return the parser for common arguments."""
parser = ArgumentParser(add_help=False)
parser.add_argument(
'-a', '--accuracy',
help='a hint on the region in which to search')
parser.add_argument(
'-g', '--granularity',
choices=CHOICES,
metavar='|'.join(CHOICES),
help='the minimal granularity of place types to return')
parser.add_argument(
'-m', '--max-results',
type=int,
dest='max_results',
help='a hint as to the number of results to return')
return parser
| showa-yojyo/bin | twmods/commands/geo.py | Python | mit | 4,854 |
from __future__ import absolute_import
from collections import defaultdict as ddict
import os.path as op
def enum(**enums):
"""#enumeration
#backward compatible
:param enums:
"""
return type('Enum', (), enums)
IONISATION_MODE = enum(NEG=-1, POS=1)
class ExperimentalSettings(object):
"""
    :param mz_tol_ppm: m/z tolerance in ppm
    :param polarity: ionisation mode (IONISATION_MODE.NEG or IONISATION_MODE.POS)
    :param is_dims_exp: whether this is a direct-infusion MS (DIMS) experiment
"""
ADDUCTS_POS = op.abspath("mzos/ressources/POS_ADDUCTS_IMS.csv")
ADDUCTS_NEG = op.abspath("mzos/ressources/NEG_ADDUCTS_IMS.csv")
FRAGMENTS = op.abspath("mzos/ressources/FRAGMENTS_IMS.csv")
def __init__(self, mz_tol_ppm, polarity, is_dims_exp,
frag_conf=None,
neg_adducts_conf=None,
pos_adducts_conf=None):
self.samples = set()
        self.polarity = polarity  # an IONISATION_MODE value (+1/-1), not a string
self.mz_tol_ppm = mz_tol_ppm
self.is_dims_exp = is_dims_exp
# self.databases = databases
self.group_by_id = ddict(set)
self.group_by_sample = {}
# setting isos file, same for both polarity
# self.isos_file = ExperimentalSettings.ISOS
# setting good frags_file
self.frags_file = frag_conf or ExperimentalSettings.FRAGMENTS
self.adducts_file = neg_adducts_conf or ExperimentalSettings.ADDUCTS_NEG \
if polarity == IONISATION_MODE.NEG else pos_adducts_conf or ExperimentalSettings.ADDUCTS_POS
def get_frags(self):
"""
:return:
"""
lines = list()
with open(self.frags_file) as f:
lines += [l.split(",") for l in f.readlines()[1:]]
return [((float(l[3]), 1), l[0]) for l in lines]
def get_adducts(self):
"""
:return:
"""
lines = list()
with open(self.adducts_file) as f:
lines += [l.split(",") for l in f.readlines()[1:]]
return [((float(l[3]), 1), l[0]) for l in lines]
def get_mass_to_check(self):
"""
:return:
"""
if self.is_dims_exp:
return self.get_frags()
return self.get_adducts() + self.get_frags()
def create_group(self, id_, samples):
"""
:param id_:
:param samples:
:return:
"""
group = Group(id_, samples)
for s in list(samples):
self.group_by_sample[s] = group
self.group_by_id[id_] = group
        # set.union returns a new set, so the result must be assigned back
        self.samples |= set(samples)
return group
def get_group(self, id_):
"""
:param id_:
:return:
"""
return self.group_by_id.get(id_)
def get_group_of(self, sample):
"""
:param sample:
:return: return group or None
"""
return self.group_by_sample.get(sample)
def get_group_id_of(self, sample):
"""
:param sample:
:return:
"""
group = self.get_group_of(sample)
if group is None:
return None
return group.name_id
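# Usage sketch (editor's note; sample and group names are invented):
#
#   settings = ExperimentalSettings(10.0, IONISATION_MODE.NEG, is_dims_exp=False)
#   settings.create_group('wild_type', ['sample_1', 'sample_2'])
#   settings.get_group_id_of('sample_1')  # -> 'wild_type'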
class Group(list):
"""
:param name_id:
:param samples:
:param description:
"""
def __init__(self, name_id, samples, description=""):
super(Group, self).__init__()
self.samples = samples
self.description = description
        self.name_id = name_id
| jerkos/mzOS | mzos/exp_design.py | Python | mit | 3,317 |
# -*- coding: utf-8 -*-
from nose.tools import (
eq_,
raises,
)
from py3oauth2.utils import (
normalize_netloc,
normalize_path,
normalize_query,
normalize_url,
)
def test_normalize_url():
eq_(normalize_url('http://a/b/c/%7Bfoo%7D'),
normalize_url('hTTP://a/./b/../b/%63/%7bfoo%7d'))
@raises(ValueError)
def test_normalize_url_unknown_scheme():
normalize_url('example://example.com/')
@raises(ValueError)
def test_normalize_url_fragment():
normalize_url('http://example.com/#foo')
@raises(ValueError)
def test_normalize_url_invalid_port():
normalize_url('https://example.com:1bb/#foo')
def test_normalize_netloc():
eq_(normalize_netloc('eXamPLe.com', 80), 'example.com')
eq_(normalize_netloc('user:[email protected]', 80), 'user:[email protected]')
eq_(normalize_netloc('user:@example.com', 80), '[email protected]')
eq_(normalize_netloc(':[email protected]', 80), ':[email protected]')
eq_(normalize_netloc('example.com:443', 80), 'example.com:443')
eq_(normalize_netloc('example.com:80', 80), 'example.com')
eq_(normalize_netloc('example.com:', 80), 'example.com')
def test_normalize_query():
eq_(normalize_query(''), '')
eq_(normalize_query('b=c&a=b'), 'a=b&b=c')
eq_(normalize_query('b&a=b'), 'a=b')
eq_(normalize_query('b=&a=b'), 'a=b')
eq_(normalize_query('b=%e3%81%84&a=%e3%81%82'), 'a=%E3%81%82&b=%E3%81%84')
def test_normalize_path():
eq_(normalize_path(''), '/')
eq_(normalize_path('//'), '/')
eq_(normalize_path('/a//b'), '/a/b/')
eq_(normalize_path('/a/./b'), '/a/b/')
eq_(normalize_path('/a/foo/../b'), '/a/b/')
eq_(normalize_path('/%e3%81%82%a%e3%81%84'), '/%E3%81%82%a%E3%81%84/')
eq_(normalize_path('/%e3%81%82a%e3%81%84'), '/%E3%81%82a%E3%81%84/')
| GehirnInc/py3oauth2 | py3oauth2/tests/test_utils.py | Python | mit | 1,798 |
#!/usr/bin/env python3
# coding=utf-8
"""Executa o servidor de nomes ".br"."""
import logging
import dns
def main():
logging.basicConfig(
format='[%(levelname)s]%(threadName)s %(message)s',
level=logging.INFO)
brNS = dns.NameServer('.br', 2, '127.0.0.1', 10001)
brNS.add_record('uem.br', '127.0.0.1:10002')
brNS.run()
if __name__ == '__main__':
main()
| marcokuchla/name-systems | runbr.py | Python | mit | 390 |
from __future__ import unicode_literals
from django.apps import AppConfig
class DevelopersConfig(AppConfig):
name = 'developers'
| neldom/qessera | developers/apps.py | Python | mit | 136 |
"""
Contains all elements of this package. They act as the formal elements of the law.
"""
import json
import sys
def from_json(data):
"""
Reconstructs any `BaseElement` from its own `.as_json()`. Returns the element.
"""
def _decode(data_dict):
values = []
if isinstance(data_dict, str):
return data_dict
assert(len(data_dict) == 1)
klass_string = next(iter(data_dict.keys()))
klass = getattr(sys.modules[__name__], klass_string)
args = []
for e in data_dict[klass_string]:
x = _decode(e)
if isinstance(x, str):
args.append(x)
else:
args += x
values.append(klass(*args))
return values
return _decode(json.loads(data))[0]
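# Round-trip sketch (editor's note): any element serialized with .as_json()
# can be reconstructed with from_json(), e.g.
#
#   token = Token('exemplo')
#   from_json(token.as_json()) == token  # -> True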
class BaseElement(object):
"""
Defines the interface of all elements.
"""
def as_html(self):
"""
How the element converts itself to HTML.
"""
raise NotImplementedError
def as_str(self):
"""
How the element converts itself to simple text.
"""
raise NotImplementedError
def as_dict(self):
"""
How the element converts itself to a dictionary.
"""
raise NotImplementedError
def as_json(self):
"""
How the element converts itself to JSON. Not to be overwritten.
"""
return json.dumps(self.as_dict())
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, repr(self.as_str()))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.as_dict() == other.as_dict()
else:
return False
@staticmethod
def _build_html(tag, text, attrib):
        text = text.replace('\n', '')  # \n has no meaning in HTML
if not text:
# ignore empty elements
return ''
attributes = ' '.join('%s="%s"' % (key, value)
for (key, value) in sorted(attrib.items())
if value is not None)
if attributes:
attributes = ' ' + attributes
return '<{0}{1}>{2}</{0}>'.format(tag, attributes, text)
class Token(BaseElement):
"""
A simple string.
"""
def __init__(self, string):
assert isinstance(string, str)
self._string = string
def as_str(self):
return self.string
def as_html(self):
return self.as_str()
def as_dict(self):
return {self.__class__.__name__: [self.as_str()]}
@property
def string(self):
return self._string
class Reference(Token):
"""
A generic reference to anything. Contains a number (str) and a parent, which
must be either `None` or a `Token` (or a subclass of `Token`).
"""
def __init__(self, number, parent=None):
super(Reference, self).__init__(number)
assert isinstance(number, str)
assert isinstance(parent, Token) or parent is None
self._parent = parent
def __repr__(self):
return '<%s %s %s>' % (self.__class__.__name__,
repr(self.number), repr(self.parent))
def as_html(self):
return self._build_html('a', self.as_str(), {})
def as_dict(self):
r = {self.__class__.__name__: [self.number]}
if self.parent:
r[self.__class__.__name__].append(self.parent.as_dict())
return r
@property
def number(self):
return self.string
@property
def parent(self):
return self._parent
class DocumentReference(Reference):
"""
A concrete Reference to a document. Contains an href that identifies where
it points to, as well as a `set_href` to set it.
"""
def __init__(self, number, parent, href=''):
super(DocumentReference, self).__init__(number, parent)
self._href = href
def __repr__(self):
return '<%s %s %s>' % (self.__class__.__name__, repr(self.as_str()),
repr(self.parent.as_str()))
@property
def name(self):
return self.parent.as_str()
def set_href(self, href):
self._href = href
def as_html(self):
if self._href:
return self._build_html('a', self.as_str(), {'href': self._href})
return super(DocumentReference, self).as_html()
def as_dict(self):
r = super(DocumentReference, self).as_dict()
if self._href:
r[self.__class__.__name__].append(self._href)
return r
class LineReference(Reference):
pass
class NumberReference(Reference):
pass
class ArticleReference(Reference):
pass
class EULawReference(Reference):
"""
A reference to EU law. Its href is built from its name and number.
"""
@staticmethod
def _build_eu_url(name, number):
# example: '2000/29/CE'
year, iden = number.split('/')[:2]
label = {'Diretiva': 'L',
'Decisão de Execução': 'D',
'Regulamento (CE)': 'R',
'Regulamento CE': 'R',
'Regulamento CEE': 'R'}[name]
if label == 'R':
year, iden = iden, year
eur_id = '3%s%s%04d' % (year, label, int(iden))
return 'http://eur-lex.europa.eu/legal-content/PT/TXT/?uri=CELEX:%s' \
% eur_id
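    # Worked example (editor's note): ('Diretiva', '2000/29/CE') yields the
    # CELEX id '32000L0029', i.e.
    # http://eur-lex.europa.eu/legal-content/PT/TXT/?uri=CELEX:32000L0029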
def __init__(self, number, parent):
super(EULawReference, self).__init__(number, parent)
def as_html(self):
return self._build_html('a', self.as_str(),
{'href': self._build_eu_url(self.parent.as_str(),
self.number)})
class Anchor(Token):
"""
A generic anchor that defines a section that can be referred to.
"""
name = None
def __init__(self, string):
super(Anchor, self).__init__(string)
self._document_section = None
def as_str(self):
return '%s %s\n' % (self.name, self.number)
def as_dict(self):
return {self.__class__.__name__: [self.number]}
@property
def number(self):
return self.string
@property
def format(self):
return self.__class__
@property
def reference(self):
return self._document_section
@reference.setter
def reference(self, document_section):
assert(isinstance(document_section, DocumentSection))
self._document_section = document_section
def ref_as_href(self):
if self.reference.id_as_html():
return '#' + self.reference.id_as_html()
else:
return None
class Section(Anchor):
name = 'Secção'
class SubSection(Anchor):
name = 'Sub-Secção'
class Clause(Anchor):
name = 'Clausula'
def as_str(self):
return '%s\n' % self.number
class Part(Anchor):
name = 'Parte'
class Chapter(Anchor):
name = 'Capítulo'
class Title(Anchor):
name = 'Título'
class Annex(Anchor):
name = 'Anexo'
def as_str(self):
if self.number:
return '%s %s\n' % (self.name, self.number)
else:
return '%s\n' % self.name
class Article(Anchor):
name = 'Artigo'
def as_html(self):
anchor = self._build_html('a', self.number,
{'href': self.ref_as_href()})
return '%s %s' % (self.name, anchor)
class Number(Anchor):
name = 'Número'
def as_str(self):
return '%s -' % self.number
def as_html(self):
return self._build_html('a', self.as_str(),
{'href': self.ref_as_href()})
class Line(Number):
name = 'Alínea'
def as_str(self):
return '%s' % self.number
class Item(Number):
"""
An item of an unordered list.
"""
name = 'Item'
def as_str(self):
return '%s' % self.number
class BaseDocumentSection(BaseElement):
def __init__(self, *children):
self._children = []
for child in children:
self.append(child)
self._parent_section = None
def append(self, element):
if isinstance(element, BaseDocumentSection):
element._parent_section = self
self._children.append(element)
def __len__(self):
return len(self._children)
def as_str(self):
return ''.join(child.as_str() for child in self._children)
def as_html(self):
string = ''
ol = False
ul = False
for child in self._children:
if ul and not isinstance(child, UnorderedDocumentSection):
string += '</ul>'
ul = False
if ol and not isinstance(child, OrderedDocumentSection):
string += '</ol>'
ol = False
if not ul and isinstance(child, UnorderedDocumentSection):
string += '<ul>'
ul = True
if not ol and isinstance(child, OrderedDocumentSection):
string += '<ol>'
ol = True
string += child.as_html()
if ol:
string += '</ol>'
if ul:
string += '</ul>'
return string
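    # The loop above groups runs of consecutive OrderedDocumentSection /
    # UnorderedDocumentSection children into a single enclosing <ol> / <ul>;
    # any other child type closes whichever list is currently open.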
def as_dict(self):
return {self.__class__.__name__: [child.as_dict() for child in
self._children]}
def find_all(self, condition, recursive=False):
if recursive:
def _find_all(root):
result = []
if isinstance(root, BaseDocumentSection):
for child in root._children:
if condition(child):
result.append(child)
result += _find_all(child)
return result
return _find_all(self)
return [child for child in self._children if condition(child)]
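    # Hedged usage sketch (not in the original source): collect every
    # DocumentReference anywhere below this element, then only direct children.
    #
    #   all_refs  = section.find_all(lambda e: isinstance(e, DocumentReference), recursive=True)
    #   top_level = section.find_all(lambda e: isinstance(e, DocumentReference))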
def id_tree(self):
tree = []
if self._parent_section is not None:
tree = self._parent_section.id_tree()
tree += [self]
return tree
def get_doc_refs(self):
"""
Yields tuples (name, number) of all its `DocumentReference`s.
"""
refs = self.find_all(lambda x: isinstance(x, DocumentReference), True)
ref_set = set()
for ref in refs:
ref_set.add((ref.name, ref.number))
return ref_set
def set_doc_refs(self, mapping):
"""
Uses a dictionary of the form `(name, ref)-> url` to set the href
of its own `DocumentReference`s.
"""
refs = self.find_all(lambda x: isinstance(x, DocumentReference), True)
for ref in refs:
if (ref.name, ref.number) in mapping:
ref.set_href(mapping[(ref.name, ref.number)])
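    # Hedged usage sketch (resolve_url is hypothetical, not from the original
    # code): resolve every referenced document to a URL, then set the hrefs.
    #
    #   refs = doc.get_doc_refs()                          # {(name, number), ...}
    #   mapping = {ref: resolve_url(ref) for ref in refs}
    #   doc.set_doc_refs(mapping)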
class Paragraph(BaseDocumentSection):
def as_html(self):
return self._build_html('p', super(Paragraph, self).as_html(), {})
class InlineParagraph(Paragraph):
def as_html(self):
return self._build_html('span', super(Paragraph, self).as_html(), {})
class Document(BaseDocumentSection):
pass
class DocumentSection(BaseDocumentSection):
formal_sections = [Annex, Article, Number, Line, Item]
html_classes = {
Annex: 'annex',
Part: 'part',
Title: 'title',
Chapter: 'chapter',
Section: 'section',
SubSection: 'sub-section',
Clause: 'clause',
Article: 'article',
Number: 'number list-unstyled',
Line: 'line list-unstyled',
Item: 'item list-unstyled',
}
def __init__(self, anchor, *children):
super(DocumentSection, self).__init__(*children)
self._anchor = anchor
self._anchor.reference = self
def as_dict(self):
json = super(DocumentSection, self).as_dict()
json[self.__class__.__name__].insert(0, self.anchor.as_dict())
return json
@property
def anchor(self):
return self._anchor
@property
def format(self):
return self.anchor.format
def formal_id_tree(self):
filtered_tree = []
for e in self.id_tree():
if isinstance(e, QuotationSection):
return [] # sections inside quotations have no tree
if isinstance(e, DocumentSection) and e.format in self.formal_sections:
filtered_tree.append(e)
return filtered_tree
def id_as_html(self):
string = '-'.join(e.anchor.name + '-' + e.anchor.number for e in
self.formal_id_tree())
if string != '':
return string
else:
return None
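    # Illustrative note: for a Number anchored at '1' nested under an Article
    # anchored at '2', formal_id_tree() keeps both elements and id_as_html()
    # yields 'Artigo-2-Número-1', which Anchor.ref_as_href() turns into the
    # in-page link '#Artigo-2-Número-1'.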
class TitledDocumentSection(DocumentSection):
def __init__(self, anchor, title=None, *children):
super(TitledDocumentSection, self).__init__(anchor, *children)
self._title = title
def as_dict(self):
json = super(TitledDocumentSection, self).as_dict()
if self._title is not None:
json[self.__class__.__name__].insert(1, self._title.as_dict())
return json
hierarchy_html_titles = {
Part: 'h2',
Annex: 'h2',
Title: 'h3',
Chapter: 'h3',
Section: 'h4',
SubSection: 'h5',
Article: 'h5',
Clause: 'h5',
}
def as_html(self):
inner = self.anchor.as_html()
if self._title is not None:
inner += self._title.as_html()
container = self._build_html(self.hierarchy_html_titles[self.format],
inner, {'class': 'title'})
rest = super(TitledDocumentSection, self).as_html()
return self._build_html('div', container + rest,
{'class': self.html_classes[self.format],
'id': self.id_as_html()})
def as_str(self):
string = self.anchor.as_str()
if self._title is not None:
string += self._title.as_str()
return string + super(TitledDocumentSection, self).as_str()
@property
def title(self):
return self._title
@title.setter
def title(self, title):
assert(isinstance(title, Paragraph))
self._title = title
class InlineDocumentSection(DocumentSection):
"""
A section whose elements are inline.
"""
formats = {}
def as_html(self):
container = self._build_html('span', self.anchor.as_html(), {})
rest = super(InlineDocumentSection, self).as_html()
return self._build_html('li', container + rest,
{'class': self.html_classes[self.format],
'id': self.id_as_html()})
def as_str(self):
return self.anchor.as_str() + super(InlineDocumentSection, self).as_str()
class OrderedDocumentSection(InlineDocumentSection):
"""
A section whose elements are inline and ordered.
"""
formats = {Number, Line}
class UnorderedDocumentSection(InlineDocumentSection):
"""
A section whose elements are inline and un-ordered.
"""
formats = {Item}
class QuotationSection(BaseDocumentSection):
"""
A Section quoting something.
"""
def as_html(self):
return '<blockquote>%s</blockquote>' % \
super(QuotationSection, self).as_html()
def as_str(self):
return '«%s»' % super(QuotationSection, self).as_str()
| publicos-pt/pt_law_parser | pt_law_parser/expressions.py | Python | mit | 15,426 |
import scipy as sp
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import sys
import matplotlib.lines as lines
import h5py
from matplotlib.font_manager import FontProperties
import matplotlib.ticker as ticker
from scipy.fftpack import fft
axial_label_font = FontProperties()
axial_label_font.set_family('sans-serif')
axial_label_font.set_style('normal')
axial_label_font.set_weight('bold')
# axial_label_font.set_size('x-large')
axial_label_font.set_size(20)
legend_label_font = FontProperties()
legend_label_font.set_family('sans-serif')
legend_label_font.set_style('normal')
legend_label_font.set_weight('normal')
# legend_label_font.set_size('large')
legend_label_font.set_size(16)
def node_response_extraction_sequential(node_ID, file_name, num_DOF):
    # Extract the time vector and the displacement/acceleration histories of
    # one node from a sequential ESSI output (HDF5) file.
    h5_file = h5py.File(file_name, 'r');
    Time = h5_file['time'][:];
    displacement_index = int(h5_file['Model/Nodes/Index_to_Generalized_Displacements'][node_ID]);
    displacement_component = h5_file['Model/Nodes/Generalized_Displacements'][int(displacement_index):int(displacement_index+num_DOF), :];
    acceleration_component = h5_file['Model/Nodes/Generalized_Accelerations'][int(displacement_index):int(displacement_index+num_DOF), :];
    for x1 in xrange(0,num_DOF):
        displacement_component[x1,:] = displacement_component[x1,:]-displacement_component[x1,0]; ### subtract the initial value so that a self-weight loading stage yields relative displacement
    return Time, displacement_component, acceleration_component;
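# A minimal usage sketch under the HDF5 layout assumed above (node 12 is one
# of the profile nodes listed below; file_name is defined further down):
#
#   t, disp, acc = node_response_extraction_sequential(12, file_name, 3)
#   # disp[0, :] is then the x-displacement history of node 12, zeroed at t[0].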
numbercol = 1;
surface_node_ID = 252; ## 252, 250, 249, 251
node_ID = [252, 212, 172, 132, 92, 52, 12]; ## node ID from surface to bottom
depth = [0, 2, 4, 6, 8, 10, 12];
bottom_node_ID = 6; ## node just beyond DRM layer
file_name = 'Motion1C_DRM_propagation.h5.feioutput' ##
parameteric_case = 'Motion1C_Northridge' ##
### ==========================================================================
postfix = '.feioutput';
middle_name_less_than_ten = '0';
num_DOF = 3;
Time, displacement_component_surface, acceleration_component_surface = node_response_extraction_sequential(surface_node_ID, file_name, num_DOF);
Time, displacement_component_bottom, acceleration_component_bottom = node_response_extraction_sequential(bottom_node_ID, file_name, num_DOF);
# surface_acc = np.loadtxt('Kobe_acc.txt');
# surface_disp = np.loadtxt('Kobe_disp.txt');
surface_acc = np.loadtxt('scaled_northridge_acc.dat');
surface_disp = np.loadtxt('scaled_northridge_dis.dat');
########################################################################################
#######===== Print acceleration of nodes ===== ######
########################################################################################
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(surface_acc[:, 0], surface_acc[:, 1], '-r', label='surface analytical', linewidth= 1.5);
ax.plot(Time[200:]-2.0, acceleration_component_surface[0, 200:], '-k', label='DRM propagation', linewidth= 0.5);
plt.gca().set_xlim([0,38]);
# plt.gca().set_ylim([-10,10]);
# plt.gca().get_xaxis().set_ticks(np.arange(0, 60.1, 10))
# plt.gca().get_yaxis().set_ticks(np.arange(-15, 3.1, 3))
plt.gca().get_yaxis().set_major_formatter(ticker.FormatStrFormatter('%0.2f'))
plt.gca().get_xaxis().set_tick_params(direction='in',labelsize='x-large')
plt.gca().get_yaxis().set_tick_params(direction='in',labelsize='x-large')
plt.xlabel('Time [s]', fontproperties=axial_label_font);
plt.ylabel('Acc. [$m/s^2$]', fontproperties=axial_label_font);
plt.grid(True);
plt.legend(ncol= numbercol, loc='upper right', prop=legend_label_font);
filename = 'acc_check_'+ parameteric_case + '.pdf'
plt.savefig(filename, bbox_inches='tight');
plt.show();
# # # ########################################################################################
# # # #######======================== Print Time series response along the depth ===== ######
# # # ########################################################################################
# print "Plot acceleration records along depth!";
# fig = plt.figure()
# ax = fig.add_subplot(111)
# # scale_meter = 7;
# # plt.gca().text(32.7, 1.25, '$1g$', fontsize=20)
# # l1 = lines.Line2D([32, 32], [0.5, 0.5+10/scale_meter], color='k', linewidth=2.0)
# # l2 = lines.Line2D([31.7, 32.3], [0.5, 0.5], color='k', linewidth=0.5)
# # l3 = lines.Line2D([31.7, 32.3], [0.5+10/scale_meter, 0.5+10/scale_meter], color='k', linewidth=0.5)
# # plt.gca().add_line(l1);
# # plt.gca().add_line(l2);
# # plt.gca().add_line(l3);
# PGA_depth = sp.zeros(len(depth));
# for x in xrange(0,len(node_ID)):
# current_node = node_ID[x];
# current_depth = depth[x];
# Time, current_displacement_component, current_acceleration_component = node_response_extraction_sequential(current_node, file_name, num_DOF);
# plot_current_acceleration = current_depth + current_acceleration_component/15.0; ## scale acceleration
# PGA_depth[x] = max(abs(current_acceleration_component[0, :]));
# ax.plot(Time, plot_current_acceleration[0, :], '-k', linewidth= 1);
# plt.gca().set_ylim([-1,13]);
# plt.gca().invert_yaxis()
# # plt.gca().get_xaxis().set_ticks(np.arange(0, 60.1, 10))
# # plt.gca().get_yaxis().set_ticks(np.arange(-15, 3.1, 3))
# plt.gca().get_yaxis().set_major_formatter(ticker.FormatStrFormatter('%0.2f'))
# plt.gca().get_xaxis().set_tick_params(direction='in',labelsize='x-large')
# plt.gca().get_yaxis().set_tick_params(direction='in',labelsize='x-large')
# plt.xlabel('Time [s]', fontproperties=axial_label_font);
# plt.ylabel('Depth. [m]', fontproperties=axial_label_font);
# plt.grid(True);
# plt.legend(ncol= numbercol, loc='upper right', prop=legend_label_font);
# filename = 'acc_depth_'+ parameteric_case + '.pdf'
# plt.savefig(filename, bbox_inches='tight');
# plt.show();
| BorisJeremic/Real-ESSI-Examples | motion_one_component/Deconvolution_DRM_Propagation_Northridge/python_plot_parameteric_study.py | Python | cc0-1.0 | 5,870 |
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
def send_message(message):
"""
    * desc Convenience helper to send an email quickly.
    * input The message body to send.
    * output None
"""
mail_handler = SendMail()
mail_handler.send_mail(settings.REPORT_USER, 'Error info', message)
class SendMail(object):
"""docstring for SendMail"""
def __init__(self):
self.mail_host = settings.MAIL_HOST
self.mail_host_user = settings.MAIL_HOST_USER
self.mail_host_pwd = settings.MAIL_HOST_PWD
self.smtp = smtplib.SMTP()
self.smtp_login()
def smtp_login(self):
# login the host
self.smtp.connect(self.mail_host)
self.smtp.login(self.mail_host_user, self.mail_host_pwd)
def send_file_mail(self, receiver_list, subject, file_info, file_name):
        # Send an email with an attachment.
        msg = MIMEMultipart()
        part = MIMEApplication(file_info)
        part.add_header('Content-Disposition',
                        'attachment', filename=file_name)
        msg.attach(part)
sender = self.mail_host_user
msg['Subject'] = subject
msg['From'] = sender
msg['To'] = ";".join(receiver_list)
self.smtp.sendmail(sender, receiver_list, msg.as_string())
def send_mail(self, receiver_list, subject, context, mail_type="plain"):
"""
        * desc Interface for sending an email.
        * input receiver_list: list of recipient addresses; subject: subject line; context: content to send; mail_type: email format (currently "plain" and "html" are tested to work)
        * output Whether the send succeeded.
"""
sender = self.mail_host_user
msg = MIMEText(context, mail_type)
msg['Subject'] = subject
msg['From'] = sender
msg['To'] = ";".join(receiver_list)
self.smtp.sendmail(sender, receiver_list, msg.as_string())
def close(self):
        # Close the established SMTP connection.
self.smtp.close()
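# A hedged usage sketch (assumes the MAIL_* settings are configured; the
# address is illustrative):
#
#   handler = SendMail()                    # connects and logs in on creation
#   handler.send_mail(['ops@example.com'], 'Error info', 'Body text')
#   handler.close()
#
# send_message() above wraps this pattern for quick error reports.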
class MailHandler(object):
def __init__(self):
pass
def send_mail_message(self, to_user, msg, error=0):
"""
        * desc Send an error email.
        * input The recipients, the message to send, and whether it is an error or a warning.
        * output 0 on success, 1 on failure.
"""
subject = settings.MSUBMAIL
if error:
text_content = 'Virtual Manager Error'
else:
text_content = 'Virtual Manager Warning'
from_email = settings.FMAIL
try:
to = [str(user) + "@hujiang.com" for user in to_user.split(',')]
print(to)
content_msg = EmailMultiAlternatives(
subject, text_content, from_email, to)
html_content = u'<b>' + msg + '</b>'
content_msg.attach_alternative(html_content, 'text/html')
content_msg.send()
return 0
        except Exception:
return 1
| hanleilei/note | python/vir_manager/utils/mail_utils.py | Python | cc0-1.0 | 2,926 |
from django.db import models
# Create your models here.
class Pizza(models.Model):
name = models.CharField(max_length=128)
price = models.DecimalField(decimal_places=2, max_digits=5)
ingredients = models.TextField()
picture = models.ImageField(blank=True, null=True)
def __unicode__(self):
return u'Pizza: {}'.format(self.name)
def __repr__(self):
return unicode(self)
| caioherrera/django-pizza | pizzaria/pizza/models.py | Python | cc0-1.0 | 412 |
import json
from django.core.urlresolvers import reverse
from django.http import HttpResponseNotFound
from django.test import TestCase
from mock import Mock
from utils import use_GET_in
from api.views import msas, tables
class ConversionTest(TestCase):
def test_use_GET_in(self):
fn, request = Mock(), Mock()
request.GET.lists.return_value = [('param1', [0]), ('param2', [-1])]
# Dictionaries become JSON
fn.return_value = {'a': 1, 'b': 2}
response = use_GET_in(fn, request)
self.assertEqual(json.loads(response.content), {'a': 1, 'b': 2})
self.assertEqual(fn.call_args[0][0], {'param1': [0], 'param2': [-1]})
# Everything else is unaltered
fn.return_value = HttpResponseNotFound('Oh noes')
response = use_GET_in(fn, request)
self.assertEqual(response.status_code, 404)
self.assertEqual(response.content, 'Oh noes')
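    # A hedged sketch of the contract these assertions pin down (the real
    # implementation lives in utils.use_GET_in and may differ):
    #
    #   def use_GET_in(fn, request):
    #       result = fn(dict(request.GET.lists()))
    #       if isinstance(result, dict):
    #           return HttpResponse(json.dumps(result), content_type='application/json')
    #       return result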
class ViewsTests(TestCase):
fixtures = ['agency.json', 'fake_msa.json', 'api_tracts.json', 'test_counties.json', 'fake_respondents.json']
def test_api_all_user_errors(self):
resp = self.client.get(reverse('all'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'',
'swLon':'-88.225583',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
resp = self.client.get(reverse('all'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'41.597775',
'swLon':'',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
def test_api_msas_user_errors(self):
resp = self.client.get(reverse('msas'))
self.assertEqual(resp.status_code, 404)
resp = self.client.get(reverse('msas'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'',
'swLon':'-88.225583',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
resp = self.client.get(reverse('msas'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'41.597775',
'swLon':'',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
def test_api_msas_endpoint(self):
"""should return a list of MSA ids in view"""
coords = {'neLat': '36.551569', 'neLon':'-78.961487', 'swLat':'35.824494', 'swLon':'-81.828918'}
url = reverse(msas)
resp = self.client.get(url, coords)
result_list = json.loads(resp.content)
self.assertTrue(isinstance(result_list, list))
self.assertContains(resp, '49180')
def test_api_tables_endpoint(self):
"""should return table_data json for a lender/MSA pair"""
params = {'lender': '90000451965', 'metro': '49180'}
url = reverse(tables)
resp = self.client.get(url, params)
result_dict = json.loads(resp.content)
self.assertTrue(isinstance(result_dict, dict))
keys = ['counties', 'msa']
lender_keys = ['hma_pct', 'lma_pct', 'mma_pct', 'lma', 'mma', 'hma', 'lar_total', 'peer_hma_pct', 'peer_lma_pct', 'peer_mma_pct', 'peer_lma', 'peer_mma', 'peer_hma', 'peer_lar_total', 'odds_lma', 'odds_mma', 'odds_hma']
for key in keys:
self.assertTrue(key in result_dict.keys())
for key in lender_keys:
self.assertTrue(key in result_dict['msa'].keys())
self.assertTrue(len(result_dict['msa']) > 0)
| mehtadev17/mapusaurus | mapusaurus/api/tests.py | Python | cc0-1.0 | 4,338 |
"""
The most important object in the Gratipay object model is Participant, and the
second most important one is Ccommunity. There are a few others, but those are
the most important two. Participant, in particular, is at the center of
everything on Gratipay.
"""
from contextlib import contextmanager
from postgres import Postgres
import psycopg2.extras
@contextmanager
def just_yield(obj):
yield obj
class GratipayDB(Postgres):
def get_cursor(self, cursor=None, **kw):
if cursor:
if kw:
raise ValueError('cannot change options when reusing a cursor')
return just_yield(cursor)
return super(GratipayDB, self).get_cursor(**kw)
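    # Illustrative sketch (not from the original module): the `cursor` keyword
    # lets nested helpers join an already-open transaction.
    #
    #   with db.get_cursor() as outer:
    #       with db.get_cursor(cursor=outer) as inner:
    #           assert inner is outer        # same cursor, same transaction
    #   # db.get_cursor(cursor=outer, back_as=dict) would raise ValueError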
def self_check(self):
with self.get_cursor() as cursor:
check_db(cursor)
def check_db(cursor):
"""Runs all available self checks on the given cursor.
"""
_check_balances(cursor)
_check_no_team_balances(cursor)
_check_tips(cursor)
_check_orphans(cursor)
_check_orphans_no_tips(cursor)
_check_paydays_volumes(cursor)
def _check_tips(cursor):
"""
Checks that there are no rows in tips with duplicate (tipper, tippee, mtime).
https://github.com/gratipay/gratipay.com/issues/1704
"""
conflicting_tips = cursor.one("""
SELECT count(*)
FROM
(
SELECT * FROM tips
EXCEPT
SELECT DISTINCT ON(tipper, tippee, mtime) *
FROM tips
ORDER BY tipper, tippee, mtime
) AS foo
""")
assert conflicting_tips == 0
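# A hedged diagnostic sketch (an assumption, not part of the check): to list
# the offending rows instead of merely counting them, one could run
#
#   SELECT tipper, tippee, mtime, count(*)
#     FROM tips
# GROUP BY tipper, tippee, mtime
#   HAVING count(*) > 1;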
def _check_balances(cursor):
"""
Recalculates balances for all participants from transfers and exchanges.
https://github.com/gratipay/gratipay.com/issues/1118
"""
b = cursor.all("""
select p.username, expected, balance as actual
from (
select username, sum(a) as expected
from (
select participant as username, sum(amount) as a
from exchanges
where amount > 0
and (status is null or status = 'succeeded')
group by participant
union all
select participant as username, sum(amount-fee) as a
from exchanges
where amount < 0
and (status is null or status <> 'failed')
group by participant
union all
select tipper as username, sum(-amount) as a
from transfers
group by tipper
union all
select participant as username, sum(amount) as a
from payments
where direction='to-participant'
group by participant
union all
select participant as username, sum(-amount) as a
from payments
where direction='to-team'
group by participant
union all
select tippee as username, sum(amount) as a
from transfers
group by tippee
) as foo
group by username
) as foo2
join participants p on p.username = foo2.username
where expected <> p.balance
""")
assert len(b) == 0, "conflicting balances: {}".format(b)
def _check_no_team_balances(cursor):
if cursor.one("select exists (select * from paydays where ts_end < ts_start) as running"):
# payday is running
return
teams = cursor.all("""
SELECT t.slug, balance
FROM (
SELECT team, sum(delta) as balance
FROM (
SELECT team, sum(-amount) AS delta
FROM payments
WHERE direction='to-participant'
GROUP BY team
UNION ALL
SELECT team, sum(amount) AS delta
FROM payments
WHERE direction='to-team'
GROUP BY team
) AS foo
GROUP BY team
) AS foo2
JOIN teams t ON t.slug = foo2.team
WHERE balance <> 0
""")
assert len(teams) == 0, "teams with non-zero balance: {}".format(teams)
def _check_orphans(cursor):
"""
Finds participants that
* does not have corresponding elsewhere account
* have not been absorbed by other participant
These are broken because new participants arise from elsewhere
and elsewhere is detached only by take over which makes a note
in absorptions if it removes the last elsewhere account.
Especially bad case is when also claimed_time is set because
there must have been elsewhere account attached and used to sign in.
https://github.com/gratipay/gratipay.com/issues/617
"""
orphans = cursor.all("""
select username
from participants
where not exists (select * from elsewhere where elsewhere.participant=username)
and not exists (select * from absorptions where archived_as=username)
""")
assert len(orphans) == 0, "missing elsewheres: {}".format(list(orphans))
def _check_orphans_no_tips(cursor):
"""
Finds participants
    * without an elsewhere account attached
    * having a non-zero outstanding tip
This should not happen because when we remove the last elsewhere account
in take_over we also zero out all tips.
"""
orphans_with_tips = cursor.all("""
WITH valid_tips AS (SELECT * FROM current_tips WHERE amount > 0)
SELECT username
FROM (SELECT tipper AS username FROM valid_tips
UNION
SELECT tippee AS username FROM valid_tips) foo
WHERE NOT EXISTS (SELECT 1 FROM elsewhere WHERE participant=username)
""")
assert len(orphans_with_tips) == 0, orphans_with_tips
def _check_paydays_volumes(cursor):
"""
Recalculate *_volume fields in paydays table using exchanges table.
"""
if cursor.one("select exists (select * from paydays where ts_end < ts_start) as running"):
# payday is running
return
charge_volume = cursor.all("""
select * from (
select id, ts_start, charge_volume, (
select coalesce(sum(amount+fee), 0)
from exchanges
where timestamp > ts_start
and timestamp < ts_end
and amount > 0
and recorder is null
and (status is null or status <> 'failed')
) as ref
from paydays
order by id
) as foo
where charge_volume != ref
""")
assert len(charge_volume) == 0
charge_fees_volume = cursor.all("""
select * from (
select id, ts_start, charge_fees_volume, (
select coalesce(sum(fee), 0)
from exchanges
where timestamp > ts_start
and timestamp < ts_end
and amount > 0
and recorder is null
and (status is null or status <> 'failed')
) as ref
from paydays
order by id
) as foo
where charge_fees_volume != ref
""")
assert len(charge_fees_volume) == 0
ach_volume = cursor.all("""
select * from (
select id, ts_start, ach_volume, (
select coalesce(sum(amount), 0)
from exchanges
where timestamp > ts_start
and timestamp < ts_end
and amount < 0
and recorder is null
) as ref
from paydays
order by id
) as foo
where ach_volume != ref
""")
assert len(ach_volume) == 0
ach_fees_volume = cursor.all("""
select * from (
select id, ts_start, ach_fees_volume, (
select coalesce(sum(fee), 0)
from exchanges
where timestamp > ts_start
and timestamp < ts_end
and amount < 0
and recorder is null
) as ref
from paydays
order by id
) as foo
where ach_fees_volume != ref
""")
assert len(ach_fees_volume) == 0
def add_event(c, type, payload):
SQL = """
INSERT INTO events (type, payload)
VALUES (%s, %s)
"""
c.run(SQL, (type, psycopg2.extras.Json(payload)))
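# A minimal usage sketch (the event type and payload are illustrative):
#
#   with db.get_cursor() as cursor:
#       add_event(cursor, 'participant', {'id': 42, 'action': 'claim'})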
| mccolgst/www.gittip.com | gratipay/models/__init__.py | Python | cc0-1.0 | 8,904 |