from sqlalchemy import BigInteger, Column, DateTime, Enum, Integer, String
from virtool.pg.base import Base
from virtool.pg.utils import SQLEnum
class AnalysisFormat(str, SQLEnum):
"""
Enumerated type for analysis file formats
"""
sam = "sam"
bam = "bam"
fasta = "fasta"
fastq = "fastq"
csv = "csv"
tsv = "tsv"
json = "json"
class AnalysisFile(Base):
"""
SQL model to store new analysis files
"""
__tablename__ = "analysis_files"
id = Column(Integer, primary_key=True)
analysis = Column(String)
description = Column(String)
format = Column(Enum(AnalysisFormat))
name = Column(String)
name_on_disk = Column(String, unique=True)
size = Column(BigInteger)
uploaded_at = Column(DateTime)
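# --- Hedged usage sketch (not part of the Virtool codebase) ---
# Assuming a synchronous SQLAlchemy engine bound to the same declarative Base,
# a new analysis file row could be stored roughly like this; the in-memory
# SQLite URL and session handling below are illustrative assumptions.
if __name__ == "__main__":
    from datetime import datetime

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    engine = create_engine("sqlite://")      # assumption: any SQLAlchemy URL works here
    Base.metadata.create_all(engine)         # creates the analysis_files table

    with Session(engine) as session:
        session.add(AnalysisFile(
            analysis="analysis_1",
            description="example upload",
            format=AnalysisFormat.fasta,
            name="result.fa",
            name_on_disk="1-result.fa",
            size=1024,
            uploaded_at=datetime.utcnow(),
        ))
        session.commit()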
|
import click
from prog.cli import create
from prog.cli import delete
from prog.cli import set
from prog.cli import show
from prog.cli import unset
from prog import client
from prog import output
from prog import utils
import json
from argparse import Namespace
CriteriaOpRegex = "regex"
CriteriaOpNotRegex = "!regex"
def _list_waf_rule_display_format(rule):
f = "name"
if f not in rule:
rule[f] = ""
f = "id"
if f not in rule:
rule[f] = ""
f = "patterns"
if f in rule:
fo = output.key_output(f)
s = ""
for crt in rule[f]:
op = "~"
if crt["op"] == CriteriaOpRegex:
op = "~"
elif crt["op"] == CriteriaOpNotRegex:
op = "!~"
s += "%s %s context:%s; \n" % (op, crt["value"], crt["context"])
rule[fo] = s.rstrip("\n")
def _list_waf_multival_display_format(rule, f):
sens = ""
if f in rule:
for s in rule[f]:
if sens == "":
sens = s
else:
sens = sens + ", " + s
rule[f] = sens
def _list_waf_multival_group_list_display_format(rule, f):
sens = ""
if f in rule:
for s in rule[f]:
if sens == "":
sens = s
else:
sens = sens + ", " + s
rule[f] = sens
@show.group("waf")
@click.pass_obj
def show_waf(data):
"""Show waf configuration."""
@show_waf.group("rule", invoke_without_command=True)
@click.option("--page", default=5, type=click.IntRange(1), help="list page size, default=5")
@click.option('--sort_dir', type=click.Choice(['asc', 'desc']), default='asc',
help="sort direction.")
@click.pass_obj
@click.pass_context
def show_waf_rule(ctx, data, page, sort_dir):
"""Show waf rules."""
if ctx.invoked_subcommand is not None:
return
args = {'start': 0, 'limit': page}
while True:
drs = data.client.list("waf/rule", "rule", **args)
columns = ("name", "id", "patterns")
for dr in drs:
_list_waf_rule_display_format(dr)
output.list(columns, drs)
if args["limit"] > 0 and len(drs) < args["limit"]:
break
click.echo("Press <esc> to exit, press other key to continue ...")
c = utils.keypress()
if ord(c) == 27:
break
args["start"] += page
@show_waf_rule.command()
@click.argument("name")
@click.pass_obj
def detail(data, name):
"""Show waf rule detail."""
rentry = data.client.show("waf/rule", "rule", name)
if not rentry:
return
fdr = "sensors"
if fdr not in rentry:
rentry[fdr] = ""
else:
_list_waf_multival_display_format(rentry, fdr)
click.echo("Used by sensor(s): %s" % rentry[fdr])
columns = ("name", "id", "patterns")
for r in rentry["rules"]:
_list_waf_rule_display_format(r)
output.list(columns, rentry["rules"])
@show_waf.group("sensor", invoke_without_command=True)
@click.option("--page", default=5, type=click.IntRange(1), help="list page size, default=5")
@click.option('--sort_dir', type=click.Choice(['asc', 'desc']), default='asc',
help="sort direction.")
@click.pass_obj
@click.pass_context
def show_waf_sensor(ctx, data, page, sort_dir):
"""Show waf sensors."""
if ctx.invoked_subcommand is not None:
return
args = {'start': 0, 'limit': page}
while True:
drs = data.client.list("waf/sensor", "sensor", **args)
for dr in drs:
click.echo("Sensor: %s" % (dr["name"]))
gr = "groups"
if gr not in dr:
dr[gr] = ""
else:
_list_waf_multival_group_list_display_format(dr, gr)
click.echo("Used by group(s):%s" % (dr[gr]))
gr = "comment"
if gr not in dr:
dr[gr] = ""
click.echo("Comment:\"%s\"" % (dr[gr]))
click.echo("Type: %s" % (client.CfgTypeDisplay[dr["cfg_type"]]))
gr = "predefine"
if gr not in dr:
dr[gr] = False
if dr[gr]:
click.echo("Predefined:True")
else:
click.echo("Predefined:False")
columns = ("name", "patterns")
fdr = "rules"
if fdr not in dr:
dr[fdr] = ""
click.echo("%s" % (dr[fdr]))
else:
for dre in dr[fdr]:
_list_waf_rule_display_format(dre)
output.list(columns, dr["rules"])
if args["limit"] > 0 and len(drs) < args["limit"]:
break
click.echo("Press <esc> to exit, press other key to continue ...")
c = utils.keypress()
if ord(c) == 27:
break
args["start"] += page
@show_waf_sensor.command()
@click.argument("name")
@click.option("--page", default=5, type=click.IntRange(1), help="list page size, default=5")
@click.option('--sort_dir', type=click.Choice(['asc', 'desc']), default='asc',
help="sort direction.")
@click.pass_obj
def detail(data, page, sort_dir, name):
"""Show waf sensor detail."""
dr = data.client.show("waf/sensor", "sensor", name)
if not dr:
return
gr = "groups"
if gr not in dr:
dr[gr] = ""
else:
_list_waf_multival_group_list_display_format(dr, gr)
click.echo("Used by group(s):%s" % (dr[gr]))
gr = "comment"
if gr not in dr:
dr[gr] = ""
click.echo("Comment:\"%s\"" % (dr[gr]))
click.echo("Type: %s" % (client.CfgTypeDisplay[dr["cfg_type"]]))
gr = "predefine"
if gr not in dr:
dr[gr] = False
if dr[gr]:
click.echo("Predefined:True")
else:
click.echo("Predefined:False")
fdr = "rules"
if fdr not in dr:
dr[fdr] = ""
click.echo("%s" % (dr[fdr]))
else:
for r in dr["rules"]:
_list_waf_rule_display_format(r)
# columns = ("name", "id", "pattern")
columns = ("name", "patterns")
output.list(columns, dr["rules"])
# create
def _add_waf_criterion(key, value, context):
k = key
v = value
op = CriteriaOpRegex
ctxt = context
# Empty value is not allowed.
if len(v) > 1:
if v[0] == '~':
op = CriteriaOpRegex
v = v[1:]
elif len(v) > 2 and v[0] == '!' and v[1] == '~':
op = CriteriaOpNotRegex
v = v[2:]
else:
return None
else:
return None
return {"key": k, "value": v, "op": op, "context": ctxt}
def _add_waf_criteria(pct, key, value, context):
e = _add_waf_criterion(key, value, context)
if not e:
click.echo("Error: Invalid input of --%s %s" % (key, value))
return False
pct.append(e)
return True
@create.group("waf")
@click.pass_obj
def create_waf(data):
"""Create waf object. """
@create_waf.group("sensor")
@click.argument('name')
@click.option("--comment", default="", help="Sensor comment")
@click.pass_obj
def create_waf_sensor(data, name, comment):
"""Create waf sensor."""
data.id_or_name = name
data.comment = comment
@create_waf_sensor.command("rule")
@click.argument('name')
@click.argument('pattern')
@click.option("--context", default="packet", type=click.Choice(['url', 'header', 'body', 'packet']),
help="Set pattern match context, eg. HTTP URL, HEADER , BODY or PACKET")
@click.pass_obj
def create_waf_sensor_rule(data, name, pattern, context):
"""Create waf sensor with rule
For PATTERN, use regex: ~'value', empty string pattern is not allowed.
"""
pct = []
if not _add_waf_criteria(pct, "pattern", pattern, context):
return
if len(pct) == 0:
click.echo("Error: Must create waf rule with pattern.")
return
rule = {"name": name, "patterns": pct}
cfg = {"name": data.id_or_name, "rules": [rule], "comment": data.comment}
data.client.create("waf/sensor", {"config": cfg})
# delete
@delete.group("waf")
@click.pass_obj
def delete_waf(data):
"""Delete waf object. """
@delete_waf.command("sensor")
@click.argument('name')
@click.pass_obj
def delete_waf_sensor(data, name):
"""Delete waf sensor."""
data.client.delete("waf/sensor", name)
# set
@set.group("waf")
@click.pass_obj
def set_waf(data):
"""Set waf configuration. """
@set_waf.group("sensor")
@click.argument('name')
@click.option("--comment", default="", help="Sensor comment")
@click.pass_obj
def set_waf_sensor(data, name, comment):
"""Set waf sensor configuration."""
data.id_or_name = name
data.comment = comment
@set_waf_sensor.command("rule")
@click.argument('name')
@click.argument('pattern')
@click.option("--context", default="packet", type=click.Choice(['url', 'header', 'body', 'packet']),
help="Set pattern match context, eg. HTTP URL, HEADER , BODY or PACKET")
@click.pass_obj
def set_waf_sensor_rule(data, name, pattern, context):
"""Add waf rule to sensor
For PATTERN, use regex: ~'value', empty string pattern is not allowed.
"""
pct = []
if not _add_waf_criteria(pct, "pattern", pattern, context):
return
if len(pct) == 0:
click.echo("Error: Must create waf rule with pattern.")
return
rule = {"name": name, "patterns": pct}
cfg = {"name": data.id_or_name, "change": [rule], "comment": data.comment}
data.client.config("waf/sensor", data.id_or_name, {"config": cfg})
@unset.group("waf")
@click.pass_obj
def unset_waf(data):
"""Unset waf configuration. """
@unset_waf.group("sensor")
@click.argument('name')
@click.pass_obj
def unset_waf_sensor(data, name):
"""Set waf sensor configuration."""
data.id_or_name = name
@unset_waf_sensor.command("rule")
@click.argument('name')
@click.pass_obj
def unset_waf_sensor_rule(data, name):
"""Delete rule from sensor. """
cfg = {"name": data.id_or_name, "delete": [{"name": name}]}
data.client.config("waf/sensor", data.id_or_name, {"config": cfg})
|
import sys
import tkinter
class EmbeddedConsole:
def __init__(self, window):
self.frame = tkinter.Frame(window)
self.entry = tkinter.Entry(self.frame)
self.entry.pack()
self.doIt = tkinter.Button(self.frame, text="Execute", command=self.on_enter)
self.doIt.pack()
self.output = tkinter.Text(self.frame)
self.output.pack()
sys.stdout = self
def on_enter(self):
print(eval(self.entry.get()))
def write(self, txt):
self.output.insert('end', str(txt))
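# --- Hedged usage sketch (assumes a display is available for Tk) ---
# EmbeddedConsole redirects sys.stdout in __init__, so the result of each
# evaluated expression is written to the Text widget rather than the terminal.
if __name__ == "__main__":
    root = tkinter.Tk()
    console = EmbeddedConsole(root)
    console.frame.pack()
    root.mainloop()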
|
from swift.ipvl.inspect_custom import whoami, whosdaddy
pass # (WIS) print __name__
class ContainerQuotaMiddleware(object):
"""docstring for ContainerQuotaMiddleware"""
def __init__(self, app):
pass # (WIS) print "%s %s (%s -> %s)" % (__name__, self.__class__.__name__, whosdaddy(), whoami())
self.app = app
def __call__(self, env, start_response):
pass # (WIS) print "%s %s\n" % (self.__class__.__name__, env)
start_response('200 OK', [('Content-Type', 'text/plain')])
return self.__class__.__name__ + " -> " + self.app(env, start_response)
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
pass # (WIS) print "%s (%s -> %s)" % (__name__, whosdaddy(), whoami())
conf = global_conf.copy()
conf.update(local_conf)
# register_swift_info('container_quotas')
def container_quota_filter(app):
pass # (WIS) print "%s (%s -> %s)" % (__name__, whosdaddy(), whoami())
return ContainerQuotaMiddleware(app)
return container_quota_filter
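# --- Hedged usage sketch (illustrative; the dummy WSGI app below is an assumption) ---
# paste.deploy would normally build this filter from an ini pipeline; calling
# the factory directly shows the same wiring.
if __name__ == "__main__":
    def dummy_app(env, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return 'dummy'

    wrapped = filter_factory({})(dummy_app)
    print(wrapped({}, lambda status, headers: None))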
|
#!/usr/bin/python -tt
"""Ansible CallBackModule to log output."""
# pylint: disable=W0212
from datetime import datetime
from ansible.plugins.callback import CallbackBase
try:
from spotmax import spotmax
except ImportError:
pass
class PlayLogger(object):
"""Store log output in a single object."""
def __init__(self):
self.log = ''
self.runtime = 0
def append(self, log_line):
"""append to log"""
self.log += log_line+"\n\n"
class CallbackModule(CallbackBase):
"""Format Ansible output."""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stored'
CALLBACK_NAME = 'database'
def __init__(self):
super(CallbackModule, self).__init__()
self.logger = PlayLogger()
self.start_time = datetime.now()
def v2_runner_on_failed(self, result, ignore_errors=False):
"""Failed host."""
delegated_vars = result._result.get('_ansible_delegated_vars', None)
# Catch an exception
# This may never be called because default handler deletes
# the exception, since Ansible thinks it knows better
if 'exception' in result._result:
# Extract the error message and log it
error = result._result['exception'].strip().split('\n')[-1]
self.logger.append(error)
# Remove the exception from the result so it's not shown every time
del result._result['exception']
# Else log the reason for the failure
if result._task.loop and 'results' in result._result:
self._process_items(result) # item_on_failed, item_on_skipped, item_on_ok
else:
if delegated_vars:
self.logger.append("fatal: [%s -> %s]: FAILED! => %s" % (
result._host.get_name(),
delegated_vars['ansible_host'],
self._dump_results(result._result)))
else:
self.logger.append("fatal: [%s]: FAILED! => %s" % (result._host.get_name(),
self._dump_results(result._result)))
def v2_runner_on_ok(self, result):
"""OK host."""
self._clean_results(result._result, result._task.action)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if result._task.action == 'include':
return
elif result._result.get('changed', False):
if delegated_vars:
msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "changed: [%s]" % result._host.get_name()
else:
if delegated_vars:
msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "ok: [%s]" % result._host.get_name()
if 'ansible_facts' not in result._result:
msg += '\n%s' % self._dump_results(result._result)
if result._task.loop and 'results' in result._result:
self._process_items(result) # item_on_failed, item_on_skipped, item_on_ok
else:
self.logger.append(msg)
def v2_runner_on_skipped(self, result):
"""Skipped host."""
if result._task.loop and 'results' in result._result:
self._process_items(result) # item_on_failed, item_on_skipped, item_on_ok
else:
msg = "skipping: [%s]" % result._host.get_name()
self.logger.append(msg)
def v2_runner_on_unreachable(self, result):
"""Unreachable host."""
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if delegated_vars:
self.logger.append("fatal: [%s -> %s]: UNREACHABLE! => %s" % (
result._host.get_name(),
delegated_vars['ansible_host'],
self._dump_results(result._result)))
else:
self.logger.append("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(),
self._dump_results(result._result)))
def v2_runner_on_no_hosts(self, task):
self.logger.append("skipping: no hosts matched")
def v2_playbook_on_task_start(self, task, is_conditional):
self.logger.append("TASK [%s]" % task.get_name().strip())
def v2_playbook_on_play_start(self, play):
name = play.get_name().strip()
if not name:
msg = "PLAY"
else:
msg = "PLAY [%s]" % name
self.logger.append(msg)
def v2_playbook_item_on_ok(self, result):
"""OK item."""
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if result._task.action == 'include':
return
elif result._result.get('changed', False):
if delegated_vars:
msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "changed: [%s]" % result._host.get_name()
else:
if delegated_vars:
msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "ok: [%s]" % result._host.get_name()
msg += " => (item=%s)" % (result._result['item'])
self.logger.append(msg)
def v2_playbook_item_on_failed(self, result):
"""Failed item."""
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if 'exception' in result._result:
# Extract the error message and log it
error = result._result['exception'].strip().split('\n')[-1]
self.logger.append(error)
# Remove the exception from the result so it's not shown every time
del result._result['exception']
if delegated_vars:
self.logger.append("failed: [%s -> %s] => (item=%s) => %s" % (
result._host.get_name(),
delegated_vars['ansible_host'],
result._result['item'],
self._dump_results(result._result)))
else:
self.logger.append("failed: [%s] => (item=%s) => %s" % (result._host.get_name(),
result._result['item'],
self._dump_results(result._result)))
def v2_playbook_item_on_skipped(self, result):
"""Skipped item."""
msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), result._result['item'])
self.logger.append(msg)
def v2_playbook_on_stats(self, stats):
"""Recap."""
run_time = datetime.now() - self.start_time
self.logger.runtime = run_time.seconds # returns an int, unlike run_time.total_seconds()
hosts = sorted(stats.processed.keys())
for host in hosts:
host_status = stats.summarize(host)
msg = "PLAY RECAP [%s] : %s %s %s %s %s" % (
host,
"ok: %s" % (host_status['ok']),
"changed: %s" % (host_status['changed']),
"unreachable: %s" % (host_status['unreachable']),
"skipped: %s" % (host_status['skipped']),
"failed: %s" % (host_status['failures']),
)
self.logger.append(msg)
def record_logs(self,
username,
success=False,
extra_vars=None,
playbook=None,
search_filter=None):
"""Log Ansible run.
Args:
username: string, username running playbook
success: boolean, run success
extra_vars: dict, ansible extra variables
playbook: string, playbook file name
search_filter: string, hosts for the playbook
"""
log = spotmax.SPOTLog()
# Remove password
try:
del extra_vars['password']
except KeyError:
pass
log.log(username=username,
playbook=playbook.split('/',)[-1],
search_filter=search_filter,
arguments=extra_vars,
runtime=self.logger.runtime,
success=success,
output=self.logger.log)
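# --- Hedged usage sketch (illustrative; Ansible's executor normally drives these callbacks) ---
if __name__ == "__main__":
    cb = CallbackModule()
    cb.logger.append("PLAY [example]")
    cb.logger.append("TASK [ping]")
    print(cb.logger.log)
    print("runtime (s):", cb.logger.runtime)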
|
import math
import random
import re
from kivy.app import App
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.garden.iconfonts import icon
from kivy.garden.magnet import Magnet
from kivy.graphics import Color, Ellipse, Line, Rectangle, RoundedRectangle
from kivy.metrics import dp, sp
from kivy.properties import (ColorProperty, ListProperty, NumericProperty,
ObjectProperty, StringProperty)
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.label import Label
from kivy.uix.modalview import ModalView
from kivy.uix.textinput import TextInput
from kivy.uix.togglebutton import ToggleButton
from kivy.utils import rgba
from app.storage import Database
from customs.customs import MaterialWidget
from vendor.graph import BarPlot, LinePlot, MeshLinePlot, SmoothLinePlot
class Card(Magnet):
card_id = NumericProperty()
back = ColorProperty([0,0,1,1])
name = StringProperty()
vendor = StringProperty()
num = StringProperty()
exp = StringProperty()
balance = StringProperty()
def __init__(self, **kw):
super().__init__(**kw)
def on_vendor(self, inst, val):
if val == 'mastercard':
master = MasterCard()
self.ids.logo.add_widget(master)
elif val == 'visa':
lbl = Label(text='VISA')
lbl.font_name = 'app/assets/fonts/Roboto-BlackItalic.ttf'
self.ids.logo.add_widget(lbl)
def on_balance(self, inst, value):
self.ids.balance.text = str(value)
class Container(MaterialWidget):
primary = ColorProperty()
empty = NumericProperty(1)
def __init__(self, **kw):
super().__init__(**kw)
self.primary = rgba("#2C323C")
def draw_back(self, dtx):
with self.canvas.before:
Color(rgba=App.get_running_app().root.primary)
Rectangle(pos=self.pos, size=self.size)
class FlatView(ModalView):
def __init__(self, **kw):
super().__init__(**kw)
self.background_color = [0,0,0,0]
self.background_normal = ''
class Graph(Magnet):
def __init__(self, **kw):
super().__init__(**kw)
def draw_bar(self, data: list):
norm = self.normalise(data)
for i, d in enumerate(norm):
bar = Bar()
bar.background_color = [0,1,0,1]
bar.text = str(data[i])
bar.size_hint_y = d
# bar.color = [0,0,0,0]
self.ids.graph.add_widget(bar)
def normalise(self, x: list):
"""Given a list of values, normalise The
values to a range of 0 - 1
Parameters
----------
x : list
A list of values to normalise
Returns
-------
list
A normalised list
"""
norm = list()
for xi in x:
zi = (xi-min(x)) / (max(x) - min(x))
norm.append(zi)
return norm
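# --- Hedged standalone illustration of the min-max normalisation used by
# Graph.normalise above (sample values are made up) ---
#
#     sample = [4.0, 7.0, 10.0]
#     lo, hi = min(sample), max(sample)
#     [(v - lo) / (hi - lo) for v in sample]   # -> [0.0, 0.5, 1.0]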
class OutlineInput(TextInput):
def __init__(self, **kw):
super().__init__(**kw)
class DateInput(OutlineInput):
def __init__(self, **kw):
super().__init__(**kw)
pat = re.compile('[^0-9]')
def insert_text(self, substring, from_undo=False):
_text = ''.join([self.text, substring])
if len(_text) > 5:
return
pat = self.pat
s = re.sub(pat, '', substring)
if len(self.text) >= 2:
x = self.text[:2]
y = self.text[2:]
if int(x) > 12 or int(x) < 1:
x = '12'
self.text = '/'.join([x, y])
self.text = self.text.replace('//','/')
return super().insert_text(s, from_undo=from_undo)
class CardInput(OutlineInput):
def __init__(self, **kw):
super().__init__(**kw)
pat = re.compile('[^0-9]')
def insert_text(self, substring, from_undo=False):
_text = ''.join([self.text, substring])
if len(_text) > 4:
return
pat = self.pat
s = re.sub(pat, '', substring)
return super().insert_text(s, from_undo=from_undo)
class NewDeposit(Container):
def __init__(self, **kw):
super().__init__(**kw)
class NewCard(Container):
def __init__(self, **kw):
super().__init__(**kw)
class NewExpense(Container):
def __init__(self, **kw):
super().__init__(**kw)
class Bar(Label):
def __init__(self, **kw):
super().__init__(**kw)
self.color = [0,0,0,0]
class ExpenseProgress(BoxLayout):
max = NumericProperty()
name = StringProperty()
value = NumericProperty()
def __init__(self, **kw):
super().__init__(**kw)
class MasterCard(Label):
def __init__(self, **kw):
super().__init__(**kw)
class FloatingButton(FloatLayout):
callback = ObjectProperty()
def __init__(self, **kw):
super().__init__(**kw)
def trigger_release(self):
self.callback()
class ExpenseChip(Container):
expense_id = NumericProperty()
icon = StringProperty()
name = StringProperty()
card = StringProperty()
amount = StringProperty()
def __init__(self, **kw):
super().__init__(**kw)
class FlatToggle(ToggleButton):
def __init__(self, **kw):
super().__init__(**kw)
def on_state(self, inst, value):
if value == 'normal':
self.color = rgba('#ffffff')
else:
self.color = App.get_running_app().root.success
class MainWindow(BoxLayout):
primary = ColorProperty()
card_colors = ListProperty()
back = ColorProperty()
success = ColorProperty()
danger = ColorProperty()
secondary = ColorProperty()
tertiary = ColorProperty()
def __init__(self, **kw):
super().__init__(**kw)
self.db = Database()
Clock.schedule_once(self.init_view, .1)
# Clock.schedule_once(lambda x: self.change_theme('light'), 5)
# Clock.schedule_once(self.draw_card, 2)
def init_view(self, dtx):
self.primary = rgba("#2C323C")
self.back = rgba("#282C34")
self.danger = rgba('#ff003f')
self.secondary = rgba('#ffffff')
self.tertiary = rgba('#212232')
self.success = rgba('#05a95c')
self.card_colors = [
rgba("#ff621b"),
rgba("#ff5722"),
rgba("#c51162"),
rgba("#f50057"),
rgba("#ff31432b")]
# self.primary = rgba("#242c3f")
# expenses - (card, card_num, expense, cost, paid, recurring, day_paid)
self.expenses = self.db.get_expenses()
self.all_expenses = dict()
self.sample = (0,'sample card | 0123', 'mastercard', '01/94', '2048.00')
self.cards = [list(x) for x in self.db.get_cards()]
if len(self.cards) < 1:
self.cards.append(self.sample)
self.total_balance = round(sum([float(x[4]) for x in self.cards]), 3)
bal, c = str(self.total_balance).rsplit('.',1)
bal = '%s[size=%s]%s[/size]'%(bal,int(sp(10)), c)
self.ids.total_bal.text = bal
for e in self.cards:
key = e[1].upper()
self.all_expenses[key] = list()
for e in self.expenses:
key = ' | '.join([e[1].upper(), e[2]])
self.all_expenses[key].append(e)
data = [float(x[4]) for x in self.expenses]
graph = Graph()
if len(data) > 2:
graph.draw_bar(data)
self.ids.graph_wrapper.add_widget(graph)
self.data_error(data, graph)
tcard = self.cards[0][1].upper()
card_total = sum([float(x[4]) for x in self.all_expenses[tcard.upper()] if x[8] == 'True'])
if card_total < 1:
card_total = '0.00'
total, cents = str(card_total).rsplit('.')
cents = '[size=%s].%s[/size]'%(int(sp(12)), cents)
self.ids.card_expense_total.text = ''.join(['$ ', total, cents])
for ex in self.all_expenses[tcard]:
ec = ExpenseChip()
ec.icon = icon('zmdi-store')
ec.card = ' | '.join([ex[1], ex[2]])
ec.name = ex[3]
ec.amount = ex[4]
ec.size_hint_y = None
ec.height = sp(42)
ec.ids.amount.color = rgba('#00ff00') if ex[8] == 'False' else self.danger
self.ids.cards_expenses.add_widget(ec)
self.data_error(self.all_expenses[tcard], self.ids.cards_expenses)
for ex in self.expenses:
ec = ExpenseChip()
ec.icon = icon('zmdi-store') if ex[8] == 'True' else icon('zmdi-balance')
ec.card = ' | '.join([ex[1], ex[2]])
ec.name = ex[3]
ec.amount = ex[4]
ec.size_hint_y = None
ec.height = sp(42)
ec.ids.amount.color = rgba('#00ff00') if ex[8] == 'False' else self.danger
self.ids.overview_history.add_widget(ec)
self.data_error(self.all_expenses[tcard], self.ids.overview_history)
for card in self.cards:
_card, num = card[1].rsplit(' | ',1)
c = Card()
c.card_id = card[0]
c.name = _card.upper()
c.vendor = card[2]
c.num = num
c.exp = card[3]
sc = Card()
sc.card_id = card[0]
sc.name = _card
sc.vendor = card[2]
sc.num = num
sc.exp = card[3]
balance = card[4] if '.' in card[4] else '.'.join([card[4], '00'])
bal, cents = balance.rsplit('.',1)
balance = '%s[size=%s].%s[/size]'%(bal, int(sp(14)), cents)
c.balance = balance
sc.balance = balance
self.ids.cards_wrapper.add_widget(c)
self.ids.stats_cards.add_widget(sc)
self.data_error(self.cards, self.ids.stats_cards)
self.data_error(self.cards, self.ids.cards_wrapper)
# ToDo - Allow rotation
# if Window.width < Window.height or len(self.ids.cards_wrapper.children) > 0: #landscape mode
# break
# else:
# self.ids.cards_wrapper.spacing = sp(15)
#init tabs
self.ids.default_tab.state = 'down'
tcard = self.cards[0][1].upper()
self.update_stats(self.all_expenses[tcard])
# Add Recurring expenses
# expenses - (card, card_num, expense, cost, paid, recurring, date_paid)
recurring = [x for x in self.all_expenses[tcard] if x[6] == 'True']
# print(ipoints)
for r in recurring:
ep = ExpenseProgress()
ep.max = float(r[4])
ep.value = float(r[5])
ep.name = r[3].lower()
ep.size_hint_y = None
ep.height = sp(18)
self.ids.recurring_wrapper.add_widget(ep)
self.data_error(recurring, self.ids.recurring_wrapper)
def data_error(self, expenses, w):
lbl = Label(text='no data to show', shorten=True)
lbl.shorten_from='right'
lbl.color=rgba('#ffffff42')
lbl.text_size = lbl.size
lbl.valign = 'middle'
lbl.halign = 'center'
if len(expenses) == 0:
print(w)
w.add_widget(lbl)
def update_stats(self, expenses):
ipoints = self.get_points(expenses)
try:
ymax = max(ipoints, key=lambda x: x[1])[1]
# xmax = max(ipoints, key=lambda x: x[0])
except ValueError:
ymax = 100
self.ids.stats_graph.y_ticks_major = math.ceil(ymax/9)
self.ids.stats_graph.ymax = ymax
self.plot = LinePlot(color=self.danger)
self.plot.line_width = 1.4
self.plot.points = ipoints
self.ids.stats_graph.add_plot(self.plot)
def get_points(self, expenses):
points = [(int(x[7].split('/',1)[0]), float(x[4])) for x in expenses]
points.insert(0, (0,0))
points = sorted(points, key=lambda x: x[0])
ipoints = points
dups = list()
for x, p in enumerate(points):
for y, i in enumerate(points):
if x == y:
continue
else:
if p[0] == i[0]:
dups.append((p,i))
ipoints.remove(p)
ipoints.remove(i)
for d in dups:
val = d[0][1] + d[1][1]
ipoints.append((d[0][0], val))
ipoints = sorted(ipoints, key=lambda x: x[0])
return ipoints
def view_stats(self, carousel, next=0):
try:
if next == 0:
cname = carousel.next_slide.name
cnum = carousel.next_slide.num
carousel.load_next()
elif next == 1:
cname = carousel.previous_slide.name
cnum = carousel.previous_slide.num
carousel.load_previous()
else:
cname = carousel.current_slide.name
cnum = carousel.current_slide.num
tcard = ' | '.join([cname, cnum])
# sample - ('paypal prepaid', '2519', 'GoDaddy', '3.65', '3.65', 'True', '27')
self.ids.stats_graph.remove_plot(self.plot)
self.update_stats(self.all_expenses[tcard.upper()])
self.ids.recurring_wrapper.clear_widgets()
recurring = [x for x in self.all_expenses[tcard.upper()] if x[6] == 'True']
# print(ipoints)
for r in recurring:
ep = ExpenseProgress()
ep.max = float(r[4])
ep.value = float(r[5])
ep.name = r[3].lower()
ep.size_hint_y = None
ep.height = sp(18)
self.ids.recurring_wrapper.add_widget(ep)
self.data_error(recurring, self.ids.recurring_wrapper)
except AttributeError:
pass
# self.graph.remove_plot(self.plot)
def view_expenses(self, carousel, next=0):
try:
if next == 0:
cname = carousel.next_slide.name
cnum = carousel.next_slide.num
carousel.load_next()
elif next == 1:
cname = carousel.previous_slide.name
cnum = carousel.previous_slide.num
carousel.load_previous()
else:
cname = carousel.current_slide.name
cnum = carousel.current_slide.num
tcard = ' | '.join([cname, cnum])
card_total = sum([float(x[5]) for x in self.all_expenses[tcard.upper()] if x[8] == 'True'])
card_total = round(card_total,2)
if card_total == 0:
card_total = '0.00'
total, cents = str(card_total).rsplit('.')
cents = '[size=%s].%s[/size]'%(int(sp(12)), cents)
self.ids.card_expense_total.text = ''.join(['$ ', total, cents])
self.ids.cards_expenses.clear_widgets()
for ex in self.all_expenses[tcard.upper()]:
ec = ExpenseChip()
ec.icon = icon('zmdi-store')
ec.card = ' | '.join([ex[1], ex[2]])
ec.name = ex[3]
ec.amount = ex[5]
ec.size_hint_y = None
ec.height = sp(42)
ec.ids.amount.color = rgba('#00ff00') if ex[8] == 'False' else self.danger
self.ids.cards_expenses.add_widget(ec)
except AttributeError:
pass
def add_new(self, modal, add='card'):
modal.dismiss()
fv = ModalView(size_hint=[.8, .5], padding=sp(10))
if add == 'card':
nc = NewCard()
nc.ids.submit.bind(on_release=lambda x: self.new_card(fv, nc))
fv.add_widget(nc)
elif add == 'deposit':
nd = NewDeposit()
nd.ids.submit.bind(on_release=lambda x: self.new_deposit(fv, nd))
fv.add_widget(nd)
else:
ne = NewExpense()
ne.ids.submit.bind(on_release=lambda x: self.new_expense(fv, ne))
fv.add_widget(ne)
fv.open()
def new_card(self, modal, obj):
modal.dismiss()
name = obj.ids.name.text.upper()
num = obj.ids.num.text
vendor = obj.ids.vendor.text
exp = obj.ids.date.text
balance = obj.ids.balance.text
if len(name) > 3 and len(num) == 4 and vendor != 'card vendor' and len(exp) > 3 and len(balance) > 1:
balance = balance if '.' in balance else '.'.join([balance, '00'])
_balance = balance
bal, cents = balance.rsplit('.',1)
balance = '%s[size=%s].%s[/size]'%(bal, int(sp(14)), cents)
m, y = exp.rsplit('/',1)
y = y[2:]
exp = '/'.join([m,y])
# ToDo: Add Card To DB
# ('sample card', 'mastercard', '0123', '01/94', '2048.00')
key = ' | '.join([name.upper(), num])
new_card = [key, vendor, exp, _balance]
cid = self.db.add_card(new_card)
if cid != -1:
new_card.insert(0, cid)
self.cards.append(new_card)
self.all_expenses[key] = list()
card = Card()
card.card_id = cid
card.balance = balance
card.name = name
card.exp = exp
card.vendor = vendor
card.num = num
scard = Card()
scard.card_id = cid
scard.balance = balance
scard.name = name
scard.exp = exp
scard.vendor = vendor
scard.num = num
cw = self.ids.cards_wrapper
sc = self.ids.stats_cards
cw.add_widget(card)
sc.add_widget(scard)
for c in cw.slides:
if c.card_id == 0:
cw.remove_widget(c)
for c in sc.slides:
if c.card_id == 0:
sc.remove_widget(c)
self.total_balance = round(sum([float(x[4]) for x in self.cards]), 3)
bal, c = str(self.total_balance).rsplit('.',1)
bal = '%s[size=%s]%s[/size]'%(bal,int(sp(10)), c)
self.ids.total_bal.text = bal
def new_deposit(self, modal, obj):
modal.dismiss()
amount = obj.ids.amount.text
name = obj.ids.name.text
card = obj.ids.card.text
date = obj.ids.date.text
if len(amount) > 0 and len(name) > 2 and card != 'Deposit Card':
amount = amount if '.' in amount else '.'.join([amount, '00'])
bal, cents = amount.rsplit('.',1)
# expenses - (card, card_num, expense, cost, paid, recurring, day_paid)
c, n = card.rsplit(' | ',1)
new_expense = [c, n, name, amount, amount, 'False', date, 'False']
eid = self.db.add_expense(new_expense)
if eid != -1:
for c in self.cards:
if c[1] == card:
prev_bal = c[4]
_balance = round(float(prev_bal) + float(amount), 3)
c[4] = str(round(_balance,3))
break
for c in self.ids.cards_wrapper.slides:
name = ' | '.join([c.name, c.num])
if name == card:
_b,_c = str(_balance).rsplit('.',1)
_bal = "%s[size=%s].%s[/size]"%(_b,int(sp(10)),_c)
c.balance = _bal
self.db.update_card((_balance, c.card_id))
new_expense.insert(0, eid)
self.expenses.append(new_expense)
self.all_expenses[card].append(new_expense)
amount = '%s[size=%s].%s[/size]'%(bal, int(sp(14)), cents)
self.total_balance = round(sum([float(x[4]) for x in self.cards]), 3)
bal, c = str(self.total_balance).rsplit('.',1)
bal = '%s[size=%s]%s[/size]'%(bal,int(sp(10)), c)
self.ids.total_bal.text = bal
ec = ExpenseChip()
ec.icon = icon('zmdi-balance')
ec.card = card
ec.name = name
ec.amount = amount
ec.size_hint_y = None
ec.height = sp(42)
ec.ids.amount.color = rgba('#00ff00')
idx = len(self.ids.overview_history.children)
self.ids.overview_history.add_widget(ec, idx)
self.view_stats(self.ids.stats_cards, -1)
self.view_expenses(self.ids.cards_wrapper, -1)
def new_expense(self, modal, obj):
modal.dismiss()
name = obj.ids.name.text
cost = obj.ids.cost.text
day = obj.ids.day.text
card = obj.ids.card.text
recurring = obj.ids.recurring.state
paid = obj.ids.paid.state
if len(cost) > 0 and len(name) > 2 and card != 'Expense Card':
cost = cost if '.' in cost else '.'.join([cost, '00'])
_cost = cost
bal, cents = cost.rsplit('.',1)
cost = '%s[size=%s].%s[/size]'%(bal, int(sp(14)), cents)
# expenses - (card, card_num, expense, cost, paid, recurring, day_paid)
c, n = card.rsplit(' | ',1)
if recurring == 'down':
sub = 'True'
else:
sub = 'False'
new_expense = [c, n, name, _cost, _cost, sub, day, 'True']
eid = self.db.add_expense(new_expense)
if eid != -1:
for c in self.cards:
if c[1] == card:
prev_bal = c[4]
_balance = round(float(prev_bal) - float(_cost), 3)
c[4] = str(round(_balance,3))
break
for c in self.ids.cards_wrapper.slides:
name = ' | '.join([c.name, c.num])
if name == card:
_b,_c = str(_balance).rsplit('.',1)
_bal = "%s[size=%s].%s[/size]"%(_b,int(sp(10)),_c)
c.balance = _bal
self.db.update_card((_balance, c.card_id))
new_expense.insert(0, eid)
self.expenses.append(new_expense)
self.all_expenses[card.upper()].append(new_expense)
self.total_balance = round(sum([float(x[4]) for x in self.cards]), 3)
bal, c = str(self.total_balance).rsplit('.',1)
bal = '%s[size=%s]%s[/size]'%(bal,int(sp(10)), c)
self.ids.total_bal.text = bal
if recurring == 'down':
ep = ExpenseProgress()
ep.size_hint_y = None
ep.height = sp(18)
ep.name = name
ep.max = float(_cost)
if paid == 'down':
ep.value = float(_cost)
else:
ep.value = 0
self.ids.recurring_wrapper.add_widget(ep)
ec = ExpenseChip()
ec.icon = icon('zmdi-store')
ec.card = card
ec.name = name
ec.amount = cost
ec.size_hint_y = None
ec.height = sp(42)
idx = len(self.ids.overview_history.children)
self.ids.overview_history.add_widget(ec, idx)
self.view_stats(self.ids.stats_cards, -1)
self.view_expenses(self.ids.cards_wrapper, -1)
def add_expense(self):
fv = ModalView(size_hint=[.6, .3], padding=sp(10))
btn_card = Button(text='Add New Card', background_color=rgba('#2073B5'), background_normal='')
btn_card.bind(on_release=lambda x: self.add_new(fv, 'card'))
btn_expense = Button(text='Add New Expense', background_color=rgba('#2073B5'), background_normal='')
btn_expense.bind(on_release=lambda x: self.add_new(fv, 'expense'))
btn_deposit = Button(text='Add New Deposit', background_color=rgba('#2073B5'), background_normal='')
btn_deposit.bind(on_release=lambda x: self.add_new(fv, 'deposit'))
ctn = Container(orientation='vertical')
ctn.spacing = sp(15)
ctn.elevation = 4
ctn.add_widget(btn_card)
ctn.add_widget(btn_expense)
ctn.add_widget(btn_deposit)
fv.add_widget(ctn)
fv.open()
def draw_card(self, dtx):
c = self.ids.cards_wrapper.children[0]
with c.canvas.before:
pos_x = range(int(c.pos[0]), int(c.size[0]))
pos_y = range(int(c.pos[1]), int(c.size[1]))
size_x = range(int(sp(10)), int(c.size[0]))
size_y = range(int(sp(10)), int(c.size[1]))
Color(rgba=rgba('#ffffff10'))
Ellipse(
pos=[random.choice(pos_x), random.choice(pos_y)],
size=[random.choice(size_x), random.choice(size_y)]
)
def on_back(self, *args):
with self.canvas.before:
Color(rgba=self.back)
Rectangle(
pos=self.pos,
size=self.size
)
def change_theme(self, theme='dark'):
if theme == 'dark':
self.back = rgba("#262d4f")
self.primary = rgba("#242c3f")
self.secondary = rgba('#ffffff')
self.tertiary = rgba('#212232')
self.success = rgba('#05a95c')
else:
self.primary = rgba("#ffffff")
self.secondary = rgba('#242c3f')
self.tertiary = rgba('#f4f4f4')
self.success = rgba('#05a95cb4')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
http://www.myengineeringworld.net/2013/10/Excel-thermochemical-NASA-polynomials-Burcat.html
"""
#
# http://www.wsp.ru/en/..%5Cdownload%5Cdocs%5Carticles%5Cen%5CTENG221.pdf
# http://www.wsp.ru/en/test/wspgGCGS.asp?gas_specification=air
#import sys
#sys.path.append("/home/tux/PycharmProjects/m2py")
from math import (log, exp)
from m2py import units
from m2py.utils import resource_path
from m2py import utils
#import shelve
import json
#data = shelve.open(resource_path("thermo_nasa_poly7.dat"))
data = utils.load_json_file((resource_path("data/gas_nasa_poly7.json")))
nasapoly7coefs = data['coefficients']
molarmass = data['molarmass']
substances = data['substances']
# Unity conversion factors
cal2J = units.factor("cal/J")
J2cal = units.factor("J/cal")
R_ = 1.987204118 # cal/(mol.K)
p0 = 0.1 # MPa
R = 8.31451*1e-3 # KJ/(mol.K)
Tr = 1000 # K
air_table = [-3.62171168554944, 13.1878685737717, -11.61002657829, 6.1800155085671, -1.97996023924462,
0.352570060264284, -0.026853107411115, 1.26880226994069, 4.69260613574416e-1,
-3.09569582156729e-1, 7.2153490824886e-2, -8.07371553566351e-3, 3.61550066177588e-4]
def Rgas(gas):
"""
Return the specific gas constant
"""
return R/molarmass[gas]
def cp_t(T, a):
tau = T / Tr # Reduced temperature
C = 0
for i in range(6):
C += a[i] * tau ** i
_tau = 1 / tau
for i in range(7, 12):
C += a[i] * _tau ** (i - 6)
cp = C * R
return cp
def h_t(T, hint, a):
tau = T / Tr
C = 0
for i in range(6):
C += a[i] / (i + 1) * tau ** i
C += a[7] * log(tau)
_tau = 1 / tau
for i in range(8, 12):
C += a[i] / (7 - i) * _tau ** (i - 7)
h = Tr*R*C #+ hint
return h
def get_substances():
return list(nasapoly7coefs.keys())
def get_molarmass(substance):
"""Return molar mass in kg/mol"""
return molarmass[substance]
def __cp_nasap_p7__(T, substance):
"""
Calculates Heat Capacity cp - [cal/(mol.K)]
from Nasa 7-coefficient Polynomials.
Cp/R = a1 + a2 T + a3 T^2 + a4 T^3 + a5 T^4
:param T: Temperature in K
:return: cp Heat Capacity/ Cp heat in [cal/(mol.K)]
"""
a = nasapoly7coefs[substance]
C = 0
for i in range(5):
C = C + a[i]*T**i
return round(C*R_, 2)
def cp_nasa_p7_mol(T, substance):
"""
Calculates Heat Capacity cp - [J/(mol.K)]
from Nasa 7-coefficient Polynomials.
Cp/R = a1 + a2 T + a3 T^2 + a4 T^3 + a5 T^4
:param T: Temperature in C
:return: cp Heat Capacity/ Cp heat in [ J/(mol.K)]
"""
T = units.c2k(T)
cp = __cp_nasap_p7__(T, substance)
cp = cal2J*cp
return cp
def cp_nasa_p7(T, substance):
"""
Calculates Heat Capacity cp - [J/(kg.K)]
from Nasa 7-coefficient Polynomials.
Cp/R = a1 + a2 T + a3 T^2 + a4 T^3 + a5 T^4
:param T: Temperature in C
:return: cp Heat Capacity/ Cp heat in [ J/(kg.K)]
"""
m = molarmass[substance]
T = units.c2k(T)
cp = __cp_nasap_p7__(T, substance)
cp = cal2J*cp/m
return cp
def s_nasa_p7(T, substance):
"""
S/R = a1 lnT + a2 T + a3 T^2 /2 + a4 T^3 /3 + a5 T^4 /4 + a7
"""
a = nasapoly7coefs[substance]
C = a[0]*log(T) + a[1]*T + a[2]*T**2 /2 + a[3]*T**3 /3 + a[4]*T**4 /4 + a[6]
C = C*R_
return round(C, 2)
def h_nasa_p7(T, substance ):
"""
:param T: Temperature in K
:param substance: Substance Name
"""
a = nasapoly7coefs[substance]
C = a[0] + a[1]*T/2 + a[2]*T**2 /3 + a[3]*T**3 /4 + a[4]*T**4 /5 + a[5]/T
H = R_*T*C
return H
"""
3.03399249E+00 2.17691804E-03-1.64072518E-07-9.70419870E-11 1.68200992E-14 2
-3.00042971E+04 4.96677010E+00 4.19864056E+00-2.03643410E-03 6.52040211E-06 3
-5.48797062E-09 1.77197817E-12-3.02937267E+04-8.49032208E-01
"""
|
"""Support for LightsControl logic"""
import logging
import voluptuous as vol
from homeassistant.const import (
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP)
# TODO: check if some CONF constants can be imported from homeassistant.const import CONF_XXX
import homeassistant.helpers.config_validation as cv
from homeassistant.exceptions import TemplateError
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
LIGHTS_CONTROL = None
DOMAIN = 'lights_control'
# VERSION = '1.0.0'
# Custom configuration names
CONF_SWITCH_MAP = "switch_map"
CONF_SENSOR_MAP = "sensor_map"
CONF_SENSOR_DEFAULT_SWITCH_OFF = "sensor_default_switch_off"
CONF_POWER_SAVE = "power_save"
CONF_ON_STATE = "on_state"
CONF_OFF_STATE = "off_state"
CONF_NOTIFY_TURN_OFF = "notify_turn_off"
CONF_AUTOMATION_MAP = "automation_map"
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
# TODO: deep schema
vol.Optional(CONF_SWITCH_MAP, default={}): dict,
vol.Optional(CONF_SENSOR_MAP, default={}): dict,
vol.Optional(CONF_SENSOR_DEFAULT_SWITCH_OFF, default=5): int,
vol.Optional(CONF_POWER_SAVE, default={}): dict,
vol.Optional(CONF_ON_STATE, default={}): dict,
vol.Optional(CONF_OFF_STATE, default={}): dict,
vol.Optional(CONF_NOTIFY_TURN_OFF, default={}): dict,
vol.Optional(CONF_AUTOMATION_MAP, default={}): dict,
}),
}, extra=vol.ALLOW_EXTRA) # TODO: what ALLOW_EXTRA is about?
ATTR_NAME = "name"
ATTR_VALUE = "value"
ATTR_NAME_TEMPLATE = "value_template"
ATTR_VALUE_TEMPLATE = "value_template"
SERVICE_SWITCH = "switch"
SERVICE_SWITCH_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.string,
}
)
SERVICE_SWITCH_TEMPLATE = "switch_template"
SERVICE_SWITCH_TEMPLATE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME_TEMPLATE): cv.template,
vol.Required(ATTR_VALUE_TEMPLATE): cv.template,
}
)
SERVICE_SENSOR = "sensor"
SERVICE_SENSOR_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
}
)
SERVICE_SENSOR_TEMPLATE = "sensor_template"
SERVICE_SENSOR_TEMPLATE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME_TEMPLATE): cv.template,
vol.Required(ATTR_VALUE_TEMPLATE): cv.template,
}
)
SERVICE_ON_LIGHT_CHANGE = "on_light_change"
SERVICE_ON_LIGHT_CHANGE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
}
)
SERVICE_ON_LIGHT_CHANGE_TEMPLATE = "on_light_change_template"
SERVICE_ON_LIGHT_CHANGE_TEMPLATE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME_TEMPLATE): cv.template,
vol.Required(ATTR_VALUE_TEMPLATE): cv.template,
}
)
SERVICE_WATCH_DOG = "watchdog"
SERVICE_WATCH_DOG_SCHEMA = vol.Schema(
{
}
)
SERVICE_RELOAD_GROUPS = "reload_groups"
SERVICE_RELOAD_GROUPS_SCHEMA = vol.Schema(
{
}
)
SERVICE_RESTART = "restart"
SERVICE_RESTART_SCHEMA = vol.Schema(
{
}
)
SERVICE_DUMP = "dump"
SERVICE_DUMP_SCHEMA = vol.Schema(
{
}
)
# NOTE: @bind_hass is to indicate that first argument is HASS
@bind_hass
def switch(
hass,
name,
value
):
"""React to flick of the switch"""
hass.services.call(
DOMAIN,
SERVICE_SWITCH,
{
ATTR_NAME: name,
ATTR_VALUE: value
},
)
@bind_hass
def switch_template(
hass,
name_template,
value_template
):
"""React to flick of the switch"""
hass.services.call(
DOMAIN,
SERVICE_SWITCH_TEMPLATE,
{
ATTR_NAME_TEMPLATE: name_template,
ATTR_VALUE_TEMPLATE: value_template
},
)
@bind_hass
def sensor(
hass,
name,
value
):
"""React to sensor trigger"""
hass.services.call(
DOMAIN,
SERVICE_SENSOR,
{
ATTR_NAME: name,
ATTR_VALUE: value
},
)
@bind_hass
def sensor_template(
hass,
name_template,
value_template
):
"""React to sensor trigger"""
hass.services.call(
DOMAIN,
SERVICE_SENSOR_TEMPLATE,
{
ATTR_NAME_TEMPLATE: name_template,
ATTR_VALUE_TEMPLATE: value_template
},
)
@bind_hass
def on_light_change(
hass,
name_template,
value_template
):
"""React to lights state change"""
hass.services.call(
DOMAIN,
SERVICE_ON_LIGHT_CHANGE_TEMPLATE,
{
ATTR_NAME_TEMPLATE: name_template,
ATTR_VALUE_TEMPLATE: value_template
},
)
@bind_hass
def watchdog(
hass
):
"""Do LightsControl automatics"""
hass.services.call(
DOMAIN,
SERVICE_WATCH_DOG,
{
},
)
@bind_hass
def reload_groups(
hass
):
"""Reload groups"""
hass.services.call(
DOMAIN,
SERVICE_RELOAD_GROUPS,
{
},
)
@bind_hass
def restart(
hass
):
"""Reload groups"""
hass.services.call(
DOMAIN,
SERVICE_RESTART,
{
},
)
@bind_hass
def dump(
hass
):
"""Dump"""
hass.services.call(
DOMAIN,
SERVICE_DUMP,
{
},
)
def setup(hass, config):
"""Setup LightsControl component."""
from .lights_control_core.lights_control import LightsControl
switch_map = config[DOMAIN].get(CONF_SWITCH_MAP, {})
sensor_map = config[DOMAIN].get(CONF_SENSOR_MAP, {})
sensor_default_switch_off = config[DOMAIN].get(CONF_SENSOR_DEFAULT_SWITCH_OFF, 5)
power_save = config[DOMAIN].get(CONF_POWER_SAVE, {})
on_state = config[DOMAIN].get(CONF_ON_STATE, {})
off_state = config[DOMAIN].get(CONF_OFF_STATE, {})
notify_turn_off = config[DOMAIN].get(CONF_NOTIFY_TURN_OFF, {})
automation_map = config[DOMAIN].get(CONF_AUTOMATION_MAP, {})
h = {'hass': hass, 'logger': _LOGGER, 'data': {}}
global LIGHTS_CONTROL
try:
LIGHTS_CONTROL = LightsControl(h, False,
on_state, off_state, power_save, notify_turn_off, switch_map, sensor_map,
sensor_default_switch_off, automation_map)
except Exception as e:
_LOGGER.error("LightsControl failed on creation due to exception {}".format(e))
return False
def stop_lights_control(event):
"""Stop the LightsControl service."""
global LIGHTS_CONTROL
LIGHTS_CONTROL = None
def start_lights_control(event):
"""Start the LightsControl service."""
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_lights_control)
def _render_name_value(template, data, render_name_data, render_value_data):
name = data.get(ATTR_NAME_TEMPLATE)
value = data.get(ATTR_VALUE_TEMPLATE)
try:
name.hass = hass
name = name.render(render_name_data)
except TemplateError as ex:
_LOGGER.error(
"Could not render %s's name_template: %s",
template,
ex,
)
name = None
if not isinstance(name, str):
_LOGGER.error(
"Rendered from %s name is not a string!",
template,
)
name = None
try:
value.hass = hass
value = value.render(render_value_data)
except TemplateError as ex:
_LOGGER.error(
"Could not render %s's value_template: %s",
template,
ex,
)
name = None
return name, value
def switch_service(call):
"""Handle calls to the switch service."""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
name = call.data.get(ATTR_NAME)
value = call.data.get(ATTR_VALUE)
if name is None:
name, value = _render_name_value("switch_template", call.data, {}, {})
if name is not None:
LIGHTS_CONTROL.switch(name, value)
else:
_LOGGER.warning("{}: failed to do switch call since LightsControl is not running".format(DOMAIN))
def sensor_service(call):
"""Handle calls to the sensor service."""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
name = call.data.get(ATTR_NAME)
value = call.data.get(ATTR_VALUE)
if name is None:
name, value = _render_name_value("sensor_template", call.data, {}, {})
if name is not None:
LIGHTS_CONTROL.sensor(name, value)
else:
_LOGGER.warning("{}: failed to do sensor call since LightsControl is not running".format(DOMAIN))
def on_light_change_service(call):
"""Handle lights state change"""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
name = call.data.get(ATTR_NAME, None)
value = call.data.get(ATTR_VALUE, None)
if name is None:
name, value = _render_name_value("on_light_change_template", call.data, {}, {})
if name is not None:
LIGHTS_CONTROL.on_light_change(name, value)
else:
_LOGGER.warning("{}: failed to do on_light_change call since LightsControl is not running".format(DOMAIN))
def watchdog_service(call):
"""Handle calls to the watchdog service."""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
LIGHTS_CONTROL.watchdog()
else:
_LOGGER.warning("{}: failed to do watchdog call since LightsControl is not running".format(DOMAIN))
def reload_groups_service(call):
"""Handle calls to the reload_groups service."""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
LIGHTS_CONTROL.reload_groups()
else:
_LOGGER.warning("{}: failed to do reload_groups call since LightsControl is not running".format(DOMAIN))
def restart_service(call):
"""Handle calls to the restart service."""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
LIGHTS_CONTROL.restart()
else:
_LOGGER.warning("{}: failed to do restart call since LightsControl is not running".format(DOMAIN))
def dump_service(call):
"""Handle calls to the dump service."""
global LIGHTS_CONTROL
if LIGHTS_CONTROL is not None:
LIGHTS_CONTROL.dump()
else:
_LOGGER.warning("{}: failed to do dump call since LightsControl is not running".format(DOMAIN))
services = [
(SERVICE_SWITCH, switch_service, SERVICE_SWITCH_SCHEMA),
(SERVICE_SENSOR, sensor_service, SERVICE_SENSOR_SCHEMA),
(SERVICE_ON_LIGHT_CHANGE, on_light_change_service, SERVICE_ON_LIGHT_CHANGE_SCHEMA),
(SERVICE_SWITCH_TEMPLATE, switch_service, SERVICE_SWITCH_TEMPLATE_SCHEMA),
(SERVICE_SENSOR_TEMPLATE, sensor_service, SERVICE_SENSOR_TEMPLATE_SCHEMA),
(SERVICE_ON_LIGHT_CHANGE_TEMPLATE, on_light_change_service, SERVICE_ON_LIGHT_CHANGE_TEMPLATE_SCHEMA),
(SERVICE_WATCH_DOG, watchdog_service, SERVICE_WATCH_DOG_SCHEMA),
(SERVICE_RELOAD_GROUPS, reload_groups_service, SERVICE_RELOAD_GROUPS_SCHEMA),
(SERVICE_RESTART, restart_service, SERVICE_RESTART_SCHEMA),
(SERVICE_DUMP, dump_service, SERVICE_DUMP_SCHEMA),
]
for s in services:
hass.services.register(
DOMAIN,
s[0],
s[1],
schema=s[2],
)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_lights_control)
return True
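# --- Hedged usage sketch (illustrative, not part of the component) ---
# Assuming `hass` is a running HomeAssistant core instance and setup() has
# succeeded, the helpers above forward events to the registered services:
#
#     switch(hass, name="switch.hallway", value="on")
#     sensor(hass, name="binary_sensor.hall_motion", value="on")
#     watchdog(hass)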
|
"""
Cameron, M. J., Tran, D. T., Abboud, J., Newton,
E. K., Rashidian, H., & Dupuis, J. Y. (2018).
Prospective external validation of three preoperative
risk scores for prediction of new onset atrial
fibrillation after cardiac surgery. Anesthesia &
Analgesia, 126(1), 33-38.
"""
#Author: Eduardo Valverde
import numpy as np
from afib import BaseRisk
POAF_PTS = [1,2,3,1,1,1,1,1,1]
def poaf(age, copd, egfr, emrgncy, pibp, lvef, vs):
"""Calculates the POAF score.
Args:
age: age of the person in years.
copd: t/f has chronic obstructive pulmonary disease.
egfr: estimated glomerular filtration rate.
emrgncy: t/f is emergency.
pibp: t/f has had preoperative intra-aortic balloon pump.
lvef: left ventricular ejection fraction (x/100).
vs: t/f has had valve surgery.
Returns:
the POAF score.
Raises:
TypeError: if bools not t/f, if ints/floats not a number.
"""
arr = np.array([60 <= age <= 69,
70 <= age <= 79,
age >= 80,
copd,
egfr < 15,
emrgncy,
pibp,
lvef < (30/100),
vs], dtype=int)
return arr.dot(POAF_PTS)
class PoafC(BaseRisk):
'''POAF score wrapper that reads the required fields from a row mapping.'''
def score(self, row):
return poaf(row["age"],
row["copd"],
row["egfr"],
row["emrgncy"],
row["pibp"],
row["lvef"],
row["vs"])
|
import inspect
from functools import wraps
import typing
from .compat import check_async
from .request import Request
from .response import Response
from .routing import HTTPRoute
from .views import View, Handler, get_handlers
HookFunction = typing.Callable[
[Request, Response, dict], typing.Awaitable[None]
]
HookCollection = typing.Dict[HTTPRoute, HookFunction]
BEFORE = "before"
AFTER = "after"
class Hooks:
"""Hooks manager."""
__slots__ = ()
def before(self, hook: HookFunction, *args, **kwargs):
"""Register a before hook on a handler.
# Parameters
hook (callable): a hook function.
"""
return self._hook_decorator(BEFORE, hook, *args, **kwargs)
def after(self, hook: HookFunction, *args, **kwargs):
"""Register an after hook on a handler.
# Parameters
hook (callable): a hook function.
"""
return self._hook_decorator(AFTER, hook, *args, **kwargs)
@staticmethod
def _prepare(hook, *args, **kwargs) -> HookFunction:
class_based = not inspect.isfunction(hook)
if class_based:
assert hasattr(
hook, "__call__"
), "class-based hooks must implement __call__()"
check_target = hook.__call__ if class_based else hook
name = (
hook.__class__.__name__ + ".__call__"
if class_based
else hook.__name__
)
check_async(check_target, reason=f"hook '{name}' must be asynchronous")
# Enclose args and kwargs
async def hook_func(req: Request, res: Response, params: dict):
await hook(req, res, params, *args, **kwargs)
return hook_func
def _hook_decorator(
self, hook_type: str, hook: HookFunction, *args, **kwargs
):
hook = self._prepare(hook, *args, **kwargs)
def attach_hook(view):
if inspect.isclass(view):
# Recursively apply hook to all view handlers.
for method, handler in get_handlers(view).items():
setattr(view, method, attach_hook(handler))
return view
return _with_hook(hook_type, hook, view)
return attach_hook
def _with_hook(hook_type: str, func: HookFunction, handler: Handler):
async def call_hook(args, kw):
if len(args) == 2:
req, res = args
else:
# method that has `self` as a first parameter
req, res = args[1:3]
assert isinstance(req, Request)
assert isinstance(res, Response)
await func(req, res, kw)
if hook_type == BEFORE:
@wraps(handler)
async def with_before_hook(*args, **kwargs):
await call_hook(args, kwargs)
await handler(*args, **kwargs)
return with_before_hook
assert hook_type == AFTER
@wraps(handler)
async def with_after_hook(*args, **kwargs):
await handler(*args, **kwargs)
await call_hook(args, kwargs)
return with_after_hook
# Pre-bind to module
_HOOKS = Hooks()
before = _HOOKS.before
after = _HOOKS.after
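# --- Hedged usage sketch (illustrative; assumes the (req, res, params) hook
# convention above and a function-based async view; `res.media` is an
# assumption about the Response API) ---
async def check_auth(req: Request, res: Response, params: dict):
    # a real hook might validate headers and raise an HTTP error here
    print("before handler:", params)

@before(check_auth)
async def index(req, res):
    res.media = {"hello": "world"}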
|
# import the necessary packages
from imutils import paths
import argparse
import cv2
import numpy
from scipy import misc
def variance_of_laplacian(image):
# compute the Laplacian of the image and then return the focus
# measure, which is simply the variance of the Laplacian
return cv2.Laplacian(image, cv2.CV_64F).var()
def var_of_sobel(image):
return cv2.Sobel(image, cv2.CV_64F, 1, 0).var()
def double_sobel(image):
gx = cv2.Sobel(image, cv2.CV_32F, 1, 0)
gy = cv2.Sobel(image, cv2.CV_32F, 0, 1)
#dnorm = numpy.sqrt(gx**2 + gy**2)
dm = cv2.magnitude(gx, gy)
#return numpy.average(dnorm)
return numpy.sum(dm)
def maxdouble_sobel(image):
gx = cv2.Sobel(image, cv2.CV_32F, 1, 0)
gy = cv2.Sobel(image, cv2.CV_32F, 0, 1)
#dnorm = numpy.sqrt(gx**2 + gy**2)
dm = cv2.magnitude(gx, gy)
return numpy.average(dm)
def variance_of_laplacian2(image):
# compute the Laplacian of the image and then return the focus
eimage = cv2.equalizeHist(image)
return cv2.Laplacian(eimage, cv2.CV_64F).var()
def max_of_laplacian(gray_image):
return numpy.max(cv2.convertScaleAbs(cv2.Laplacian(gray_image,3)))
def fft_evaluate(img_gry):
rows, cols = img_gry.shape
crow, ccol = rows//2, cols//2
f = numpy.fft.fft2(img_gry)
fshift = numpy.fft.fftshift(f)
fshift[crow-75:crow+75, ccol-75:ccol+75] = 0
f_ishift = numpy.fft.ifftshift(fshift)
img_fft = numpy.fft.ifft2(f_ishift)
img_fft = 20*numpy.log(numpy.abs(img_fft))
result = numpy.mean(img_fft)
return result
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--images", required=True,
help="path to input directory of images")
args = vars(ap.parse_args())
# loop over the input images
for i, imagePath in enumerate(sorted(paths.list_images(args["images"]))):
# load the image, convert it to grayscale, and compute the
# focus measure of the image using the Variance of Laplacian
# method
image = cv2.imread(imagePath)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
eqhist = cv2.equalizeHist(gray)
metrics = {}
height, width, _ = image.shape
cropped = image[50:height-50,50:width-50,:]
grayc = cv2.cvtColor(cropped, cv2.COLOR_RGB2GRAY)
lapvar = variance_of_laplacian(gray)
metrics['lapvar']=lapvar
lapvar2 = variance_of_laplacian2(gray)
metrics['lapvar2']=lapvar2
sobvar = var_of_sobel(gray)
metrics['sobvar']=sobvar
doubsob = double_sobel(gray)
metrics['doubsob']=doubsob
doubsob2 = double_sobel(image)
metrics['doubsob2']=doubsob2
eqdoubsob = double_sobel(eqhist)
metrics['hist eq doubsob']=eqdoubsob
maxdoubsob = maxdouble_sobel(image)
metrics['maxdoubsob']=maxdoubsob
lapmax = max_of_laplacian(gray)
metrics['lapmax']=lapmax
fft1 = fft_evaluate(gray)
metrics['fft1']=fft1
cv2.putText(image, '{t:.3f}'.format(t=doubsob2/1000), (10, height-10),
cv2.FONT_HERSHEY_DUPLEX, 0.5, (255, 0, 0), 1, 8, False)
misc.imsave('blur_class/{}.png'.format(i), image)
metricsc = {}
lapvar = variance_of_laplacian(grayc)
metricsc['lapvar']=lapvar
lapvar2 = variance_of_laplacian2(grayc)
metricsc['lapvar2']=lapvar2
sobvar = var_of_sobel(grayc)
metricsc['sobvar']=sobvar
doubsob = double_sobel(grayc)
metricsc['doubsob']=doubsob
doubsob2 = double_sobel(cropped)
metricsc['doubsob2']=doubsob2
eqdoubsob = double_sobel(eqhist)
metricsc['hist eq doubsob']=eqdoubsob
maxdoubsob = maxdouble_sobel(cropped)
metricsc['maxdoubsob']=maxdoubsob
lapmax = max_of_laplacian(grayc)
metricsc['lapmax']=lapmax
fft1 = fft_evaluate(grayc)
metricsc['fft1']=fft1
cv2.putText(cropped, '{t:.3f}'.format(t=doubsob2/1000), (10, height-10),
cv2.FONT_HERSHEY_DUPLEX, 0.5, (255, 0, 0), 1, 8, False)
misc.imsave('cropped/{}_{}.png'.format(i, doubsob2/1000), cropped)
# show the image
#cv2.putText(image, "{}: {:.2f}".format(text, fm), (10, 30),
# cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 255), 3)
#cv2.imshow("Image", image)
#key = cv2.waitKey(0)
print(imagePath, '->', 'Not Blurry' if doubsob > 1000000.0 else 'Blurry')
print(metrics)
print(metricsc, '\n')
|
from rest_framework.generics import ListAPIView,CreateAPIView
from SavedPostAPP.models import ModelSavedPost
from .serializers import SerializerSavedUserPost,SerializerCreateSavePost
from PostAPP.models import ModelPost
from django.shortcuts import get_object_or_404
class SavedUserPostListAPIView(ListAPIView):
# View that lists the current user's saved posts
serializer_class = SerializerSavedUserPost
def get_queryset(self):
return ModelSavedPost.objects.filter(user=self.request.user)
class CreateSavedPostAPIView(CreateAPIView):
    # View used to add a post to the user's saved posts
queryset = ModelSavedPost.objects.all()
serializer_class = SerializerCreateSavePost
def perform_create(self, serializer):
post = get_object_or_404(ModelPost,unique_id=self.kwargs.get("unique_id"))
serializer.save(user=self.request.user,post=post)
|
"""
Functions and objects describing optical components.
"""
from arch.block import Block
from arch.connectivity import Connectivity
from arch.models.model import Linear, LinearGroupDelay
from sympy import Matrix, sqrt, exp, I, eye
import arch.port as port
import numpy as np
class Beamsplitter(Block):
reference_prefix = "BS"
def define(self, R=0.5):
self.add_port(name='in0', kind=port.kind.optical, direction=port.direction.inp)
self.add_port(name='in1', kind=port.kind.optical, direction=port.direction.inp)
self.add_port(name='out0', kind=port.kind.optical, direction=port.direction.out)
self.add_port(name='out1', kind=port.kind.optical, direction=port.direction.out)
r = self.add_port(name='R', kind=port.kind.real, direction=port.direction.inp,
default=R)
M = Matrix([
[sqrt(r), I*sqrt(1 - r)],
[I*sqrt(1 - r), sqrt(r)] ])
self.add_model(Linear('simple R '+self.name, block=self, unitary_matrix=M))
class PhaseShifter(Block):
reference_prefix = "P"
def define(self, phi=None):
self.add_port(name='inp', kind=port.kind.optical, direction=port.direction.inp)
self.add_port(name='out', kind=port.kind.optical, direction=port.direction.out)
p = self.add_port(name='phi', kind=port.kind.real, direction=port.direction.inp,
default=phi)
M = Matrix([[exp(I*p)]])
self.add_model(Linear('simple phase '+self.name, block=self, unitary_matrix=M))
self.add_model(LinearGroupDelay('group delay phase '+self.name, block=self,
unitary_matrix=M, delay=1))
class MachZehnder(Block):
reference_prefix = "MZ"
def define(self, R0=1/2, R1=1/2):
bs0 = Beamsplitter(R=R0)
bs1 = Beamsplitter(R=R1)
ps = PhaseShifter()
con = Connectivity([
(bs0.out0, ps.inp),
(ps.out, bs1.in0),
(bs0.out1, bs1.in1) ])
self.use_port(name='in0', original=bs0.in0)
self.use_port(name='in1', original=bs0.in1)
self.use_port(name='out0', original=bs1.out0)
self.use_port(name='out1', original=bs1.out1)
self.use_port(name='phi', original=ps.phi)
self.use_port(name='R0', original=bs0.R)
self.use_port(name='R1', original=bs1.R)
self.add_model(
Linear.compound(
'compound '+self.name,
models=[bs0.model, ps.model, bs1.model],
connectivity=con))
class RingResonator(Block):
reference_prefix = "RR"
def define(self, R=None, phi=None):
bs = Beamsplitter(R=R)
ps = PhaseShifter()
con = Connectivity([
(bs.out0, ps.inp),
(ps.out, bs.in0) ])
self.use_port(name='in', original=bs.in1)
self.use_port(name='out', original=bs.out1)
self.use_port(name='phi', original=ps.phi)
self.use_port(name='R', original=bs.R)
raise NotImplementedError("TODO")
class Interferometer(Block):
"""
Class to calculate the evolution of a quantum state through
an interferometer described by some unitary.
For the moment must be instantiated with a unitary matrix.
"""
reference_prefix = "IF"
def define(self, unitary):
self.add_port(name='out0', kind=port.kind.photonic, direction=port.direction.out)
self.add_port(name='out1', kind=port.kind.photonic, direction=port.direction.out)
self.add_port(name='in1', kind=port.kind.photonic, direction=port.direction.inp)
input_state=self.add_port(name='in0', kind=port.kind.photonic, direction=port.direction.inp) #Create new "quantum" port type?
#U=self.add_port(name='unitary', kind=port.kind.real, direction=port.direction.inp)
self.add_model(Linear('simple R '+self.name, block=self, unitary_matrix=unitary))
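# Illustrative usage sketch (not part of the original module): constructing an
# Interferometer from a 2x2 unitary, following the keyword-argument pattern the
# other blocks above use (e.g. Beamsplitter(R=0.5)). The matrix below is the
# standard 50:50 beamsplitter unitary and is purely an example; passing
# `unitary` as a keyword to the constructor is assumed, not verified.
#
#   U = Matrix([[1, I], [I, 1]]) / sqrt(2)
#   interferometer = Interferometer(unitary=U)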
|
import numpy as np
from munkres import munkres
def test_big(k):
a = np.empty((k,k))
for i in range(k):
for j in range(k):
a[i,j] = (i+1)*(j+1)
b = munkres(a)
    print(k, b)
if __name__ == '__main__':
for i in range(256):
test_big(i+1)
|
import graphene
from django.db import models
from django.conf import settings
from modelcluster.fields import ParentalKey
from modelcluster.contrib.taggit import ClusterTaggableManager
from taggit.models import TaggedItemBase
from wagtail.core.models import Page, Orderable
from wagtail.core.fields import StreamField
from wagtail.contrib.settings.models import BaseSetting, register_setting
from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel, InlinePanel
from wagtail.snippets.models import register_snippet
from wagtail.snippets.edit_handlers import SnippetChooserPanel
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.documents.edit_handlers import DocumentChooserPanel
from wagtail_headless_preview.models import HeadlessPreviewMixin
from wagtailmedia.edit_handlers import MediaChooserPanel
from grapple.helpers import (
register_query_field,
register_paginated_query_field,
register_singular_query_field,
)
from grapple.utils import resolve_paginated_queryset
from grapple.models import (
GraphQLString,
GraphQLSnippet,
GraphQLStreamfield,
GraphQLForeignKey,
GraphQLImage,
GraphQLDocument,
GraphQLMedia,
GraphQLCollection,
GraphQLPage,
GraphQLTag,
)
from grapple.middleware import IsAnonymousMiddleware
from home.blocks import StreamFieldBlock
document_model_string = getattr(
settings, "WAGTAILDOCS_DOCUMENT_MODEL", "wagtaildocs.Document"
)
@register_singular_query_field("simpleModel")
class SimpleModel(models.Model):
pass
class HomePage(Page):
pass
class AuthorPage(Page):
name = models.CharField(max_length=255)
content_panels = Page.content_panels + [FieldPanel("name")]
graphql_fields = [GraphQLString("name")]
class BlogPageTag(TaggedItemBase):
content_object = ParentalKey(
"BlogPage", related_name="tagged_items", on_delete=models.CASCADE
)
@register_singular_query_field("first_post", middleware=[IsAnonymousMiddleware])
@register_paginated_query_field("blog_page", middleware=[IsAnonymousMiddleware])
@register_query_field("post", middleware=[IsAnonymousMiddleware])
class BlogPage(HeadlessPreviewMixin, Page):
date = models.DateField("Post date")
advert = models.ForeignKey(
"home.Advert",
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name="+",
)
hero_image = models.ForeignKey(
"images.CustomImage",
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name="+",
)
book_file = models.ForeignKey(
document_model_string,
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name="+",
)
featured_media = models.ForeignKey(
"wagtailmedia.Media",
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name="+",
)
author = models.ForeignKey(
AuthorPage, null=True, blank=True, on_delete=models.SET_NULL, related_name="+"
)
body = StreamField(StreamFieldBlock())
tags = ClusterTaggableManager(through=BlogPageTag, blank=True)
content_panels = Page.content_panels + [
FieldPanel("date"),
ImageChooserPanel("hero_image"),
StreamFieldPanel("body"),
FieldPanel("tags"),
InlinePanel("related_links", label="Related links"),
InlinePanel("authors", label="Authors"),
FieldPanel("author"),
SnippetChooserPanel("advert"),
DocumentChooserPanel("book_file"),
MediaChooserPanel("featured_media"),
]
@property
def copy(self):
return self
def paginated_authors(self, info, **kwargs):
return resolve_paginated_queryset(self.authors, info, **kwargs)
graphql_fields = [
GraphQLString("date", required=True),
GraphQLStreamfield("body"),
GraphQLTag("tags"),
GraphQLCollection(
GraphQLForeignKey,
"related_links",
"home.blogpagerelatedlink",
required=True,
item_required=True,
),
GraphQLCollection(GraphQLString, "related_urls", source="related_links.url"),
GraphQLCollection(GraphQLString, "authors", source="authors.person.name"),
GraphQLCollection(
GraphQLForeignKey,
"paginated_authors",
"home.Author",
is_paginated_queryset=True,
),
GraphQLSnippet("advert", "home.Advert"),
GraphQLImage("hero_image"),
GraphQLDocument("book_file"),
GraphQLMedia("featured_media"),
GraphQLForeignKey("copy", "home.BlogPage"),
GraphQLPage("author"),
]
class BlogPageRelatedLink(Orderable):
page = ParentalKey(BlogPage, on_delete=models.CASCADE, related_name="related_links")
name = models.CharField(max_length=255)
url = models.URLField()
panels = [FieldPanel("name"), FieldPanel("url")]
graphql_fields = [GraphQLString("name"), GraphQLString("url")]
@register_snippet
class Person(models.Model):
name = models.CharField(max_length=255)
job = models.CharField(max_length=255)
def __str__(self):
return self.name
panels = [FieldPanel("name"), FieldPanel("job")]
graphql_fields = [GraphQLString("name"), GraphQLString("job")]
class Author(Orderable):
page = ParentalKey(BlogPage, on_delete=models.CASCADE, related_name="authors")
role = models.CharField(max_length=255)
person = models.ForeignKey(
Person, null=True, blank=True, on_delete=models.SET_NULL, related_name="+"
)
panels = [FieldPanel("role"), SnippetChooserPanel("person")]
graphql_fields = [GraphQLString("role"), GraphQLForeignKey("person", Person)]
@register_snippet
@register_query_field(
"advert",
"adverts",
{"url": graphene.String()},
required=True,
plural_required=True,
plural_item_required=True,
)
class Advert(models.Model):
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
panels = [FieldPanel("url"), FieldPanel("text")]
graphql_fields = [GraphQLString("url"), GraphQLString("text")]
def __str__(self):
return self.text
@register_setting
class SocialMediaSettings(BaseSetting):
facebook = models.URLField(help_text="Your Facebook page URL")
instagram = models.CharField(
max_length=255, help_text="Your Instagram username, without the @"
)
trip_advisor = models.URLField(help_text="Your Trip Advisor page URL")
youtube = models.URLField(help_text="Your YouTube channel or user account URL")
graphql_fields = [
GraphQLString("facebook"),
GraphQLString("instagram"),
GraphQLString("trip_advisor"),
GraphQLString("youtube"),
]
|
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from google.oauth2 import id_token
from google.auth.transport import requests
from flashsale.misc.provider.ProviderBase import ProviderBase
from flashsale.misc.lib.exceptions import OAuthAuthenticationError
# refer to https://developers.google.com/identity/sign-in/web/backend-auth
class Google(ProviderBase):
def verify_id_token(self, token, email):
try:
# Specify the CLIENT_ID of the app that accesses the backend:
id_info = id_token.verify_oauth2_token(token, requests.Request(), settings.FLASHSALE_GOOGLE_CLIENT_ID)
if id_info['iss'] not in ['accounts.google.com', 'https://accounts.google.com']:
                raise OAuthAuthenticationError(_('Google OAuth Authentication Error: invalid issuer.'))
if id_info['email'] != email:
                raise OAuthAuthenticationError(_('Google OAuth Authentication Error: the token email does not match the supplied email address.'))
except OAuthAuthenticationError as inst:
raise OAuthAuthenticationError(inst.detail)
except Exception as e:
            raise OAuthAuthenticationError(_('Google OAuth Authentication Error. ') + str(e))
user_info = {'email': id_info['email'], 'name': id_info.get('name', None), 'picture': id_info.get('picture', None)}
return user_info
|
# Copyright 2022, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for learning rate scheduling."""
import collections
from typing import Callable
import tensorflow as tf
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.learning.optimizers import optimizer as optimizer_base
_LEARNING_RATE_KEY = optimizer_base.LEARNING_RATE_KEY
def schedule_learning_rate(
optimizer: optimizer_base.Optimizer,
schedule_fn: Callable[[int], float]) -> optimizer_base.Optimizer:
"""Returns an optimizer with scheduled learning rate.
The returned optimizer will use a learning rate of `schedule_fn(i)` for the
`i`-th invocation of its `next` method, indexing from 0.
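  Example (an illustrative sketch, not part of the original docstring; assumes
  the SGD factory `tff.learning.optimizers.build_sgdm` available in recent TFF
  releases):

    sgdm = tff.learning.optimizers.build_sgdm(learning_rate=0.1)
    scheduled_sgdm = schedule_learning_rate(
        sgdm, lambda round_num: 0.1 * tf.pow(0.95, tf.cast(round_num, tf.float32)))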
Args:
optimizer: A `tff.learning.optimizers.Optimizer` which uses a learning rate.
    schedule_fn: A callable mapping an integer round number to a floating point
      learning rate. It will be invoked in the context of a `tff.tf_computation`,
      so it must support a `tf.Tensor` input.
Returns:
A `tff.learning.optimizers.Optimizer`.
Raises:
KeyError: If the provided `optimizer`'s state is not a dictionary with
learning rate stored under the `tff.learning.optimizers.LEARNING_RATE_KEY`
key.
"""
return _ScheduledLROptimizer(optimizer, schedule_fn)
class _ScheduledLROptimizer(optimizer_base.Optimizer):
"""Optimizer with scheduled learning rate."""
def __init__(self, optimizer: optimizer_base.Optimizer,
schedule_fn: Callable[[int], float]):
py_typecheck.check_type(optimizer, optimizer_base.Optimizer)
py_typecheck.check_callable(schedule_fn)
self._optimizer = optimizer
self._schedule_fn = schedule_fn
def initialize(self, specs):
optimizer_state = self._optimizer.initialize(specs)
_check_lr_exists(optimizer_state)
round_num = tf.constant(0, tf.int32)
optimizer_state[_LEARNING_RATE_KEY] = self._schedule_fn(round_num)
return collections.OrderedDict(
round_num=round_num, optimizer=optimizer_state)
def next(self, state, weights, gradients):
optimizer_state, weights = self._optimizer.next(state['optimizer'], weights,
gradients)
round_num = state['round_num'] + 1
optimizer_state[_LEARNING_RATE_KEY] = self._schedule_fn(round_num)
new_state = collections.OrderedDict(
round_num=round_num, optimizer=optimizer_state)
return new_state, weights
def _check_lr_exists(optimizer_state):
if _LEARNING_RATE_KEY not in optimizer_state:
raise KeyError(
'Optimizer to be scheduled must have learning rate under '
'`tff.learning.optimizer.LEARNING_RATE_KEY` key in its state. Found '
f'optimizer state: {optimizer_state}')
|
import socket
from random import randrange
from common import *
def rand_point():
return Point(x=randrange(100), y=randrange(100))
logger = Logger(also_print = True)
parser = Parser(logger)
sock = socket.create_connection(SERVER_ADDR)
f = sock.makefile("rwb", 0)
vec = Vector(p1=rand_point(), p2=rand_point())
logger.log_and_write(f, vec)
pt = parser.parse(f)
assert vec.p1.x < pt.x < vec.p2.x or vec.p1.x > pt.x > vec.p2.x
assert vec.p1.y < pt.y < vec.p2.y or vec.p1.y > pt.y > vec.p2.y
rect = Rectangle(points=[Point(x=1, y=1),
Point(x=1, y=5),
Point(x=5, y=1),
Point(x=5, y=5)])
logger.log_and_write(f, rect)
pt = parser.parse(f)
assert pt.x == pt.y == 1
points = [rand_point() for i in range(10)]
logger.log_and_write(f, PointGroup(count=10, points=points))
rect = parser.parse(f)
assert rect.code == Rectangle.code.always
sock.close()
|
def index(request):
request.render_template("home/shop.html") |
import os
from root import *
from preprocessing.data_utils import *
import numpy as np
import pandas as pd
import seaborn as sns
import scipy
from datetime import datetime, timedelta
from sklearn.preprocessing import LabelEncoder
pd.set_option('display.max_columns', 100)
data = pd.read_pickle(root+"/data/interim/data_xgboost.pkl")
'''Create lag variables: volume at previous time points'''
n = 28
lags = list(range(1, (n+1)))
for lag in lags:
data[f'Vol_t-{lag}'] = data.sort_values('Date').groupby\
(['ProductCode', 'UnitPrice'])['Volume'].shift(lag)
'''Remove first n dates for which not all lag variables are available'''
data = data[data['Date'] > (data.Date.min() + timedelta(days=max(lags)))]
'''Replace ProductCode with ordinal encodings'''
# TODO: Replace ordinal encoding with learned feature embeddings with
# invoice and ProductCode
data['ProductCode_ordinal'] = LabelEncoder().\
fit_transform(data['ProductCode'])
data = data.drop('ProductCode', axis=1)
'''Sort on date and ProductCode'''
data = data.sort_values(['Date', 'ProductCode_ordinal'])
'''Fill missing values with zero'''
data = data.fillna(value=0)
'''Create Weekday variable'''
data['Weekday'] = data['Date'].dt.dayofweek
'''Train-test split'''
nr_dates = data['Date'].nunique()
train_proportion = int(nr_dates*0.7)
train_cutoff_date = data.Date.min() + \
timedelta(days=(max(lags)+train_proportion))
train = data[data['Date'] <= train_cutoff_date]
test = data[data['Date'] > train_cutoff_date]
'''Input for future predictions'''
future = data[data['Date'] == (data.Date.max())]
'''Separate the target values from the predictors'''
train_y = train['Volume']
train_x = train.drop('Volume', axis=1)
del train
test_y = test['Volume']
test_x = test.drop('Volume', axis=1)
del test
train_all_y = data['Volume']
train_all_x = data.drop('Volume', axis=1)
del data
'''Replace Date with month and day ordinal variables'''
train_x = month_and_day_from_date(train_x)
test_x = month_and_day_from_date(test_x)
train_all_x = month_and_day_from_date(train_all_x)
'''To avoid overfitting, limit the number of lag variables: use only
last 7 days and 14, 21, and 28 days ago'''
limited_vars = ['UnitPrice', 'Vol_t-1', 'Vol_t-2', 'Vol_t-3',
'Vol_t-4', 'Vol_t-5', 'Vol_t-6', 'Vol_t-7',
'Vol_t-14', 'Vol_t-21', 'Vol_t-28',
'ProductCode_ordinal', 'Month', 'Day', 'Weekday']
train_x = train_x[limited_vars]
test_x = test_x[limited_vars]
train_all_x = train_all_x[limited_vars]
'''Write the train/test splits and the future prediction input to disk'''
train_x.to_csv(root+"/data/interim/train_x.csv", header=True,
index=False)
train_y.to_csv(root+"/data/interim/train_y.csv", header=True,
index=False)
test_x.to_csv(root+"/data/interim/test_x.csv", header=True,
index=False)
test_y.to_csv(root+"/data/interim/test_y.csv", header=True,
index=False)
train_all_x.to_csv(root+"/data/interim/train_all_x.csv", header=True,
index=False)
train_all_y.to_csv(root+"/data/interim/train_all_y.csv", header=True,
index=False)
future.to_csv(root+"/data/interim/future_input.csv", header=True,
index=False)
|
import os; import sys; sys.path.insert(1, os.path.join(sys.path[0], '..'))
import prince
from sklearn import datasets
X, y = datasets.load_iris(return_X_y=True)
pca = prince.PCA(rescale_with_mean=True, rescale_with_std=True, n_components=2).fit(X)
print('Eigenvalues')
print(pca.eigenvalues_)
print(pca.explained_inertia_)
print('---')
print('U')
print(pca.U_[:5])
print('---')
print('V')
print(pca.V_)
print('---')
print('s')
print(pca.s_)
print('---')
print('Row coords')
print(pca.row_coordinates(X)[:5])
print('---')
|
import unittest
import numpy as np
from chainer import testing
from chainer.datasets import TupleDataset
from chainercv.datasets import SiameseDataset
from chainercv.utils.testing.assertions.assert_is_image import assert_is_image
N = 15
@testing.parameterize(
# Positive and negative samples
{'labels_0': np.arange(N, dtype=np.int32) % 3,
'labels_1': np.arange(N, dtype=np.int32) % 3,
'pos_exist': True, 'neg_exist': True,
},
# No positive
{'labels_0': np.zeros(N, dtype=np.int32),
'labels_1': np.ones(N, dtype=np.int32),
'pos_exist': False, 'neg_exist': True
},
# No negative
{'labels_0': np.ones(N, dtype=np.int32),
'labels_1': np.ones(N, dtype=np.int32),
'pos_exist': True, 'neg_exist': False},
)
class TestSiameseDataset(unittest.TestCase):
img_shape = (3, 32, 48)
def setUp(self):
np.random.shuffle(self.labels_0)
np.random.shuffle(self.labels_1)
self.dataset_0 = TupleDataset(
np.random.uniform(size=(N,) + self.img_shape), self.labels_0)
self.dataset_1 = TupleDataset(
np.random.uniform(size=(N,) + self.img_shape), self.labels_1)
self.n_class = np.max((self.labels_0, self.labels_1)) + 1
def _check_example(self, example):
assert_is_image(example[0])
self.assertEqual(example[0].shape, self.img_shape)
assert_is_image(example[2])
self.assertEqual(example[2].shape, self.img_shape)
self.assertIsInstance(example[1], np.int32)
self.assertEqual(example[1].ndim, 0)
self.assertTrue(example[1] >= 0 and example[1] < self.n_class)
self.assertIsInstance(example[3], np.int32)
self.assertEqual(example[3].ndim, 0)
        self.assertTrue(example[3] >= 0 and example[3] < self.n_class)
def test_no_pos_ratio(self):
dataset = SiameseDataset(self.dataset_0, self.dataset_1)
for i in range(10):
example = dataset[i]
self._check_example(example)
self.assertEqual(len(dataset), N)
def test_pos_ratio(self):
if self.pos_exist and self.neg_exist:
dataset = SiameseDataset(self.dataset_0, self.dataset_1, 0.5,
labels_0=self.labels_0,
labels_1=self.labels_1)
for i in range(10):
example = dataset[i]
self._check_example(example)
self.assertEqual(len(dataset), N)
else:
with self.assertRaises(ValueError):
dataset = SiameseDataset(self.dataset_0, self.dataset_1, 0.5,
labels_0=self.labels_0,
labels_1=self.labels_1)
def test_pos_ratio_equals_0(self):
if self.neg_exist:
dataset = SiameseDataset(self.dataset_0, self.dataset_1, 0)
for i in range(10):
example = dataset[i]
self._check_example(example)
if self.neg_exist:
self.assertNotEqual(example[1], example[3])
self.assertEqual(len(dataset), N)
else:
with self.assertRaises(ValueError):
dataset = SiameseDataset(self.dataset_0, self.dataset_1, 0)
def test_pos_ratio_equals_1(self):
if self.pos_exist:
dataset = SiameseDataset(self.dataset_0, self.dataset_1, 1)
for i in range(10):
example = dataset[i]
self._check_example(example)
if self.pos_exist:
self.assertEqual(example[1], example[3])
self.assertEqual(len(dataset), N)
else:
with self.assertRaises(ValueError):
dataset = SiameseDataset(self.dataset_0, self.dataset_1, 1)
def test_length_manual(self):
dataset = SiameseDataset(self.dataset_0, self.dataset_1, length=100)
self.assertEqual(len(dataset), 100)
testing.run_module(__name__, __file__)
|
from setuptools import setup
setup(name='redwood-cli',
version='0.1.0',
description='Redwood Tracker CLI',
url='http://github.com/kespindler/redwood-cli',
author='Kurt Spindler',
author_email='[email protected]',
license='MIT',
packages=['redwood_cli'],
zip_safe=False,
entry_points={
'console_scripts': [
'redwood-cli=redwood_cli:main',
],
},
)
|
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Model
from keras.layers import Dense,GlobalAveragePooling2D
from keras.applications import MobileNet
from keras.applications.mobilenet import preprocess_input
# Module for training the transfer learning model for car type classification
base_model = MobileNet(weights='imagenet', include_top=False)
# Defining layers for the model
x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dense(1024,activation='relu')(x) # adding dense layers so that the model can learn complex functions and for better classification results
x = Dense(1024,activation='relu')(x) #dense layer 2
x = Dense(512,activation='relu')(x) #dense layer 3
preds = Dense(2,activation='softmax')(x) #final layer with softmax activation
model = Model(inputs=base_model.input,outputs=preds)
for layer in model.layers[:28]:
layer.trainable = False
# Reading the images for the type classification
train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input) #included in our dependencies
train_generator = train_datagen.flow_from_directory('cars/images', target_size=(224,224), color_mode='rgb', batch_size=128,
class_mode='categorical', shuffle=True)
# Compiling the model with Adam optimizer, loss function as categorical cross entropy
# and evaluation metric as accuracy
model.compile(optimizer='Adam',loss='categorical_crossentropy',metrics=['accuracy'])
# Training the model
step_size_train=train_generator.n//train_generator.batch_size
model.fit_generator(generator=train_generator,
steps_per_epoch=step_size_train,
epochs=10)
# Saving the model as json object
model_json = model.to_json()
with open("model.json","w") as json_file:
json_file.write(model_json)
model.save_weights("model.h5")
print("Saved model")
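# Illustrative sketch (not part of the original script): reloading the saved
# architecture and weights for inference. `batch_of_images` is a hypothetical
# array of shape (n, 224, 224, 3) preprocessed the same way as the training data.
#
#   from keras.models import model_from_json
#   with open("model.json") as json_file:
#       loaded_model = model_from_json(json_file.read())
#   loaded_model.load_weights("model.h5")
#   probs = loaded_model.predict(preprocess_input(batch_of_images))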
|
'''
Paper : Incremental Domain Adaptation with Smoothing and Calibration for Surgical Report Generation
Note : Dataloader for incremental learning
'''
import os
import sys
import random
import numpy as np
from glob import glob
from PIL import Image
import torch
from torch.utils.data import Dataset, DataLoader
if sys.version_info[0] == 2:
import xml.etree.cElementTree as ET
else:
import xml.etree.ElementTree as ET
class SurgicalClassDataset18_incremental(Dataset):
def __init__(self, classes, memory=None, fine_tune_size=None,is_train=None):
self.is_train = is_train
if self.is_train: self.dir_root_gt = '/media/mmlab/dataset/global_dataset/Classification_dataset/train/'
else: self.dir_root_gt = '/media/mmlab/dataset/global_dataset/Classification_dataset/test/'
self.xml_dir_list = []
self.img_dir_list = []
self.classes = classes
if memory is not None: self.memory=memory
else: self.memory=[]
xml_dir_temp = self.dir_root_gt + '*.png'
self.xml_dir_list = self.xml_dir_list + glob(xml_dir_temp)
for _img_dir in self.xml_dir_list:
_target = int(_img_dir[:-4].split('_')[-1:][0])
if _target in self.classes:
self.img_dir_list.append(_img_dir)
random.shuffle(self.img_dir_list)
if fine_tune_size is not None:
self.img_dir_list = self.memory + self.img_dir_list[0: fine_tune_size]
else:
self.img_dir_list = self.img_dir_list + self.memory
def __len__(self):
return len(self.img_dir_list)
def __getitem__(self, index):
_img_dir = self.img_dir_list[index]
_img = Image.open(_img_dir).convert('RGB')
_img = np.asarray(_img, np.float32) / 255
_img = torch.from_numpy(np.array(_img).transpose(2, 0, 1,)).float()
_target = int(_img_dir[:-4].split('_')[-1:][0])
_target = torch.from_numpy(np.array(_target)).long()
return _img, _target
class SurgicalClassDataset18_incremental_transform(Dataset):
def __init__(self, classes, memory=None, fine_tune_size=None, transform=None, is_train=None):
self.is_train = is_train
if self.is_train: self.dir_root_gt = '/media/mmlab/dataset/global_dataset/Classification_dataset/train/'
else: self.dir_root_gt = '/media/mmlab/dataset/global_dataset/Classification_dataset/test/'
self.xml_dir_list = []
self.img_dir_list = []
self.classes = classes
if memory is not None: self.memory=memory
else: self.memory=[]
self.transform = transform
xml_dir_temp = self.dir_root_gt + '*.png'
self.xml_dir_list = self.xml_dir_list + glob(xml_dir_temp)
for _img_dir in self.xml_dir_list:
_target = int(_img_dir[:-4].split('_')[-1:][0])
if _target in self.classes:
self.img_dir_list.append(_img_dir)
random.shuffle(self.img_dir_list)
if fine_tune_size is not None:
self.img_dir_list = self.memory + self.img_dir_list[0: fine_tune_size]
else:
self.img_dir_list = self.img_dir_list + self.memory
def __len__(self):
return len(self.img_dir_list)
def __getitem__(self, index):
_img_dir = self.img_dir_list[index]
_img = Image.open(_img_dir).convert('RGB')
if self.transform:
_img = self.transform(_img)
_target = int(_img_dir[:-4].split('_')[-1:][0])
_target = torch.from_numpy(np.array(_target)).long()
return _img, _target
def memory_managment(classes, fine_tune_size):
dir_root_gt = '/media/mmlab/dataset/global_dataset/Classification_dataset/train/'
xml_dir_list = []
img_dir_list = []
xml_dir_temp = dir_root_gt + '*.png'
xml_dir_list = xml_dir_list + glob(xml_dir_temp)
for _img_dir in xml_dir_list:
_target = int(_img_dir[:-4].split('_')[-1:][0])
if _target in classes:
img_dir_list.append(_img_dir)
random.shuffle(img_dir_list)
'------------------------------------------------------------'
    '''
    Selection of new_added_memory per incremental period:
      period 0: choose some samples from classes 0-9 to form memory_0;
      period 1: choose some samples from classes 10-11 to form memory_1.
    '''
new_added_memory = img_dir_list[0: fine_tune_size]
return new_added_memory
class TwoCropTransform:
"""Create two crops of the same image"""
def __init__(self, transform):
self.transform = transform
def __call__(self, x):
return [self.transform(x), self.transform(x)]
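# Illustrative sketch (not part of the original module): building a training
# loader for the first incremental period (classes 0-9). The dataset root paths
# hard-coded above are assumed to exist on the training machine.
#
#   train_set = SurgicalClassDataset18_incremental(classes=list(range(10)), is_train=True)
#   train_loader = DataLoader(train_set, batch_size=32, shuffle=True, num_workers=2)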
|
# coding: utf-8
from celery import shared_task
import geocoder
from django.contrib.gis.geos import Point
@shared_task
def update_coordinates(id, address):
from .models import Property
    g = geocoder.google(address)
    # GEOS Point expects (x, y) == (longitude, latitude)
    Property.objects.filter(pk=id).update(point=Point(g.lng, g.lat))
|
import sys
import time
import qi
import numpy as np
import cv2
import cv2.aruco
import almath
import math
import signal
from camera_config import *
class Authenticator:
def __init__(self, user, pswd):
self.user = user
self.pswd = pswd
def initialAuthData(self):
cm = {'user': self.user, 'token': self.pswd}
return cm
class ClientFactory:
def __init__(self, user, pswd):
self.user = user
self.pswd = pswd
def newAuthenticator(self):
return Authenticator(self.user, self.pswd)
class CameraImage:
def __init__(self, ip):
self.ip = ip
self.aruco = cv2.aruco
self.dictionary = self.aruco.getPredefinedDictionary(self.aruco.DICT_4X4_1000)
self.image_remote = None
self.subscriber_id = None
signal.signal(signal.SIGINT, self.handler)
self.connect()
def handler(self, signal, frame):
self.unsubscribe()
def connect(self):
try:
try:
self.session = qi.Session()
port = 9559
self.session.connect("tcp://" + self.ip + ":" + str(port))
except Exception as errorMsg:
try:
self.session = qi.Session()
factory = ClientFactory("nao", "nao")
self.session.setClientAuthenticatorFactory(factory)
self.session.connect('tcps://{ip}:9503'.format(ip=self.ip))
print("ok connection")
except Exception as errorMsg2:
print(errorMsg2)
self.camera = self.session.service("ALVideoDevice")
self.motion = self.session.service("ALMotion")
except Exception as errorMsg3:
print("Error when creating proxy: => " + str(errorMsg3))
self.subscribe()
self.run()
def subscribe(self, params=DEFAULT_PARAMS):
if self.subscriber_id is None:
self.params = dict(DEFAULT_PARAMS) # copy default params
self.params.update(params)
# subscribe
camera = self.params["camera"]
resolution = self.params["resolution"]
color_space, channels = self.params["color_space_and_channels"]
self.params["dictionary"] = cv2.aruco.getPredefinedDictionary(self.params["dictionary"])
fps = CAMERA_DATAS_AT_RESOLUTION[resolution]["fps"]
self.subscriber_id = self.camera.subscribeCamera(SUBSCRIBER_ID,
camera,
resolution,
color_space,
fps)
if self.params["exposure"]:
self.camera.setParameter(self.params["camera"], CAMERA_PARAMETERS["AutoExposition"], 0)
self.camera.setParameter(self.params["camera"], CAMERA_PARAMETERS["Exposure"], 400)
resolution = self.params["resolution"]
fps = CAMERA_DATAS_AT_RESOLUTION[resolution]["fps"]
# self.periodic_tasks.setUsPeriod(1000000 / fps)
# self.periodic_tasks.start(True)
print("subscribe done")
else:
raise Exception("DXAruco is already running")
def unsubscribe(self):
print("unsubscribe...")
if self.subscriber_id is not None:
self.camera.unsubscribe(self.subscriber_id)
# self.periodic_tasks.stop()
self.subscriber_id = None
print("unsubscribe done")
else:
raise Exception("DXAruco is not running")
def calc_marker(self):
print("calculation marker information")
start_time = time.time()
try:
self.image_remote = self.camera.getImageRemote(self.subscriber_id)
except Exception as message:
self.unsubscribe()
print(str(message))
if not self.image_remote:
self.unsubscribe()
raise Exception("No data in image")
camera = self.params["camera"]
camera_name = CAMERAS[camera]
seconds = self.image_remote[4]
micro_seconds = self.image_remote[5]
t_world2camera = almath.Transform(self.motion._getSensorTransformAtTime(camera_name, seconds*10e8+micro_seconds*10e2))
t_robot2camera = almath.Transform(self.motion.getTransform(camera_name, 2, True))
resolution = self.params["resolution"]
x, y = CAMERA_DATAS_AT_RESOLUTION[resolution]["image_size"]
color_space, channels = self.params["color_space_and_channels"]
        image = np.frombuffer(self.image_remote[6], dtype=np.uint8).reshape(y, x, channels)
if self.params["color"] and color_space == vd.kBGRColorSpace:
print("Thresholding image...")
lower_b = tuple([int(val) for val in self.params["color"]])
upper_b = (255, 255, 255)
image = cv2.inRange(image, lower_b, upper_b)
print("Thresholding image done")
p6Ds = dict()
corners, ids, rejectedImgPoints = cv2.aruco.detectMarkers(image, self.params["dictionary"])
result = False
if ids is not None:
try:
if [328] in ids:
count = 0
for _id in ids:
print(_id)
if _id == [328]:
break
count = count + 1
rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(corners, \
self.params["size"], \
CAMERA_DATAS_AT_RESOLUTION[resolution]["matrix"], \
CAMERA_DISTORTION_COEFF)
tvec = tvecs[count][0]
x, y, z = tvec[2], -tvec[0], -tvec[1]
p3d_camera2target = almath.Position3D(x, y, z)
rvec = rvecs[count][0]
wx, wy, wz = rvec[2], -rvec[0], -rvec[1]
                    proj_rvec, _ = cv2.Rodrigues(np.array([wx, wy, wz]))
r_camera2target = almath.Rotation(proj_rvec.flatten())
t_camera2target = almath.transformFromRotationPosition3D(r_camera2target, p3d_camera2target)
r3d_correction = almath.Rotation3D(0., 3*math.pi/2, 0)
                    t_correction = almath.transformFromRotation3D(r3d_correction)
                    t_world2target = t_world2camera * t_camera2target * t_correction
                    t_robot2target = t_robot2camera * t_camera2target * t_correction
p6D_world2target = almath.position6DFromTransform(t_world2target)
p6D_robot2target = almath.position6DFromTransform(t_robot2target)
print("[x,y,theta] = [{},{},{}]".format(p6D_robot2target.x, p6D_robot2target.y, math.degrees(p6D_robot2target.wz)))
#p6Ds[ids] = {
# "robot2target": list(p6D_robot2target.toVector())
#"world2target": list(p6D_world2target.toVector())
#}
result = True
print("ID:" + str(ids))
except Exception as message:
print("failed: {}".format(str(message)))
self.unsubscribe()
else:
result = False
print("No Marker")
delta_time = time.time() - start_time
return result
def run(self):
while True:
result = self.calc_marker()
if __name__ == "__main__":
ip = "127.0.0.1"
if(len(sys.argv)) > 1:
ip = sys.argv[1]
print("IP:" + ip)
win = CameraImage(ip)
|
from BiGNN import BiGNN
import sys
sys.path.append('..')
from utlis import load_data,load_customdataset_test_data,load_randomdataset_test_data,vaild,get_50_epoch_MAPE,accuracy_train
import argparse
import random
import numpy as np
import torch
from torch.optim import lr_scheduler
import torch.optim as optim
import time
import os
def parse_opt():
parser = argparse.ArgumentParser()
parser.add_argument('--seed', type=int, default=2020, help='random seed')
parser.add_argument('--epochs', type=int, default=300)
parser.add_argument('--weight-decay', type=float, default=4e-5)
parser.add_argument('--hidden', type=int, default=96, help='hidden size of forward aggregator')
parser.add_argument('--reverse-hidden', type=int, default=16, help='hidden size of reverse aggregator')
parser.add_argument('--lr', type=float, default=0.0006, help='learning rate')
parser.add_argument('--heads', type=int, default=2, help='number of heads')
parser.add_argument('--dataset', type=str, default='random', help='random or custom')
parser.add_argument('--step', type=int, default=30, help='step of lr scheduler')
parser.add_argument('--gamma', type=float, default=0.75, help='decay ratio of lr scheduler')
parser.add_argument('--device', type=str, default='1', help='cpu or 0,1,2,3 for gpu')
opt = parser.parse_args()
return opt
def set_seed(seed):
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
cuda = torch.cuda.is_available()
if cuda:
torch.cuda.manual_seed(seed)
def save_model(model, optimizer, epoch):
state = {
'epoch': epoch,
'model': model.state_dict(),
'optimizer': optimizer.state_dict()
}
torch.save(state, '/content/drive/MyDrive/RandomModel/model_%s' % epoch)
if __name__ == '__main__':
opt = parse_opt()
set_seed(opt.seed)
if opt.device != 'cpu':
print(opt.device,type(opt.device))
torch.cuda.set_device(int(opt.device))
train_loader, vaild_loader, params_min_list, params_max_list, max_layer_length = load_data(opt.dataset)
if opt.dataset == 'random':
test_data = load_randomdataset_test_data()
else:
test_data = load_customdataset_test_data()
min_loss = 1e10
test_net_name = test_data['name'].values.tolist()
vaild_acc = {}
for name in test_net_name:
vaild_acc[name] = []
# Model and optimizer
model = BiGNN(nfeat = 110,
nhid=opt.hidden,
reverse_hidden=opt.reverse_hidden,
nheads=opt.heads)
optimizer = optim.Adam(model.parameters(),
lr=opt.lr,
weight_decay=opt.weight_decay)
scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.step, gamma=opt.gamma)
cuda = torch.cuda.is_available()
if cuda:
print('use GPU')
model.cuda()
loss_func = torch.nn.L1Loss()
# Train model
epochs = opt.epochs
t_total = time.time()
loss_values = []
error_values = []
test_error_list = []
vaild_loss_values = []
vaild_error_values = []
bad_counter = 0
best = epochs + 1
best_epoch = 0
patience_cnt = 0
patience = 200
for epoch in range(epochs):
t = time.time()
step_loss_values = []
step_error_values = []
scheduler.step()
        for step, (batch_params, batch_id, batch_link, batch_y) in enumerate(train_loader):  # each training step
if cuda:
batch_params = batch_params.cuda()
batch_id = batch_id.cuda()
batch_y = batch_y.cuda()
batch_link = batch_link.cuda()
model.train()
optimizer.zero_grad()
output = model(batch_params, batch_link, batch_id)
loss_train = loss_func(output, batch_y)
error_train = accuracy_train(output, batch_y)
loss_train.backward()
optimizer.step()
step_loss_values.append(loss_train.item())
step_error_values.append(error_train)
# print(step_loss_values)
loss_values.append(np.mean(step_loss_values))
error_values.append(np.mean(step_error_values))
epoch_train_loss = np.mean(step_loss_values)
epoch_train_error = np.mean(step_error_values)
step_loss_values = []
step_error_values = []
        for step, (batch_params, batch_id, batch_link, batch_y) in enumerate(vaild_loader):  # each validation step
if cuda:
batch_params = batch_params.cuda()
batch_id = batch_id.cuda()
batch_y = batch_y.cuda()
batch_link = batch_link.cuda()
model.eval()
output = model(batch_params, batch_link, batch_id)
loss_val = loss_func(output, batch_y)
error_val = accuracy_train(output, batch_y)
step_loss_values.append(loss_val.item())
step_error_values.append(error_val)
epoch_val_loss = np.mean(step_loss_values)
epoch_val_error = np.mean(step_error_values)
vaild_loss_values.append(epoch_val_loss)
vaild_error_values.append(epoch_val_error)
        # evaluate on the test networks
vaild_mean = 0
count = 0
for i in range(len(test_data)):
label = test_data.loc[i]['all_energy']
pre,MAE,acc = vaild(model,params_min_list,params_max_list,max_layer_length,test_data.loc[i]['layer_parameters'],test_data.loc[i]['layer_link'],test_data.loc[i]['layer_id'],label)
# print('%s ,pre: %f ,label: %f, error: %f' %(test_data.loc[i]['name'],pre,test_data.loc[i]['all_energy'],acc.item()))
vaild_mean += acc.item()
count += 1
vaild_acc[test_data.loc[i]['name']].append([acc.item(),pre.item()])
vaild_mean = vaild_mean / count
if epoch > 1:
print('Epoch: {:04d}'.format(epoch+1),
'loss_train: {:.4f}'.format(epoch_train_loss),
'error_train: {:.4f}'.format(epoch_train_error),
'loss_val: {:.4f}'.format(epoch_val_loss),
'error_val: {:.4f}'.format(epoch_val_error),
'time: {:.4f}s'.format(time.time() - t),
'error_test: {:.4f}'.format(vaild_mean),
'100_epoch_test: {:.4f}'.format(get_50_epoch_MAPE(epoch,vaild_acc)))
test_error_list.append(vaild_mean)
if test_error_list[-1] < min_loss:
min_loss = test_error_list[-1]
best_epoch = epoch
patience_cnt = 0
else:
patience_cnt += 1
if patience_cnt == patience:
break
if epoch == 100 and get_50_epoch_MAPE(epoch,vaild_acc) > 18:
break
# if epoch >= 243-50 and epoch <= 243:
# print('save')
# save_model(model, optimizer, epoch)
all_test_mean = 0
if epoch>50 and epoch %10 == 1:
index = epoch - 50
split = epoch
print('\n',index,split,'-----------------','\n')
print('Test Acc',np.mean(vaild_error_values[index:split]))
for net_name,acc_list in vaild_acc.items():
# all_test_mean += np.mean(acc_list[index:split])
print(net_name,np.mean(acc_list[index:split],axis=0)[0],np.mean(acc_list[index:split],axis=0)[1])
index = split |
import numpy as np
import matplotlib.pyplot as plt
from keras.layers import Conv2D, BatchNormalization, Activation, Dense, Conv2DTranspose, Input, Lambda, Reshape, Flatten, UpSampling2D, MaxPooling2D
from keras.models import Model
import keras.backend as K
from keras import initializers
class SVHNGenerator():
def __init__(self):
self.latent_dim = 100 # Dimension of Latent Representation
self.Encoder = None
self.Decoder = None
self.model = None
self.weights_path = './model_weights/svhn.h5'
def GenerateModel(self):
b_f = 128
# ENCODER
input_ = Input(shape=(32,32,3))
encoder_hidden1 = Conv2D(filters = b_f, kernel_size = 2, strides = (2,2), padding = 'valid', kernel_initializer = 'he_normal' )(input_)
encoder_hidden1 = BatchNormalization()(encoder_hidden1)
encoder_hidden1 = Activation('relu')(encoder_hidden1)
encoder_hidden2 = Conv2D(filters = b_f*2, kernel_size = 2, strides = (2,2), padding = 'valid', kernel_initializer = 'he_normal' )(encoder_hidden1)
encoder_hidden2 = BatchNormalization()(encoder_hidden2)
encoder_hidden2 = Activation('relu')(encoder_hidden2)
encoder_hidden3 = Conv2D(filters = b_f*4, kernel_size = 2, strides = (2,2), padding = 'valid', kernel_initializer = 'he_normal' )(encoder_hidden2)
encoder_hidden3 = BatchNormalization()(encoder_hidden3)
encoder_hidden3 = Activation('relu')(encoder_hidden3)
encoder_hidden4 = Flatten()(encoder_hidden3)
        # Latent Representation Distribution, P(z)
z_mean = Dense(self.latent_dim, activation='linear',
kernel_initializer= initializers.he_normal(seed=None))(encoder_hidden4)
z_std_sq_log = Dense(self.latent_dim, activation='linear',
kernel_initializer= initializers.he_normal(seed=None))(encoder_hidden4)
# Sampling z from P(z)
def sample_z(args):
mu, std_sq_log = args
epsilon = K.random_normal(shape=(K.shape(mu)[0], self.latent_dim), mean=0., stddev=1.)
z = mu + epsilon * K.sqrt( K.exp(std_sq_log))
return z
z = Lambda(sample_z)([z_mean, z_std_sq_log])
# DECODER
decoder_hidden0 = Dense(K.int_shape(encoder_hidden4)[1], activation='relu', kernel_initializer= initializers.he_normal(seed=None))(z)
decoder_hidden0 = Reshape(K.int_shape(encoder_hidden3)[1:])(decoder_hidden0)
decoder_hidden1 = Conv2DTranspose(filters = b_f*4, kernel_size = 2, strides = (2,2), padding = 'valid', kernel_initializer = 'he_normal' )(decoder_hidden0)
decoder_hidden1 = BatchNormalization()(decoder_hidden1)
decoder_hidden1 = Activation('relu')(decoder_hidden1)
decoder_hidden2 = Conv2DTranspose(filters = b_f*2, kernel_size = 2, strides = (2,2), padding = 'valid', kernel_initializer = 'he_normal' )(decoder_hidden1)
decoder_hidden2 = BatchNormalization()(decoder_hidden2)
decoder_hidden2 = Activation('relu')(decoder_hidden2)
decoder_hidden3 = Conv2DTranspose(filters = b_f, kernel_size = 2, strides = (2,2), padding = 'valid', kernel_initializer = 'he_normal' )(decoder_hidden2)
decoder_hidden3 = BatchNormalization()(decoder_hidden3)
decoder_hidden3 = Activation('relu')(decoder_hidden3)
decoder_hidden4 = Conv2D(filters = 3, kernel_size= 1, strides = (1,1), padding='valid', kernel_initializer = 'he_normal')(decoder_hidden3)
decoder_hidden4 = Activation('sigmoid')(decoder_hidden4)
# MODEL
vae = Model(input_, decoder_hidden4)
# Encoder Model
encoder = Model(inputs = input_, outputs = [z_mean, z_std_sq_log])
# Decoder Model
no_of_encoder_layers = len(encoder.layers)
no_of_vae_layers = len(vae.layers)
decoder_input = Input(shape=(self.latent_dim,))
decoder_hidden = vae.layers[no_of_encoder_layers+1](decoder_input)
for i in np.arange(no_of_encoder_layers+2 , no_of_vae_layers-1):
decoder_hidden = vae.layers[i](decoder_hidden)
decoder_hidden = vae.layers[no_of_vae_layers-1](decoder_hidden)
decoder = Model(decoder_input,decoder_hidden )
self.VAE = vae
self.Encoder = encoder
self.Decoder = decoder
def LoadWeights(self):
self.VAE.load_weights(self.weights_path)
def GetModels(self):
return self.VAE, self.Encoder, self.Decoder
if __name__ == "__main__":
Gen = SVHNGenerator()
Gen.GenerateModel()
# Gen.weights_path = '../../gan_train/svhn.h5'
Gen.LoadWeights()
vae, encoder, decoder = Gen.GetModels()
n_samples = 10
len_z = Gen.latent_dim
z = np.random.normal(0,1,size=(n_samples*n_samples ,len_z))
sampled = decoder.predict(z)
k = 0
for i in range(n_samples):
for j in range(n_samples):
img = sampled[k]
plt.subplot(n_samples,n_samples,k+1)
plt.imshow(img)
plt.axis("Off")
k=k+1
plt.show() |
"""
Created on Oct 28, 2016
@author: mvoitko
"""
import re
import time
import locale
from datetime import datetime
from selenium import webdriver
from selenium.common.exceptions import *
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from src import config
from src.utils import helper
from src.pages.basePage import BasePage
from src.locators.mainLocators import MainLocators
class MainPage(BasePage):
"""
Main Habrahabr page representation.
Class for UI actions related to this page
"""
url = config.base_url + 'interesting'
locators_dictionary = MainLocators.locators_dictionary
    def search(self, query):
        """
        Search the given query.
        :param query: str - text to search
        :return: MainPage: selenium.webdriver.*
        """
        self.click_on('search button')
        self.fill('search field', query)
        self.find('search field').send_keys(Keys.ENTER)
        return MainPage(self.driver)
    def get_search_results(self):
        """
        Get search results.
        :return: results: list of selenium.webdriver.remote.webelement.WebElement
        """
        return self.find_elems('post')
def sort_by(self, sorting_param):
"""
Sort search results page by given sorting parameter.
:param sorting_param: str - sort by parameter
:return: MainPage: selenium.webdriver.*
"""
# old_post = self.driver.find_element(*MainLocators.locators_dictionary['POST TITLE'])
sorting_param = "sort by " + sorting_param
self.click_on(sorting_param)
# WebDriverWait(self.driver, self.timeout).until(EC.staleness_of(old_post))
return MainPage(self.driver)
def get_posts_timestamps(self):
"""
Get posts timestamps.
:return: timestamps: list of datetime objects of posts.
"""
time.sleep(1)
timestamps = []
timestamp_elements = self.find_elems('post timestamp')
for timestamp in timestamp_elements:
if re.match(helper.pattern_today, timestamp.text, re.IGNORECASE):
date_object = helper.parse_today(timestamp.text)
elif re.match(helper.pattern_yesterday, timestamp.text, re.IGNORECASE):
date_object = helper.parse_yesterday(timestamp.text)
elif re.match(helper.pattern_current_year, timestamp.text, re.IGNORECASE):
date_object = helper.parse_current_year(timestamp.text)
elif re.match(helper.pattern_full, timestamp.text):
date_object = helper.parse_full(timestamp.text)
else:
                raise NoSuchElementException(
                    "Cannot find POST TIMESTAMP locator on the {0} page".format(type(self).__name__))
timestamps.append(date_object)
return timestamps |
import logging
logger = logging.getLogger(__name__)
import os
from functools import reduce
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from gym.common.info import Content
class VNFPP(Content):
def __init__(self):
Content.__init__(self)
self.id = None
self.reports = []
self._inputs_list = {}
self._instance = None
self._reports = {}
self._vnfbd_instances_inputs = {}
def set_id(self, instance_id):
self.id = instance_id
def get_id(self):
return self.id
def add_report(self, vnfbd, report):
report_id = report.get_id()
self._reports[report_id] = report
self._vnfbd_instances_inputs[report_id] = vnfbd.get_inputs()
def _retrieve_dict(self, content, keywords):
_dict = {}
for key in keywords:
value = content.get(key)
_dict[key] = value
return _dict
def _process_evaluation(self, evaluation):
keywords = ['id', 'source', 'metrics', 'timestamp']
eval = self._retrieve_dict(evaluation, keywords)
return eval
def _process_snapshot(self, snapshot):
keywords = ['id', 'trial', 'origin', 'timestamp']
evaluations = snapshot.get('evaluations')
evals = list(map(self._process_evaluation, evaluations))
snap = self._retrieve_dict(snapshot, keywords)
snap['evaluations'] = evals
return snap
def _filter_vnfbd_inputs(self, vnfbd_inputs):
filtered_inputs = {}
for inputs_list in self._inputs_list:
path = inputs_list[:-1]
input_value = reduce(dict.__getitem__, path, vnfbd_inputs)
input_name = '_'.join(path)
filtered_inputs[input_name] = input_value
logger.debug("report filtered_inputs %s", filtered_inputs)
return filtered_inputs
def compile(self, layout_id=None):
logger.info("compile vnf-pp")
self._instance = layout_id
for report_id,report in self._reports.items():
snapshots = report.get('snapshots')
snaps = list(map(self._process_snapshot, snapshots))
keywords = ['id', 'test', 'timestamp']
profile = self._retrieve_dict(report, keywords)
profile['snapshots'] = snaps
vnfbd_inputs = self._vnfbd_instances_inputs.get(report_id)
profile_inputs = self._filter_vnfbd_inputs(vnfbd_inputs)
profile['inputs'] = profile_inputs
self.reports.append(profile)
def has_list_value(self, dict_items):
fields_list = [ field for field,value in dict_items.items() if type(value) is list ]
return fields_list
def has_dict_value(self, inputs):
fields_dict = [ field for field,value in inputs.items() if type(value) is dict ]
return fields_dict
def lists_paths(self, inputs, internal=False):
full_paths = []
dicts = self.has_dict_value(inputs)
if dicts:
for dict_field in dicts:
paths = self.lists_paths(inputs[dict_field], internal=True)
if paths:
if all([True if type(path) is list else False for path in paths]):
for path in paths:
paths_partial = [dict_field]
paths_partial.extend(path)
full_paths.append(paths_partial)
else:
paths_partial = [dict_field]
paths_partial.extend(paths)
if internal:
full_paths.extend(paths_partial)
else:
full_paths.append(paths_partial)
lists = self.has_list_value(inputs)
if lists:
for list_field in lists:
full_paths.append( [list_field, inputs[list_field]] )
return full_paths
def parse_inputs(self, inputs):
logger.info("vnf-pp parse_inputs")
lists_paths = self.lists_paths(inputs)
logger.info("lists_paths %s", lists_paths)
self._inputs_list = lists_paths |
# -*- coding: utf-8 -*-
import json
import logging
import math
import multiprocessing
import multiprocessing.dummy  # get_meta uses the thread-backed dummy Process/Queue
import os
import time
from functools import wraps
from itertools import repeat
from statistics import mean
from tempfile import NamedTemporaryFile
from typing import List, Tuple, TypedDict
from django.conf import settings
from django.contrib.gis.geos import Polygon
from mapproxy.grid import tile_grid
from osgeo import gdal, ogr, osr
from eventkit_cloud.tasks.exceptions import CancelException
from eventkit_cloud.tasks.task_process import TaskProcess
from eventkit_cloud.utils.generic import requires_zip, create_zip_file, get_zip_name
logger = logging.getLogger(__name__)
MAX_DB_CONNECTION_RETRIES = 8
TIME_DELAY_BASE = 2  # Used for exponential delays (i.e. 2^y); at 8 retries the max delay is about 4 minutes 15 seconds.
# The retry here is an attempt to mitigate any possible dropped connections. We chose to do a limited number of
# retries as retrying forever would cause the job to never finish in the event that the database is down. An
# improved method would perhaps be to see if there are connection options to create a more reliable connection.
# We have used this solution for now as I could not find options supporting this in the gdal documentation.
GOOGLE_MAPS_FULL_WORLD = [-20037508.342789244, -20037508.342789244, 20037508.342789244, 20037508.342789244]
def retry(f):
@wraps(f)
def wrapper(*args, **kwds):
attempts = MAX_DB_CONNECTION_RETRIES
exc = None
while attempts:
try:
return_value = f(*args, **kwds)
if not return_value:
logger.error("The function {0} failed to return any values.".format(getattr(f, "__name__")))
raise Exception("The process failed to return any data, please contact an administrator.")
return return_value
except Exception as e:
logger.error("The function {0} threw an error.".format(getattr(f, "__name__")))
logger.error(str(e))
exc = e
if getattr(settings, "TESTING", False):
# Don't wait/retry when running tests.
break
attempts -= 1
logger.info(e)
if "canceled" in str(e).lower():
# If task was canceled (as opposed to fail) don't retry.
logger.info("The task was canceled ")
attempts = 0
else:
if attempts:
delay = TIME_DELAY_BASE ** (MAX_DB_CONNECTION_RETRIES - attempts + 1)
logger.error(f"Retrying {str(attempts)} more times, sleeping for {delay}...")
time.sleep(delay)
raise exc
return wrapper
def progress_callback(pct, msg, user_data):
from eventkit_cloud.tasks.helpers import update_progress
update_progress(
user_data.get("task_uid"),
progress=round(pct * 100),
subtask_percentage=user_data.get("subtask_percentage", 100.0),
msg=msg,
)
def open_dataset(file_path, is_raster):
"""
Given a path to a raster or vector dataset, returns an opened GDAL or OGR dataset.
The caller has the responsibility of closing/deleting the dataset when finished.
:param file_path: Path to dataset
:return: Handle to open dataset
"""
# Attempt to open as gdal dataset (raster)
# Using gdal exception to minimize output to stdout
gdal.UseExceptions()
logger.info("Opening the dataset: {}".format(file_path))
gdal_dataset = None
ogr_dataset = None
try:
try:
gdal_dataset = gdal.Open(file_path)
except Exception as e:
logger.debug("Could not open dataset using gdal as raster.")
logger.debug(e)
finally:
if gdal_dataset and is_raster:
logger.info(f"The dataset: {file_path} opened with gdal.")
return gdal_dataset
# Attempt to open as ogr dataset (vector)
# ogr.UseExceptions doesn't seem to work reliably, so just check for Open returning None
try:
ogr_dataset = ogr.Open(file_path)
except Exception as e:
logger.debug("Could not open dataset using ogr.")
logger.debug(e)
finally:
if not ogr_dataset:
logger.debug("Unknown file format: {0}".format(file_path))
else:
logger.info(f"The dataset: {file_path} opened with ogr.")
return ogr_dataset or gdal_dataset
except RuntimeError as ex:
if ("not recognized as a supported file format" not in str(ex)) or (
"Error browsing database for PostGIS Raster tables" in str(ex)
):
raise ex
finally:
cleanup_dataset(gdal_dataset)
cleanup_dataset(ogr_dataset)
def cleanup_dataset(dataset):
"""
Given an input gdal.Dataset or ogr.DataSource, destroy it.
NB: referring to this object's members after destruction will crash the Python interpreter.
    :param dataset: Dataset / DataSource to destroy
"""
if dataset:
logger.info("Closing the resources: {}.".format(dataset))
# https://trac.osgeo.org/gdal/wiki/PythonGotchas#CertainobjectscontainaDestroymethodbutyoushouldneveruseit
del dataset
@retry
def get_meta(ds_path, is_raster=True):
"""
    This function is a wrapper for get_gdal_metadata. If there is a database disconnection there is no obvious
    way to clean up and free those resources, so the work is run in a separate process; if it fails it can simply be
    tried again.
    This was written against GDAL 2.2.4; it should be revisited to see if it can be simplified in a later version.
:param ds_path: String: Path to dataset
:param is_raster Boolean: Do not try to do OGR lookup if a raster dataset can be opened, otherwise it will try both,
and return the vector if that is an option.
:return: Metadata dict
driver: Short name of GDAL driver for dataset
is_raster: True if dataset is a raster type
nodata: NODATA value for all bands if all bands have the same one, otherwise None (raster sets only)
"""
multiprocess_queue = multiprocessing.dummy.Queue()
proc = multiprocessing.dummy.Process(target=get_gdal_metadata, args=(ds_path, is_raster, multiprocess_queue))
proc.start()
proc.join()
return multiprocess_queue.get()
def get_gdal_metadata(ds_path, is_raster, multiprocess_queue):
"""
Don't call this directly use get_meta.
Given a path to a raster or vector dataset, return the appropriate driver type.
:param ds_path: String: Path to dataset
    :param multiprocess_queue: A multiprocess queue used to return the result.
:return: None.
"""
dataset = None
ret = {"driver": None, "is_raster": None, "nodata": None, "dim": [0, 0, 0]}
try:
dataset = open_dataset(ds_path, is_raster)
if isinstance(dataset, ogr.DataSource):
ret["driver"] = dataset.GetDriver().GetName()
ret["is_raster"] = False
elif isinstance(dataset, gdal.Dataset):
ret["driver"] = dataset.GetDriver().ShortName
ret["is_raster"] = True
if dataset.RasterCount:
bands = list(set([dataset.GetRasterBand(i + 1).GetNoDataValue() for i in range(dataset.RasterCount)]))
if len(bands) == 1:
ret["nodata"] = bands[0]
ret["dim"] = [dataset.RasterXSize, dataset.RasterYSize, len(bands)]
if ret["driver"]:
logger.debug("Identified dataset {0} as {1}".format(ds_path, ret["driver"]))
else:
logger.debug("Could not identify dataset {0}".format(ds_path))
multiprocess_queue.put(ret)
finally:
cleanup_dataset(dataset)
def get_area(geojson):
"""
Given a GeoJSON string or object, return an approximation of its geodesic area in km².
The geometry must contain a single polygon with a single ring, no holes.
Based on Chamberlain and Duquette's algorithm: https://trs.jpl.nasa.gov/bitstream/handle/2014/41271/07-0286.pdf
:param geojson: GeoJSON selection area
:return: area of geojson ring in square kilometers
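    Example (illustrative): for a 1-degree by 1-degree cell at the equator,
        get_area({"type": "Polygon",
                  "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]})
    returns roughly 12,400 km².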
"""
earth_r = 6371 # km
def rad(d):
return math.pi * d / 180
if isinstance(geojson, str):
geojson = json.loads(geojson)
if hasattr(geojson, "geometry"):
geojson = geojson["geometry"]
geom_type = geojson["type"].lower()
if geom_type == "polygon":
polys = [geojson["coordinates"]]
elif geom_type == "multipolygon":
polys = geojson["coordinates"]
else:
raise RuntimeError("Invalid geometry type: %s" % geom_type)
a = 0
for poly in polys:
ring = poly[0]
if len(ring) < 4:
continue
ring.append(ring[-2]) # convenient for circular indexing
for i in range(len(ring) - 2):
a += (rad(ring[i + 1][0]) - rad(ring[i - 1][0])) * math.sin(rad(ring[i][1]))
area = abs(a * (earth_r ** 2) / 2)
return area
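# Illustrative usage sketch (the polygon below is a hypothetical 1x1 degree cell at the equator):
# cell = {"type": "Polygon", "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]}
# get_area(cell)  # returns roughly 1.2e4 (square kilometers)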
def is_envelope(geojson_path):
"""
Given a path to a GeoJSON file (or a GeoJSON string), determines whether its coordinates correspond to a WGS84
bounding box (i.e. lat1=lat2, lon2=lon3, lat3=lat4, lon4=lon1), to decide whether an alpha layer is needed in the output.
:param geojson_path: Path to GeoJSON selection file
:return: True if the given geojson is an envelope/bounding box, with one polygon and one ring.
"""
try:
if not os.path.isfile(geojson_path) and isinstance(geojson_path, str):
geojson = json.loads(geojson_path)
else:
with open(geojson_path, "r") as gf:
geojson = json.load(gf)
geom_type = geojson["type"].lower()
if geom_type == "polygon":
polys = [geojson["coordinates"]]
elif geom_type == "multipolygon":
polys = geojson["coordinates"]
else:
return False # Points/lines aren't envelopes
if len(polys) != 1:
return False # Multipolygons aren't envelopes
poly = polys[0]
if len(poly) != 1:
return False # Polygons with multiple rings aren't envelopes
ring = poly[0]
if len(ring) != 5 or ring[4] != ring[0]:
return False # Envelopes need exactly four valid coordinates
# Envelopes will have exactly two unique coordinates, for both x and y, out of those four
ret = len(set([coord[0] for coord in ring])) == len(set([coord[1] for coord in ring])) == 2
return ret
except (IndexError, IOError, ValueError):
# Unparseable JSON or unreadable file: play it safe
return False
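# Illustrative usage sketch (a JSON string may be passed instead of a file path):
# is_envelope('{"type": "Polygon", "coordinates": [[[0, 0], [2, 0], [2, 1], [0, 1], [0, 0]]]}')  # True
# is_envelope('{"type": "Polygon", "coordinates": [[[0, 0], [2, 1], [3, 4], [0, 1], [0, 0]]]}')  # False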
@retry
def convert(
boundary=None,
input_file=None,
output_file=None,
src_srs=4326,
driver=None,
layers=None,
layer_name=None,
task_uid=None,
projection: int = 4326,
creation_options: list = None,
dataset_creation_options: list = None,
layer_creation_options: list = None,
is_raster: bool = True,
warp_params: dict = None,
translate_params: dict = None,
use_translate: bool = False,
access_mode: str = "overwrite",
config_options: List[Tuple[str]] = None,
distinct_field=None,
):
"""
Uses gdal to convert and clip a supported dataset file to a mask if boundary is passed in.
:param use_translate: A flag to force the use of translate instead of warp.
:param layer_creation_options: Layer creation options, specific to vector conversion.
:param dataset_creation_options: Dataset creation options, specific to vector conversion.
:param translate_params: A dict of params to pass into gdal translate.
:param warp_params: A dict of params to pass into gdal warp.
:param is_raster: An explicit declaration that the dataset is raster (for disambiguating mixed-mode files, e.g. gpkg)
:param boundary: A geojson file or bbox (xmin, ymin, xmax, ymax) to serve as a cutline
:param input_file: A raster or vector file to be clipped
:param output_file: The dataset to put the clipped output in (if not specified, the input_file name is reused)
:param driver: Short name of output driver to use (defaults to input format)
:param layer_name: Table name in database for in_dataset
:param layers: A list of layers to include for translation.
:param task_uid: A task uid to update
:param projection: A projection as an int referencing an EPSG code (e.g. 4326 = EPSG:4326)
:param creation_options: Additional options to pass to the convert method (e.g. "-co SOMETHING")
:param config_options: A list of gdal configuration options as a tuple (option, value).
:return: Filename of clipped dataset
"""
if isinstance(input_file, str) and not use_translate:
input_file = [input_file]
meta_list = []
for _index, _file in enumerate(input_file):
input_file[_index], output_file = get_dataset_names(_file, output_file)
meta_list.append(get_meta(input_file[_index], is_raster))
src_src = f"EPSG:{src_srs}"
dst_src = f"EPSG:{projection}"
# Currently, when there is more than one file, they must all use the same driver, so the metadata is the same.
meta = meta_list[0]
if not driver:
driver = meta["driver"] or "gpkg"
# Geopackage raster only supports byte band type, so check for that
band_type = None
dstalpha = None
if driver.lower() == "gpkg":
band_type = gdal.GDT_Byte
if meta.get("nodata") is None and meta.get("is_raster"):
dstalpha = True
# Clip the dataset if a boundary is passed in.
temp_boundfile = None
geojson = None
bbox = None
if boundary:
# Strings are expected to be a file.
if isinstance(boundary, str):
if not os.path.isfile(boundary):
raise Exception(f"Called convert using a boundary of {boundary} but no such path exists.")
elif is_valid_bbox(boundary):
geojson = bbox2polygon(boundary)
bbox = boundary
elif isinstance(boundary, dict):
geojson = boundary
if geojson:
temp_boundfile = NamedTemporaryFile(suffix=".json")
temp_boundfile.write(json.dumps(geojson).encode())
temp_boundfile.flush()
boundary = temp_boundfile.name
if meta["is_raster"]:
cmd = get_task_command(
convert_raster,
input_file,
output_file,
driver=driver,
creation_options=creation_options,
band_type=band_type,
dst_alpha=dstalpha,
boundary=boundary,
src_srs=src_src,
dst_srs=dst_src,
task_uid=task_uid,
warp_params=warp_params,
translate_params=translate_params,
use_translate=use_translate,
config_options=config_options,
)
else:
cmd = get_task_command(
convert_vector,
input_file,
output_file,
driver=driver,
dataset_creation_options=dataset_creation_options,
layer_creation_options=layer_creation_options,
src_srs=src_src,
dst_srs=dst_src,
layers=layers,
layer_name=layer_name,
task_uid=task_uid,
boundary=boundary,
bbox=bbox,
access_mode=access_mode,
config_options=config_options,
distinct_field=distinct_field,
)
try:
task_process = TaskProcess(task_uid=task_uid)
task_process.start_process(cmd)
except CancelException:
# If we don't allow cancel exception to propagate then the task won't exit properly.
# TODO: Allow retry state to be more informed.
raise
except Exception as e:
logger.error(e)
raise Exception("File conversion failed. Please try again or contact support.")
finally:
if temp_boundfile:
temp_boundfile.close()
if requires_zip(driver):
logger.debug(f"Requires zip: {output_file}")
output_file = create_zip_file(output_file, get_zip_name(output_file))
return output_file
def get_task_command(function, *args, **kwargs):
return lambda: function(*args, **kwargs)
def get_dataset_names(input_file, output_file):
"""
Helper that determines the input and output dataset names, renaming the input if it would otherwise be overwritten.
:param input_file: The name of the dataset to convert.
:param output_file: (Optional) The path to write the converted file to.
:return: A tuple of (input_file, output_file).
"""
if not input_file:
raise Exception("Not provided: 'in' dataset")
# Strip optional file prefixes
file_prefix, in_dataset_file = strip_prefixes(input_file)
if not output_file:
output_file = in_dataset_file
# don't operate on the original file. If the renamed file already exists,
# then don't try to rename, since that file may not exist if this is a retry.
if output_file == in_dataset_file:
in_dataset_file = rename_duplicate(in_dataset_file)
input_file = f"{file_prefix}{in_dataset_file}"
return input_file, output_file
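# Illustrative sketch (hypothetical path): for an existing file "/tmp/data.gpkg" with no output_file given,
# get_dataset_names("/tmp/data.gpkg", None) renames the original to "/tmp/old_data.gpkg" and returns
# ("/tmp/old_data.gpkg", "/tmp/data.gpkg") so the conversion does not overwrite its own input.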
def clean_options(options):
return {option: value for option, value in options.items() if value is not None}
def convert_raster(
input_files,
output_file,
driver=None,
access_mode="overwrite",
creation_options=None,
band_type=None,
dst_alpha=None,
boundary=None,
src_srs=None,
dst_srs=None,
task_uid=None,
warp_params: dict = None,
translate_params: dict = None,
use_translate: bool = False,
config_options: List[Tuple[str]] = None,
):
"""
:param warp_params: A dict of options to pass to gdal warp (done first in conversion), overrides other settings.
:param translate_params: A dict of options to pass to gdal translate (done second in conversion),
overrides other settings.
:param input_files: A file or list of files to convert.
:param output_file: The file to write the converted output to.
:param driver: The output format (GDAL driver short name).
:param creation_options: Special GDAL creation options for the conversion.
Search for "gdal driver <format> creation options" for driver-specific options.
:param band_type: The GDAL data type (e.g. gdal.GDT_Byte).
:param dst_alpha: Whether to include an alpha band in the destination file.
:param boundary: The boundary to be used for clipping, this must be a file.
:param src_srs: The srs of the source (e.g. "EPSG:4326")
:param dst_srs: The srs of the destination (e.g. "EPSG:3857")
:param task_uid: The eventkit task uid used for tracking the work.
:param use_translate: Set to True to use gdal.Translate instead of gdal.Warp for the conversion.
:param config_options: A list of gdal configuration options as a tuple (option, value).
:return: The output file.
"""
if not driver:
raise Exception("Cannot use convert_raster without specififying a gdal driver.")
if isinstance(input_files, str) and not use_translate:
input_files = [input_files]
elif isinstance(input_files, list) and use_translate:
# If a single file is provided in an array, we can simply pull it out
if len(input_files) == 1:
input_files = input_files[0]
else:
raise Exception("Cannot use_translate with a list of files.")
gdal.UseExceptions()
subtask_percentage = 50 if driver.lower() == "gtiff" else 100
options = clean_options(
{
"callback": progress_callback,
"callback_data": {"task_uid": task_uid, "subtask_percentage": subtask_percentage},
"creationOptions": creation_options,
"format": driver,
}
)
if not warp_params:
warp_params = clean_options(
{"outputType": band_type, "dstAlpha": dst_alpha, "srcSRS": src_srs, "dstSRS": dst_srs}
)
if not translate_params:
translate_params = dict()
if boundary:
# Conversion fails if trying to cut down very small files (i.e. 0x1 pixel error).
dims = list(map(sum, zip(*[get_meta(input_file)["dim"] for input_file in input_files]))) or [0, 0, 0]
if dims[0] > 100 and dims[1] > 100:
warp_params.update({"cutlineDSName": boundary, "cropToCutline": True})
# Keep the name imagery which is used when seeding the geopackages.
# Needed because arcpy can't change table names.
if driver.lower() == "gpkg":
options["creationOptions"] = options.get("creationOptions", []) + ["RASTER_TABLE=imagery"]
if use_translate:
logger.info(
f"calling gdal.Translate('{output_file}', {input_files}'),"
f"{stringify_params(options)}, {stringify_params(warp_params)},)"
)
options.update(translate_params)
gdal.Translate(output_file, input_files, **options)
else:
logger.info(
f"calling gdal.Warp('{output_file}', [{', '.join(input_files)}],"
f"{stringify_params(options)}, {stringify_params(warp_params)},)"
)
gdal.Warp(output_file, input_files, **options, **warp_params)
if driver.lower() == "gtiff" or translate_params:
# No need to compress in memory objects as they will be removed later.
if "vsimem" in output_file:
return output_file
input_file, output_file = get_dataset_names(output_file, output_file)
if translate_params:
options.update(translate_params)
else:
options.update({"creationOptions": ["COMPRESS=LZW", "TILED=YES", "BIGTIFF=YES"]})
logger.info(f"calling gdal.Translate('{output_file}', '{input_file}', " f"{stringify_params(options)},)")
gdal.Translate(output_file, input_file, **options)
return output_file
def convert_vector(
input_file,
output_file,
driver=None,
access_mode="overwrite",
src_srs=None,
dst_srs=None,
task_uid=None,
layers=None,
layer_name=None,
boundary=None,
bbox=None,
dataset_creation_options=None,
layer_creation_options=None,
config_options: List[Tuple[str]] = None,
distinct_field=None,
):
"""
:param input_file: A file or list of files to convert.
:param output_file: The file to write the converted output to.
:param driver: The output format (OGR driver name).
:param dataset_creation_options: Special GDAL dataset creation options for the conversion.
:param layer_creation_options: Special GDAL layer creation options for the conversion.
Search for "gdal driver <format> creation options" for driver-specific options.
:param access_mode: The access mode for the file (e.g. "append" or "overwrite")
:param bbox: A bounding box as a list (w,s,e,n) to be used for limiting the AOI that is used during conversion.
:param boundary: The boundary to be used for clipping.
This must be a file (i.e. a path as a string) and cannot be used with bbox.
:param src_srs: The srs of the source (e.g. "EPSG:4326")
:param dst_srs: The srs of the destination (e.g. "EPSG:3857")
:param task_uid: The eventkit task uid used for tracking the work.
:param layers: A list of layers to include for translation.
:param layer_name: Table name in database for in_dataset
:param config_options: A list of gdal configuration options as a tuple (option, value).
:param distinct_field: A field for selecting distinct features to prevent duplicates.
:return: The output file.
"""
if isinstance(input_file, str) and access_mode == "append":
input_file = [input_file]
elif isinstance(input_file, list) and access_mode == "overwrite":
# If a single file is provided in an array, we can simply pull it out
if len(input_file) == 1:
input_file = input_file[0]
else:
raise Exception("Cannot overwrite with a list of files.")
gdal.UseExceptions()
options = clean_options(
{
"callback": progress_callback,
"callback_data": {"task_uid": task_uid},
"datasetCreationOptions": dataset_creation_options,
"layerCreationOptions": layer_creation_options,
"format": driver,
"layers": layers,
"layerName": layer_name,
"srcSRS": src_srs,
"dstSRS": dst_srs,
"accessMode": access_mode,
"reproject": src_srs != dst_srs,
"skipFailures": True,
"spatFilter": bbox,
"options": ["-clipSrc", boundary] if boundary and not bbox else None,
}
)
if "gpkg" in driver.lower():
options["geometryType"] = ["PROMOTE_TO_MULTI"]
if config_options:
for config_option in config_options:
gdal.SetConfigOption(*config_option)
if access_mode == "append":
for _input_file in input_file:
logger.info(f"calling gdal.VectorTranslate('{output_file}', '{_input_file}', {stringify_params(options)})")
gdal.VectorTranslate(output_file, _input_file, **options)
else:
logger.info(f"calling gdal.VectorTranslate('{output_file}', '{input_file}', {stringify_params(options)})")
gdal.VectorTranslate(output_file, input_file, **options)
if distinct_field:
logger.error(f"Normalizing features based on field: {distinct_field}")
table_name = layer_name or os.path.splitext(os.path.basename(output_file))[0]
options["SQLStatement"] = f"SELECT * from '{table_name}' GROUP BY '{distinct_field}'"
options["SQLDialect"] = "sqlite"
logger.error(f"calling gdal.VectorTranslate('{output_file}', '{output_file}', {stringify_params(options)})")
gdal.VectorTranslate(output_file, rename_duplicate(output_file), **options)
return output_file
def polygonize(input_file: str, output_file: str, output_type: str = "GeoJSON", band: int = None):
"""
Polygonization groups similar pixel values into bins and draws a boundary around them.
This is often used as a way to display raster information in a vector format. That can still be done here,
but if a band isn't provided the function will try to guess at the mask band and will use that as both the
converted layer and the mask. The result should be a polygon covering everywhere the pixels are neither black nor
transparent.
:param input_file: The raster file to use to polygonize.
:param output_file: The vector output file for the new data.
:param output_type: The file type for output data (should be a vector type).
:param band: The band to use for polygonization.
:return:
"""
src_ds = gdal.Open(input_file)
if src_ds is None:
logger.error("Unable to open source.")
raise Exception("Failed to open the file.")
try:
band_index = band
if not band_index:
if src_ds.RasterCount == 4:
band_index = 4
elif src_ds.RasterCount == 3:
# Likely RGB (jpg) add a transparency mask and use that.
# Clean up pixel values of 1 0 0 or 0 0 1 caused by interleaving.
nb_file = "/vsimem/nb"
gdal.Nearblack(nb_file, input_file)
# Convert to geotiff so that we can remove black pixels and use alpha mask for the polygon.
tmp_file = "/vsimem/tmp.tif"
convert_raster(nb_file, tmp_file, driver="gtiff", warp_params={"dstAlpha": True, "srcNodata": "0 0 0"})
gdal.Unlink(nb_file)  # remove the in-memory intermediate file
src_ds = gdal.Open(tmp_file)
band_index = 4
elif src_ds.RasterCount == 2:
band_index = 2
else:
band_index = 1
mask_band = src_ds.GetRasterBand(band_index)
except RuntimeError as e:
logger.error(e)
raise Exception("Unable to get raster band.")
drv = ogr.GetDriverByName(output_type)
dst_ds = drv.CreateDataSource(output_file)
dst_layer = dst_ds.CreateLayer(output_file)
# Use the mask band for both the polygonization and as a mask.
gdal.Polygonize(mask_band, mask_band, dst_layer, -1, [])
# Close files to read later.
del dst_ds
del src_ds
return output_file
def stringify_params(params):
return ", ".join([f"{k}='{v}'" for k, v in params.items()])
def get_dimensions(bbox: List[float], scale: int) -> (int, int):
"""
:param bbox: A list [w, s, e, n].
:param scale: A scale in meters per pixel.
:return: A tuple (width, height) in pixels
"""
# Request at least one pixel
width = get_distance([bbox[0], bbox[1]], [bbox[2], bbox[1]])
height = get_distance([bbox[0], bbox[1]], [bbox[0], bbox[3]])
scaled_width = int(width / scale) or 1
scaled_height = int(height / scale) or 1
return scaled_width, scaled_height
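# Illustrative usage sketch (a 1x1 degree bbox at the equator with a 1000 m/pixel scale):
# get_dimensions([0, 0, 1, 1], 1000)  # roughly (111, 111) pixels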
def get_line(coordinates):
"""
:param coordinates: A list of coordinate pairs in decimal degrees.
Example: [[W/E, N/S], [W/E, N/S]]
:return: An OGR LineString geometry.
"""
# This line will implicitly be in EPSG:4326 because that is what the geojson standard specifies.
geojson = json.dumps({"type": "LineString", "coordinates": coordinates})
return ogr.CreateGeometryFromJson(geojson)
def get_distance(point_a, point_b):
"""
Takes two points, and converts them to a line, converts the geometry to mercator and returns length in meters.
The geometry is converted to mercator because length is based on the SRS unit of measure (meters for mercator).
:param point_a: A list representing a single point [W/E, N/S].
:param point_b: A list representing a single point [W/E, N/S].
:return: Distance in meters.
"""
line = get_line([point_a, point_b])
reproject_geometry(line, 4326, 3857)
return line.Length()
def get_scale_in_meters(pixel_size: Tuple[float, float]) -> float:
"""
Takes pixel size and returns a single scale value in meters.
:param pixel_size: A tuple of two floats representing the x/y pixel values.
:return: Distance in meters of pixel size averaged.
>>> get_scale_in_meters((0.00028, 0.00028))
31
>>> get_scale_in_meters((0.000833, 0.000833))
93
>>> get_scale_in_meters((0.00833, 0.00833))
927
"""
pixel = list(map(get_distance, repeat([0, 0]), list(zip(repeat(0), pixel_size))))
return round(mean(pixel))
def reproject_geometry(geometry, from_srs, to_srs):
"""
Converts an ogr geometry from one spatial reference system to another (in place).
:param geometry: The ogr geometry to transform.
:param from_srs: The source EPSG code as an int.
:param to_srs: The destination EPSG code as an int.
:return: The return value of geometry.Transform().
"""
return geometry.Transform(get_transform(from_srs, to_srs))
def get_transform(from_srs, to_srs):
"""
:param from_srs: A spatial reference (EPSG) represented as an int (i.e. EPSG:4326 = 4326)
:param to_srs: A spatial reference (EPSG) represented as an int (i.e. EPSG:4326 = 4326)
:return: An osr coordinate transformation object.
"""
osr_axis_mapping_strategy = osr.OAMS_TRADITIONAL_GIS_ORDER
source = osr.SpatialReference()
source.ImportFromEPSG(from_srs)
source.SetAxisMappingStrategy(osr_axis_mapping_strategy)
target = osr.SpatialReference()
target.ImportFromEPSG(to_srs)
target.SetAxisMappingStrategy(osr_axis_mapping_strategy)
return osr.CoordinateTransformation(source, target)
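# Illustrative usage sketch (not part of the original module):
# transform = get_transform(4326, 3857)
# point = ogr.CreateGeometryFromJson('{"type": "Point", "coordinates": [1, 0]}')
# point.Transform(transform)  # point.GetX() is now roughly 111319.49 (Web Mercator metres)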
def merge_geotiffs(in_files, out_file, task_uid=None):
"""
:param in_files: A list of geotiffs.
:param out_file: A location for the result of the merge.
:param task_uid: A task uid to track the conversion.
:return: The out_file path.
"""
cmd = get_task_command(convert_raster, in_files, out_file, task_uid=task_uid, driver="gtiff")
try:
task_process = TaskProcess(task_uid=task_uid)
task_process.start_process(cmd)
except Exception as e:
logger.error(e)
raise Exception("GeoTIFF merge process failed.")
return out_file
def merge_geojson(in_files, out_file):
"""
:param in_files: A list of geojson files.
:param out_file: A location for the result of the merge.
:return: The out_file path.
"""
try:
out_driver = ogr.GetDriverByName("GeoJSON")
out_ds = out_driver.CreateDataSource(out_file)
out_layer = out_ds.CreateLayer(out_file)
for file in in_files:
ds = ogr.Open(file)
lyr = ds.GetLayer()
for feat in lyr:
out_feat = ogr.Feature(out_layer.GetLayerDefn())
out_feat.SetGeometry(feat.GetGeometryRef().Clone())
out_layer.CreateFeature(out_feat)
out_feat = None # NOQA
out_layer.SyncToDisk()
out_ds = None # NOQA
except Exception as e:
logger.error(e)
raise Exception("File merge process failed.")
return out_file
def get_band_statistics(file_path, band=1):
"""
Returns the band statistics for a specific raster file and band
:param file_path: The path to the file.
:param band: A specific raster band (defaults to 1).
:return: A list [min, max, mean, std_dev]
"""
image_file = None
try:
gdal.UseExceptions()
image_file = gdal.Open(file_path)
raster_band = image_file.GetRasterBand(band)
return raster_band.GetStatistics(False, True)
except Exception as e:
logger.error(e)
logger.error("Could not get statistics for {0}:{1}".format(file_path, raster_band))
return None
finally:
# Need to close the dataset.
cleanup_dataset(image_file) # NOQA
def rename_duplicate(original_file: str) -> str:
# Some files we may not want to rename or overwrite. For example if PBF is used for source data, we don't want to
# create duplicates of it and the gdal driver doesn't support writing PBF anyway, so this is likely a mistake.
protected_files = [".pbf"]
if os.path.splitext(original_file)[1] in protected_files:
raise Exception(f"The {original_file} cannot be renamed it is protected and/or not writable by this module.")
returned_file = os.path.join(os.path.dirname(original_file), "old_{0}".format(os.path.basename(original_file)))
# if the original and renamed files both exist, we can remove the renamed version, and then rename the file.
if os.path.isfile(returned_file) and os.path.isfile(original_file):
os.remove(returned_file)
# If the original file doesn't exist but the renamed version does, then something failed after a rename, and
# this is now retrying the operation.
if not os.path.isfile(returned_file):
logger.info("Renaming '{}' to '{}'".format(original_file, returned_file))
os.rename(original_file, returned_file)
return returned_file
def strip_prefixes(dataset: str) -> (str, str):
prefixes = ["GTIFF_RAW:"]
removed_prefix = ""
output_dataset = dataset
for prefix in prefixes:
# str.lstrip strips a set of characters, not a prefix, so check for the prefix explicitly.
if output_dataset.startswith(prefix):
removed_prefix = prefix
output_dataset = output_dataset[len(prefix):]
return removed_prefix, output_dataset
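# Illustrative usage sketch (hypothetical paths):
# strip_prefixes("GTIFF_RAW:/vsimem/raster.tif")  # -> ("GTIFF_RAW:", "/vsimem/raster.tif")
# strip_prefixes("/vsimem/raster.tif")  # -> ("", "/vsimem/raster.tif")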
def get_chunked_bbox(bbox, size: tuple = None, level: int = None):
"""
Chunks a bbox into a grid of sub-bboxes.
:param bbox: bbox in EPSG:4326, representing the area of the world to be chunked
:param size: optional image size to use when calculating the resolution.
:param level: currently unused; tiles are taken from level 0 of the generated grid.
:return: a list of bboxes for all tiles covering the area.
"""
from eventkit_cloud.utils.image_snapshot import get_resolution_for_extent
# Calculate the starting res for our custom grid
# This is the same method we used when taking snap shots for data packs
resolution = get_resolution_for_extent(bbox, size)
# Make a subgrid of 4326 that spans the extent of the provided bbox
# min res specifies the starting zoom level
mapproxy_grid = tile_grid(srs=4326, bbox=bbox, bbox_srs=4326, origin="ul", min_res=resolution)
# bbox is the bounding box of all tiles affected at the given level, unused here
# size is the x, y dimensions of the grid
# tiles at level is a generator that returns the tiles in order
tiles_at_level = mapproxy_grid.get_affected_level_tiles(bbox, 0)[2]
# convert the tiles to bboxes representing the tiles on the map
return [mapproxy_grid.tile_bbox(_tile) for _tile in tiles_at_level]
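# Illustrative sketch (exact values depend on get_resolution_for_extent and the mapproxy grid):
# get_chunked_bbox([-10.0, -10.0, 10.0, 10.0], size=(256, 256))
# -> a list of (west, south, east, north) tuples whose union covers the input bbox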
class _ArcGISSpatialReference(TypedDict):
wkid: int
class ArcGISSpatialReference(_ArcGISSpatialReference, total=False):
latestWkid: int
class ArcGISExtent(TypedDict):
xmin: float
ymin: float
xmax: float
ymax: float
spatialReference: ArcGISSpatialReference
def get_polygon_from_arcgis_extent(extent: ArcGISExtent):
spatial_reference = extent.get("spatialReference", {})
bbox = [extent.get("xmin"), extent.get("ymin"), extent.get("xmax"), extent.get("ymax")]
try:
polygon = Polygon.from_bbox(bbox)
polygon.srid = spatial_reference.get("latestWkid") or spatial_reference.get("wkid") or 4326
polygon.transform(4326)
return polygon
except Exception:
return Polygon.from_bbox([-180, -90, 180, 90])
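# Illustrative usage sketch (a hypothetical extent already in EPSG:4326):
# extent = {"xmin": -104.0, "ymin": 35.0, "xmax": -94.0, "ymax": 41.0, "spatialReference": {"wkid": 4326}}
# get_polygon_from_arcgis_extent(extent)  # -> GEOS Polygon covering that box, transformed to EPSG:4326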
def is_valid_bbox(bbox):
if not isinstance(bbox, list) or len(bbox) != 4:
return False
if bbox[0] < bbox[2] and bbox[1] < bbox[3]:
return True
else:
return False
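# Illustrative usage sketch:
# is_valid_bbox([-10, -10, 10, 10])  # True
# is_valid_bbox([10, -10, -10, 10])  # False (west is not less than east)
# is_valid_bbox((0, 0, 1, 1))  # False (a list is required; tuples are rejected)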
def expand_bbox(original_bbox, new_bbox):
"""
Takes two bboxes and returns a new bbox containing the original two.
:param original_bbox: A list representing [west, south, east, north]
:param new_bbox: A list representing [west, south, east, north]
:return: A list containing the two original lists.
"""
if not original_bbox:
original_bbox = list(new_bbox)
return original_bbox
original_bbox[0] = min(new_bbox[0], original_bbox[0])
original_bbox[1] = min(new_bbox[1], original_bbox[1])
original_bbox[2] = max(new_bbox[2], original_bbox[2])
original_bbox[3] = max(new_bbox[3], original_bbox[3])
return original_bbox
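# Illustrative usage sketch:
# expand_bbox([0, 0, 5, 5], [-1, 2, 3, 8])  # -> [-1, 0, 5, 8]
# expand_bbox(None, [-1, 2, 3, 8])  # -> [-1, 2, 3, 8]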
def bbox2polygon(bbox):
try:
(w, s, e, n) = bbox
except (TypeError, ValueError):
return
coordinates = [[[w, s], [e, s], [e, n], [w, n], [w, s]]]
return {"type": "Polygon", "coordinates": coordinates}
|
import bin.generateGraph as graph
import json
def test_graph():
output = open("files/test.txt")
output = output.read().strip()
j = json.load(open("output/aaa/aaa.json"))
artefacts = j['analysis']['data']['art']
if isinstance(artefacts, dict):
artefacts = [artefacts]
g = graph.generateGraph(artefacts)
g = str(g).strip()
assert g == output
|
u = 0.5*sin(2*pi*x)+1
|
from .get_pathname_data import get_directory |
# _______________________________________________________________________
# | File Name: views.py |
# | |
# | This file is for handling the views of support ticket display |
# |_____________________________________________________________________|
# | Start Date: July 7th, 2016 |
# | |
# | Package: Openstack Horizon Dashboard [liberty] |
# | |
# | Copyright: 2016@nephoscale |
# |_____________________________________________________________________|
#from openstack_dashboard.dashboards.zendesksupport.api import zenpy_patch as zenpy
from openstack_dashboard.dashboards.zendesksupport.tickets import tabs as ticket_tabs
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.zendesksupport.tickets import tables
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponseRedirect
from django.shortcuts import render
from horizon import forms
from horizon import views
from horizon import tabs
from horizon import exceptions
from horizon.utils import memoized
from openstack_dashboard.dashboards.zendesksupport.tickets import forms as ticket_form
from openstack_dashboard.dashboards.zendesksupport import api
from django.conf import settings
import os
from django.core.files.base import ContentFile
class IndexView(tabs.TabbedTableView):
"""
# | IndexView for showing ticket list
# |
# | Code is in tabs.py
"""
tab_group_class = ticket_tabs.MyTicketsTab
template_name = "zendesk_support_dashboard/tickets/index.html"
page_title = "My Tickets"
class CreateTicketView(forms.ModalFormView):
"""
# | Class to display create form function
"""
form_class = ticket_form.CreateTicketForm
template_name = 'zendesk_support_dashboard/tickets/create_ticket.html'
success_url = reverse_lazy("horizon:zendesk_support_dashboard:tickets:index")
modal_id = "create_ticket_modal"
modal_header = _("Create Ticket")
submit_label = _("Create Ticket")
submit_url = "horizon:zendesk_support_dashboard:tickets:create_ticket"
def get_context_data(self, **kwargs):
"""
# | Function to create the create ticket form
# |
# | Arguments: Kwargs
# |
# | Returns: TEMPLATE CONTEXT
"""
context = super(CreateTicketView, self).get_context_data(**kwargs)
context['submit_url'] = reverse_lazy(self.submit_url, args=[])
return context
def get_ticket_detail(request, **kwargs):
"""
# | Function for handling ticket detail page
# |
# | @Arguments:
# | <request>: HTTP Request object
# | <kwargs>: Keyword arguments, including the ticket_id
# |
# | @Returns:
# | HTTPResponse Object
"""
ticket_id = kwargs['ticket_id']
try:
zenpy_obj = api.Zendesk(request)
#Initializing
role_check_flag = False
#Initializing
files = []
#Setting the location where the uploads are to be stored temporarily
folder = 'zendesk_user_uploads'
BASE_PATH = '/tmp/'
attachment_list = []
if request.method == "POST":
# If the request method is POST
# then we need to call for addCommentForm
form = ticket_form.AddCommentForm(request.POST)
if form.is_valid():
comment = request.POST.get('comment')
#Handle uploaded files if present
if request.FILES:
#files = handle_uploaded_file(request)
#Enter the loop only if attachments are present
if request.method == 'POST' and request.FILES['attachment']:
try:
attachment_list = request.FILES.getlist('attachment')
for file in attachment_list:
uploaded_filename = file.name
if file.size > 1000000:
raise Exception('%s is too large. Only attachments less than 1MB are accepted.' % uploaded_filename)
full_filename = os.path.join(BASE_PATH, folder, uploaded_filename)
fout = open(full_filename, 'wb+')
file_content = ContentFile(file.read())
#Iterate through the chunks.
for chunk in file_content.chunks():
fout.write(chunk)
fout.close()
files.append(str(full_filename))
except Exception as err:
error_message = _(str(err))
exceptions.handle(request, error_message)
else:
files = []
privacy = request.POST.get('comment_privacy')
zenpy_obj.create_comment(ticket_id, comment, privacy, files)
return HttpResponseRedirect(reverse_lazy('horizon:zendesk_support_dashboard:tickets:ticket_detail', args=[ticket_id]))
else:
exceptions.handle(request, form.errors)
#Check if role_check has been set in the session
if request.session.get('role_check', False):
role_check_flag = request.session['role_check']
ticket_detail = zenpy_obj.get_ticket_detail(ticket_id)
ticket_comments = zenpy_obj.get_ticket_comments(ticket_id)
context = {
"page_title": ticket_detail.subject,
"ticket": ticket_detail,
"comments": ticket_comments,
"role_check_flag" : role_check_flag
}
return render(request, 'zendesk_support_dashboard/tickets/ticket_detail.html', context)
except api.ZendeskError as err:
if err.code == 403:
context = {
"page_title": "403:Forbidden",
"error": "You are not authorized to view this ticket."
}
elif err.code == 404:
context = {
"page_title": "404: Not Found",
"error": "This ticket does not exist."
}
else:
context = {
"page_title": "500:Unknown Error Occured",
"error": "Unknown error occurred. Unable to fetch ticket detail.."
}
return render(request, 'zendesk_support_dashboard/tickets/ticket_detail.html', context)
def handle_uploaded_file(request):
#Initializing
files = []
folder = 'zendesk_user_uploads'
BASE_PATH = '/tmp/'
attachment_list = []
#Enter the loop only if attachments are present
if request.method == 'POST' and request.FILES['attachment']:
attachment_list = request.FILES.getlist('attachment')
for file in attachment_list:
uploaded_filename = file.name
if file.size > 1000000:
raise Exception('%s is too large. Please attach files that are less than 1MB.' % uploaded_filename)
#break
full_filename = os.path.join(BASE_PATH, folder, uploaded_filename)
fout = open(full_filename, 'wb+')
file_content = ContentFile(file.read())
#Iterate through the chunks.
for chunk in file_content.chunks():
fout.write(chunk)
fout.close()
files.append(str(full_filename))
return files
|
from .abc import AbstractIdentityPolicy, AbstractAuthorizationPolicy
from .api import remember, forget, setup, authorized_userid, permits
from .cookies_identity import CookiesIdentityPolicy
from .session_identity import SessionIdentityPolicy
__version__ = '0.1.1'
__all__ = ('AbstractIdentityPolicy', 'AbstractAuthorizationPolicy',
'CookiesIdentityPolicy', 'SessionIdentityPolicy',
'remember', 'forget', 'authorized_userid',
'permits', 'setup')
|
COMMENT_HEADER = "# ******************************************************************************"
MODULE_HEADER = "# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #"
RULE_HEADER = "rule "
POOL_HEADER = "pool " |
# -*- coding: utf-8 -*-
"""
core runtime tests
~~~~~~~~~~~~~~~~~~
tests canteen's runtime core, which is responsible for pulling
the app together with glue and spit.
:author: Sam Gammon <[email protected]>
:copyright: (c) Sam Gammon, 2014
:license: This software makes use of the MIT Open Source License.
A copy of this license is included as ``LICENSE.md`` in
the root of the project.
"""
# stdlib
import operator
# testing
from canteen import test
# runtime core
from canteen.core import runtime
class BaseRuntimeTest(test.FrameworkTest):
""" Tests :py:mod:`canteen`'s :py:mod:`core.runtime` package,
which handles pretty much everything from bind to dispatch. """
def test_exports(self):
""" Test basic attributes that should always be present on `runtime` """
assert hasattr(runtime, 'Runtime')
assert hasattr(runtime, 'Library')
class RuntimeLibraryTest(test.FrameworkTest):
""" Tests :py:class:`canteen.core.runtime.Library`, which is
used internally by :py:mod:`canteen` for deferred library
binding. """
def test_good_library_string_lax(self):
""" Test that `Library` can load a known-good module (by path) """
with runtime.Library('operator') as (library, _operator):
assert hasattr(_operator, 'eq')
assert operator.eq is _operator.eq
def test_good_library_string_strict(self):
""" Test that `Library` can load a known-good module in `strict` mode """
with runtime.Library('operator', strict=True) as (library, _operator):
assert hasattr(_operator, 'eq')
assert operator.eq is _operator.eq
def test_bad_library_string_lax(self):
""" Test that `Library` can ignore a known-bad module (by path) """
_lib = runtime.Library('i_do_not_exist_at_all_okay_never')
_lib.__enter__()
def test_bad_library_string_strict(self):
""" Test that `Library` can ignore a known-bad module in `strict` mode """
with self.assertRaises(ImportError):
_lib = runtime.Library('i_do_not_exist_at_all_okay_never', strict=True)
_lib.__enter__()
def test_good_library_module_lax(self):
""" Test that `Library` can load a module (by mod) """
with runtime.Library(operator) as (library, _operator):
assert hasattr(_operator, 'eq')
def test_good_library_module_strict(self):
""" Test that `Library` can load a module (by mod) in `strict` mode """
with runtime.Library(operator, strict=True) as (library, _operator):
assert hasattr(_operator, 'eq')
|
from src import macadress
from src import ping
import sys
modeChoice = int(
input("\nChoose:\n1 - full (1-254) or\n2 - partial mode (custom range)\n\n|> "))
safeChoice = input(
"Activate safe-mode or flood network?\n\nyes - safe-mode\nno - no timeout between pings\n\n|> ")
if(safeChoice.lower() == 'yes'):
safe = True
elif(safeChoice.lower() == 'no'):
safe = False
else:
print("\nInvalid input, defaulting to safe-mode ...\n")
safe = True
if (modeChoice == 1):
results = ping.get_confirmed_adresses(1, 254, safe)
elif (modeChoice == 2):
start = int(input("\nEnter the first adress to scan: "))
end = int(input("Enter the last adress: "))
results = ping.get_confirmed_adresses(start, end, safe)
else:
sys.exit(
"\nInvalid choice!\nPlease choose either 1 (full) or 2 (partial mode).\n")
for ip in results:
hostname = macadress.get_hostname(ip)
mac = macadress.get_mac_from_ip(ip)
print("\n{}\nHostname: '{}'\nMAC-adress: {}"
.format(ip, hostname[0], mac))
|
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2016 by Mike Taylor
:license: CC0 1.0 Universal, see LICENSE for more details.
"""
import json
from flask import current_app
class IndieWebNews():
def __init__(self, keyBase):
self.keyRoot = '%sindienews-' % keyBase
# execute the query
def query(self, sql, args=()):
cur = current_app.dbStore.cursor()
cur.execute(sql, args)
rows = [dict(r) for r in cur.fetchall()]
return rows
# execute a statement (optionally with params) and commit
def run(self, sql, params=None):
cur = current_app.dbStore.cursor()
if params is None:
cur.execute(sql)
else:
cur.execute(sql, params)
current_app.dbStore.commit()
def domain(self, domain):
d = { 'domain': domain,
'created': None,
'updated': None,
'postCount': 0,
'posts': [],
}
r = self.query('select * from domains where domain = "{domain}"'.format(domain=domain))
if len(r) > 0:
d['created'] = r[0]['created']
d['updated'] = r[0]['updated']
posts = []
l = self.query('select postid from posts where domain = "{domain}"'.format(domain=domain))
for item in l:
postid = item['postid']
current_app.logger.info('post {postid}'.format(postid=postid))
p = self.query('select * from posts where postid = "{postid}"'.format(postid=postid))
# use a separate dict per post so the outer domain dict is not clobbered
post = {}
if len(p) > 0:
for key in p[0].keys():
if key in ('comment', 'parsed'):
post[key] = json.loads(p[0][key])
else:
post[key] = p[0][key]
print(post)
posts.append(post)
d['posts'] = posts
d['postCount'] = len(posts)
return d
def post(self, postid):
current_app.logger.info('post info [%s]' % postid)
p = self.query('select * from posts where postid = "{postid}"'.format(postid=postid))
d = {}
if len(p) > 0:
for key in p[0].keys():
if key in ('comment', 'parsed'):
d[key] = json.loads(p[0][key])
else:
d[key] = p[0][key]
return d
def domains(self):
p = self.query('select * from domains')
return p
def current(self):
posts = []
l = current_app.dbRedis.lrange('indienews-recent', 0, -1)
current_app.logger.info('pulling current items %d' % len(l))
for postid in l:
current_app.logger.info('post {postid}'.format(postid=postid))
p = self.query('select * from posts where postid = "{postid}"'.format(postid=postid))
d = {}
if len(p) > 0:
for key in p[0].keys():
if key in ('comment', 'parsed'):
d[key] = json.loads(p[0][key])
else:
d[key] = p[0][key]
print(d)
posts.append(d)
current_app.logger.info('%d items found' % len(posts))
return posts
|
class Solution(object):
def combinationSum2(self, candidates, target):
result = []
self.combinationSumRecu(sorted(candidates), result, 0, [], target)
return result
def combinationSumRecu(self, candidates, result, start, intermediate, target):
if target == 0:
result.append(list(intermediate))
prev = 0
while start < len(candidates) and candidates[start] <= target:
if prev != candidates[start]:
intermediate.append(candidates[start])
self.combinationSumRecu(candidates, result, start + 1, intermediate, target - candidates[start])
intermediate.pop()
prev = candidates[start]
start += 1
candidates = [10,1,2,7,6,1,5]
target = 8
res = Solution().combinationSum2(candidates, target)
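# For these inputs the expected result (order may vary) is [[1, 1, 6], [1, 2, 5], [1, 7], [2, 6]].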
print(res) |
import pytest
from .config import PORT_DEFAULT
import time
from pynextion import PySerialNex
from pynextion.widgets import NexPage, NexQRcode, NexText
@pytest.mark.parametrize("port", [PORT_DEFAULT])
def test_qrcode(port):
nexSerial = PySerialNex(port)
nexSerial.init()
nexPage = NexPage(nexSerial, "pg_qr", pid=9)
nexQRcode = NexQRcode(nexSerial, "qr0", cid=1)
nexText = NexText(nexSerial, "t1", cid=3)
nexPage.show()
time.sleep(1)
text = "Hello"
nexQRcode.text = text
nexText.text = text
time.sleep(2)
# text = "https://github.com/py3-nextion/pynextion"
text = "http://bit.ly/2QRZsuV"
nexText.text = text
# nexQRcode.textmaxlength = len(text)
# nexQRcode.textmaxlength = 50
# time.sleep(1)
nexQRcode.text = text
assert nexText.text == text
assert nexQRcode.text == text
nexSerial.close()
|
from datetime import timedelta
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
from forms import SearchForm
class IndexView(BaseView):
def initial_context(self, request):
return {
'search_form': getattr(self.conf, 'form', SearchForm(request.GET or None))
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb(
'search', None, 'Search', lazy_reverse('index')
)
def handle_GET(self, request, context):
if context['search_form'].is_valid():
return self.handle_search(request, context)
return self.render(request, context, 'search/index',
expires=timedelta(minutes=30))
def handle_search(self, request, context):
application = context['search_form'].cleaned_data['application'] or None
query = context['search_form'].cleaned_data['query']
results = []
for provider in self.conf.providers:
results += provider.perform_search(request, query, application)
seen_urls, i = set(), 0
while i < len(results):
url = results[i]['url']
if url in seen_urls:
results[i:i+1] = []
else:
seen_urls.add(url)
i += 1
# Remove results deemed irrelevant
results = [r for r in results if not r.get('exclude_from_search')]
if len(results) == 1 and results[0].get('redirect_if_sole_result'):
return self.redirect(results[0]['url'], request)
context.update({
'results': list(results)[:20],
})
return self.render(request, context, 'search/index')
|
from django.shortcuts import render, redirect, render_to_response, HttpResponseRedirect
from django.http import HttpResponse, Http404,HttpResponseRedirect
from django.contrib.auth.forms import UserCreationForm
from .models import Projects, Profile
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from rest_framework.response import Response
from rest_framework.views import APIView
import datetime as dt
from .serializers import ProfileSerializer, ProjectSerializer
from django.http import JsonResponse
from .forms import SignUpForm, NewProjectForm, ProfileUpdateForm
from rest_framework import status
# Create your views here.
def page(request):
projects = Projects.get_projects()
return render(request, 'page.html', {"projects":projects})
def registerPage(request):
if request.method == 'POST':
form = SignUpForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=username, password=raw_password)
login(request, user)
return redirect('page')
else:
form = SignUpForm()
return render(request, 'registration/registration_form.html', {'form': form})
@login_required(login_url='/accounts/login/')
def new_project(request):
current_user = request.user
if request.method == 'POST':
form = NewProjectForm(request.POST, request.FILES)
if form.is_valid():
project = form.save(commit=False)
project.Owner = current_user
project.save()
return redirect('page')
else:
form = NewProjectForm()
return render(request, 'online/new-project.html', {"form": form})
class ProjectList(APIView):
def get(self, request, format=None):
all_project = Projects.objects.all()
serializers = ProjectSerializer(all_project, many=True)
return Response(serializers.data, status=status.HTTP_200_OK)
class ProfileList(APIView):
def get(self, request, format=None):
all_profile = Profile.objects.all()
serializers = ProfileSerializer(all_profile, many=True)
return Response(serializers.data, status=status.HTTP_200_OK)
#class ProjectDescription(APIView):
# permission_classes = (IsAdminOrReadOnly,)
# def get_merch(self, pk):
# try:
# return ProjectMerch.objects.get(pk=pk)
# except ProjectMerch.DoesNotExist:
# return Http404
# def get(self, request, pk, format=None):
# merch = self.get_merch(pk)
# serializers = ProjectSerializer(merch)
# return Response(serializers.data)
@login_required(login_url='/accounts/login/')
def user_profiles(request):
current_user = request.user
Owner = current_user
projects = Projects.get_by_owner(Owner)
if request.method == 'POST':
form = ProfileUpdateForm(request.POST, request.FILES, instance=request.user.profile)
if form.is_valid():
profile = form.save(commit=False)
profile.save()
return redirect('profile')
else:
form = ProfileUpdateForm()
return render(request, 'profiles/profile.html', {"form":form, "projects":projects})
@login_required(login_url='/accounts/login/')
def search_results(request):
if 'project' in request.GET and request.GET["project"]:
search_term = request.GET.get("project")
searched_projects = Projects.search_by_title(search_term)
message = f"{search_term}"
return render(request, 'online/search.html',{"message":message,"projects": searched_projects})
else:
message = ",Have an amazing website you want to showcase ,post it here"
return render(request, 'online/search.html',{"message":message})
def get_project(request, id):
try:
project = Projects.objects.get(pk = id)
except ObjectDoesNotExist:
raise Http404()
return render(request, "online/projects.html", {"project":project})
|
from ListNode import ListNode
# URL:
# https://leetcode.com/problems/swap-nodes-in-pairs/
# Question:
# Given a linked list, swap every two adjacent nodes and return its head.
# You may not modify the values in the list's nodes. Only nodes itself may be changed.
class SwapNodesInPair:
# Runtime: 24 ms
# Memory: 14.1 MB
@classmethod
def my_solution(cls, head: ListNode) -> ListNode:
if head and head.next:
temp = head.next
head.next = cls.my_solution(temp.next)
temp.next = head
return temp
return head
# Runtime: 24 ms
# Memory: 14.1 MB
@classmethod
def my_solution2(cls, head: ListNode) -> ListNode:
root = prev = ListNode(None)
prev.next = head
while head and head.next:
temp = head.next
head.next = temp.next
temp.next = head
prev.next = temp
head = head.next
prev = prev.next.next
return root.next
def main():
list2 = ListNode.list2linkedlist([1, 2, 3, 4])
answer = ListNode.list2linkedlist([2, 1, 4, 3])
res = SwapNodesInPair.my_solution2(list2)
print(ListNode.linkedlist2list(res))
if __name__ == "__main__":
main() |
import json
import os
import shutil
import tempfile
from datetime import timedelta
from unittest import mock
from unittest import TestCase
from pytuber.storage import Registry
class RegistryTests(TestCase):
def tearDown(self):
Registry.clear()
Registry._obj = {}
def test_singleton(self):
a = Registry()
b = Registry()
self.assertIs(a, b)
a[1] = 2
self.assertEqual({1: 2}, b)
def test_set(self):
Registry.set(1, 2, 3, 4, 5)
self.assertEqual({1: {2: {3: {4: 5}}}}, Registry())
Registry.set(1, 3, 5)
self.assertEqual({1: {2: {3: {4: 5}}, 3: 5}}, Registry())
def test_get(self):
Registry.set(1, 2, 3, 4, 5)
self.assertEqual({4: 5}, Registry.get(1, 2, 3))
with self.assertRaises(KeyError):
Registry.get(2)
def test_clear(self):
Registry.set(1, 2, 3, 4, 5)
self.assertEqual({4: 5}, Registry.get(1, 2, 3))
Registry.clear()
self.assertEqual({}, Registry())
def test_from_file(self):
try:
tmp = tempfile.mkdtemp()
file_path = os.path.join(tmp, "foo.json")
with open(file_path, "w") as fp:
json.dump({"a": True}, fp)
Registry.from_file(file_path)
self.assertEqual({"a": True}, Registry())
Registry.set("a", False)
self.assertFalse(Registry.get("a"))
Registry.from_file(file_path)
self.assertFalse(Registry.get("a"))
finally:
shutil.rmtree(tmp)
def test_persist(self):
try:
Registry.set(1, 2, 3, 4)
tmp = tempfile.mkdtemp()
file_path = os.path.join(tmp, "foo.json")
Registry.persist(file_path)
Registry.set(1, 2, 3, 5)
Registry._obj = {}
Registry.from_file(file_path)
self.assertEqual({"1": {"2": {"3": 4}}}, Registry())
finally:
shutil.rmtree(tmp)
@mock.patch("pytuber.storage.time.time")
def test_cache(self, time):
time.side_effect = [10, 20.1, 20.1, 20.5, 20.8]
def callme(ttl, value, refresh=False):
return Registry.cache(
key="foo",
ttl=timedelta(seconds=ttl),
func=lambda: value,
refresh=refresh,
)
self.assertEqual("first", callme(10, "first"))
self.assertEqual(("first", 20.0), Registry.get("foo"))
self.assertEqual("second", callme(1, "second"))
self.assertEqual(("second", 21.1), Registry.get("foo"))
self.assertEqual("second", callme(1, "third"))
self.assertEqual(("second", 21.1), Registry.get("foo"))
self.assertEqual("third", callme(100, "third", refresh=True))
self.assertEqual(("third", 120.8), Registry.get("foo"))
self.assertEqual(5, time.call_count)
|
import argparse
import os
import logging
import time
import shutil
import sys
import subprocess
from typing import cast, Iterable
from powar.module_config import ModuleConfigManager
from powar.global_config import GlobalConfigManager, GlobalConfig
from powar.settings import AppSettings, AppMode, AppLogLevel
from powar.util import realpath, UserError
LOGGING_FORMAT = "%(levelname)s: %(message)s"
logger = logging.getLogger(__name__)  # module-level logger used by the helper functions below
ROOT_FLAGS = ("--root", "-r")
def parse_args_into(app_settings: AppSettings) -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument(
"--dry-run",
dest="dry_run",
help="don't modify any files, just show what would be done",
action="store_true",
)
parser.add_argument(
"--template-dir",
dest="template_dir",
help="use a custom directory for templates",
)
parser.add_argument(
"--config-dir",
dest="config_dir",
help="use a custom directory for configuration",
)
parser.add_argument(
*ROOT_FLAGS,
dest="switch_to_root",
action="store_true",
help=
"run powar in sudo mode to be able to install files in places outside $HOME",
)
group = parser.add_mutually_exclusive_group()
group.add_argument(
"-q",
"--quiet",
help="supress output",
action="store_const",
dest="log_level",
const=AppLogLevel.QUIET,
)
group.add_argument(
"-v",
"--verbose",
help="be verbose",
action="store_const",
dest="log_level",
const=AppLogLevel.VERBOSE,
)
subparsers = parser.add_subparsers(
help="mode to use",
dest="mode command",
)
# Install mode
parser_install = subparsers.add_parser(
"install",
help="install specified modules (empty argument installs all modules)",
)
parser_install.set_defaults(mode=AppMode.INSTALL)
parser_install.add_argument(
"modules_to_consider",
nargs="*",
metavar="MODULE",
help="module(s) to install (empty argument installs all modules)",
)
# New module mode
parser_new = subparsers.add_parser("new", help="create a new powar module")
parser_new.set_defaults(mode=AppMode.NEW_MODULE)
parser_new.add_argument(
"new_module_name",
metavar="MODULE_NAME",
help="name of the new module to be created",
)
# Init mode
parser_init = subparsers.add_parser(
"init",
help="create the folders required for powar",
)
parser_init.set_defaults(mode=AppMode.INIT)
parser.parse_args(namespace=app_settings)
return parser
def run_init(app_settings: AppSettings) -> None:
os.makedirs(app_settings.config_dir, exist_ok=True)
global_config_path = os.path.join(
app_settings.config_dir,
app_settings.global_config_filename,
)
if not os.path.exists(global_config_path):
shutil.copy(
os.path.join(
app_settings.data_path,
app_settings.global_config_template_filename,
),
global_config_path,
)
print(f"{global_config_path} created.")
else:
logger.warn(f"{global_config_path} exists, skipping.")
try:
os.makedirs(app_settings.template_dir)
print(f"{app_settings.template_dir}/ created.")
except FileExistsError:
logger.warn(f"{app_settings.template_dir} exists, skipping.")
def run_new_module(app_settings: AppSettings) -> None:
if not os.path.exists(app_settings.template_dir):
raise UserError(f"{app_settings.template_dir} doesn't exist.")
module_dir = os.path.join(
app_settings.template_dir,
app_settings.new_module_name,
)
try:
os.makedirs(module_dir)
except FileExistsError:
raise UserError(f"{module_dir} already exists.")
module_config_path = os.path.join(
module_dir,
app_settings.module_config_filename,
)
shutil.copy(
os.path.join(
app_settings.data_path,
app_settings.module_config_template_filename,
),
module_config_path,
)
print(f"{module_config_path} created.")
def run_install(app_settings: AppSettings, module_directories: Iterable[str],
global_config: GlobalConfig) -> None:
for directory in module_directories:
manager = ModuleConfigManager(directory, global_config, app_settings)
manager.run()
def main() -> None:
app_settings = AppSettings()
parser = parse_args_into(app_settings)
# set logging level from arguments
logging.basicConfig(
level=app_settings.log_level.into_logging_level(),
format=LOGGING_FORMAT,
)
logger = logging.getLogger(__name__)
# resolve $VARIABLES and ~, ensure absolute
dirs_to_resolve = ("template_dir", "config_dir", "cache_dir")
for var in dirs_to_resolve:
app_settings[var] = realpath(app_settings[var])
if not os.path.isabs(app_settings[var]):
parser.error(f"{var} needs to be absolute")
try:
if app_settings.mode == AppMode.INIT:
return run_init(app_settings)
if app_settings.mode == AppMode.NEW_MODULE:
return run_new_module(app_settings)
# cache_man = CacheManager(app_settings.cache_dir)
global_config = GlobalConfigManager(
app_settings.config_dir,
app_settings,
).get_global_config()
directories = [
os.path.join(app_settings.template_dir, module)
for module in global_config.modules
]
# Main logic
if app_settings.mode == AppMode.INSTALL:
if not directories:
return logger.info("No files to install, exiting.")
return run_install(app_settings, directories, global_config)
except UserError as error:
for arg in error.args:
logger.error(arg)
|
import glob
import json
import os.path
import time
from json import loads
from uuid import uuid4
import boto3
from kafka import KafkaConsumer
consumer = KafkaConsumer(
bootstrap_servers="localhost:9092",
value_deserializer=lambda x: loads(x),
group_id='Pintrestdata_{}'.format(uuid4()),
auto_offset_reset="earliest",
# max_poll_records=5,
enable_auto_commit=True
)
consumer.subscribe(topics=["Pinterest_data"])
def del_local_files():
files = glob.glob('*.json')
for file in files:
os.remove(file)
def batch_consumer():
for message in consumer:
batch_message = message.value
i = 0
while os.path.exists(f'batch_data_{i}.json'):
i += 1
with open(f'batch_data_{i}.json', 'w') as file:
json.dump(batch_message, file, indent=4)
boto3.client("s3").upload_file(f'batch_data_{i}.json', 'ai-core-bucket', f'batch_data_{i}.json')
time.sleep(0.5)
# clean up the local copies before exiting, otherwise this call is unreachable
del_local_files()
exit()
def get_messages(num_messages_to_consume):
messages = []
while len(messages) < num_messages_to_consume:
record = next(consumer)
line = record.value
messages.append(line)
consumer.commit()
return messages
def save_messages():
i = 0
while os.path.exists(f'batch_data_{i}.json'):
i += 1
with open(f'batch_data_{i}.json', 'w') as file:
json.dump(get_messages(10), file, indent=4)
if __name__ == '__main__':
batch_consumer()
|
import contextlib
import numpy as np
from . import base
import config
class State(base.StateWithSprings):
pass
class Robot(base.Robot):
"""
A simulated version of the robot that requires no connected hardware
"""
@classmethod
@contextlib.contextmanager
def connect(cls):
yield cls()
def __init__(self):
self._adc_reading = config.adc_0
self._servo_angle = np.zeros(3)
@property
def state(self):
return State(adc_reading=self._adc_reading, servo_angle=self._servo_angle)
@property
def servo_angle(self):
return self._servo_angle
@servo_angle.setter
def servo_angle(self, value):
self._servo_angle = value
@base.Robot.angle_error.setter
def angle_error(self, value):
""" for debugging """
self._adc_reading = value / config.rad_per_adc + config.adc_0
@property
def target_adc_reading(self): raise NotImplementedError
@target_adc_reading.setter
def target_adc_reading(self, value): raise NotImplementedError
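# Illustrative usage sketch (assumes the State/Robot base classes expose the attributes used above):
# with Robot.connect() as robot:
#     robot.servo_angle = np.array([0.1, 0.2, 0.3])
#     print(robot.state.servo_angle)  # -> [0.1 0.2 0.3]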
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 19 11:11:42 2021
@author: Abdelrahman
"""
import torch
from torch import nn, optim
from torchvision import transforms
import numpy as np
import matplotlib.pyplot as plt
from torchvision import datasets, models
from Model import Net
mean = np.array([0.70756066,0.59119403,0.5465341 ])
std = np.array([0.15324508,0.16183697,0.17682996])
mean_v = np.array([0.69324577,0.56011826,0.5092703 ])
std_v = np.array([0.13990514,0.1405701,0.15759519])
# Use torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') to enable GPU training.
device = torch.device('cpu')
########################################
## Loading Train and validation Data ###
########################################
train_transfrom = transforms.Compose([transforms.Resize((256,256)),
transforms.RandomHorizontalFlip(),
transforms.RandomRotation(10),
transforms.ToTensor(),
transforms.Normalize(torch.Tensor(mean), torch.Tensor(std))])
valid_transfrom = transforms.Compose([transforms.Resize((256,256)),
transforms.ToTensor(),
transforms.Normalize(torch.Tensor(mean_v), torch.Tensor(std_v))])
train_data = datasets.ImageFolder(root = "dataset\\train", transform = train_transfrom)
trainloader = torch.utils.data.DataLoader(train_data,
batch_size=32,
shuffle=True,
num_workers=0)
valid_data = datasets.ImageFolder(root = "dataset\\valid", transform = valid_transfrom)
validloader = torch.utils.data.DataLoader(valid_data,
batch_size=32,
shuffle=True,
num_workers=0)
#################################################
## Defining model, optimizer and loss function ##
#################################################
model = models.resnet50(pretrained = True)
num_ftrs = model.fc.in_features
model.fc = nn.Linear(num_ftrs, 2)
#model = Net()
model.to(device)
criterion = nn.CrossEntropyLoss()
#criterion = nn.HingeEmbeddingLoss()
optimizer = optim.Adagrad(model.parameters(),lr = 0.001)
step_lr_scheduler = optim.lr_scheduler.StepLR(optimizer, step_size = 7, gamma = 0.1)
########################################
############ Training model ############
########################################
def train_model(model, criterion, optimizer, scheduler, SK = True, n_epochs=5):
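    # SK=True trains seborrheic keratosis vs. rest (class index 2 mapped to label 1);
    # SK=False trains melanoma vs. rest (class index 0 mapped to label 1).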
valid_loss_min = np.Inf
val_loss = []
val_acc = []
train_loss = []
train_acc = []
total_step = len(trainloader)
for epoch in range(1, n_epochs+1):
running_loss = 0.0
correct = 0
total=0
print(f'Epoch {epoch}\n')
for batch_idx, (data_, target_) in enumerate(trainloader):
if SK:
target_ = np.array([1 if i == 2 else 0 for i in target_.numpy()])
target_ = torch.tensor(target_.astype(np.longlong))
else:
target_ = np.array([1 if i == 0 else 0 for i in target_.numpy()])
target_ = torch.tensor(target_.astype(np.longlong))
data_, target_ = data_.to(device), target_.to(device)
optimizer.zero_grad()
outputs = model(data_)
loss = criterion(outputs, target_)
loss.backward()
optimizer.step()
running_loss += loss.item()
_,pred = torch.max(outputs, dim=1)
correct += torch.sum(pred==target_).item()
total += target_.size(0)
#scheduler.step()
if (batch_idx) % 20 == 0:
print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'
.format(epoch, n_epochs, batch_idx, total_step, loss.item()))
train_acc.append(100 * correct / total)
train_loss.append(running_loss/total_step)
print(f'\ntrain-loss: {np.mean(train_loss):.4f}, train-acc: {(100 * correct/total):.4f}')
batch_loss = 0
total_v=0
correct_v=0
with torch.no_grad():
model.eval()
for data_v, target_v in (validloader):
if SK:
target_v = np.array([1 if i == 2 else 0 for i in target_v.numpy()])
target_v = torch.tensor(target_v.astype(np.longlong))
else:
target_v = np.array([1 if i == 0 else 0 for i in target_v.numpy()])
target_v = torch.tensor(target_v.astype(np.longlong))
data_v, target_v = data_v.to(device), target_v.to(device)
outputs_v = model(data_v)
loss_v = criterion(outputs_v, target_v)
batch_loss += loss_v.item()
_,pred_v = torch.max(outputs_v, dim=1)
correct_v += torch.sum(pred_v==target_v).item()
total_v += target_v.size(0)
val_acc.append(100 * correct_v/total_v)
val_loss.append(batch_loss/len(validloader))
network_learned = batch_loss < valid_loss_min
print(f'validation loss: {np.mean(val_loss):.4f}, validation acc: {(100 * correct_v/total_v):.4f}\n')
if network_learned:
valid_loss_min = batch_loss
if SK:
torch.save(model.state_dict(), 'resnet_SK.pt')
else:
torch.save(model.state_dict(), 'resnet_MM.pt')
print('Improvement-Detected, save-model')
model.train()
return train_loss, val_loss
##################################################
## seborrheic_keratosis vs. rest classification ##
##################################################
train_loss, val_loss = train_model(model,
criterion,
optimizer,
step_lr_scheduler,
SK = True,
n_epochs=2)
########################################
#### Plotting Train-Validation Loss ####
########################################
fig = plt.figure(figsize=(20,10))
plt.title("Train-Validation Loss")
plt.plot(train_loss, label='train')
plt.plot(val_loss, label='validation')
plt.xlabel('num_epochs', fontsize=12)
plt.ylabel('loss', fontsize=12)
plt.legend(loc='best')
##################################################
######## melanoma vs. rest classification ########
##################################################
train_loss, val_loss = train_model(model,
criterion,
optimizer,
step_lr_scheduler,
SK = False,
n_epochs=5)
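##################################################
###### Reloading a saved checkpoint (sketch) #####
##################################################
# Illustrative only: reload the best seborrheic-keratosis weights saved above
# ('resnet_SK.pt') for evaluation; the architecture must match training.
#model_sk = models.resnet50(pretrained=False)
#model_sk.fc = nn.Linear(model_sk.fc.in_features, 2)
#model_sk.load_state_dict(torch.load('resnet_SK.pt', map_location=device))
#model_sk.eval()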
|
#!/usr/bin/env python2
import ConfigParser
class configuration():
def __init__(self, configuration_file):
self.configuration_file = configuration_file
def get_options(self):
result_dict = {}
Config = ConfigParser.ConfigParser()
Config.read(self.configuration_file)
for section in Config.sections():
for option in Config.options(section):
result_dict[option] = Config.get(section, option)
return result_dict
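if __name__ == '__main__':
    # Usage sketch (illustrative): 'settings.ini' is an assumed file name;
    # a missing file simply yields an empty dictionary.
    print configuration('settings.ini').get_options()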
|
from itemmultipliers import equipmentMultipliers as equipMultipliers
levelMultiplier = {
'hp': 3, 'sp': 0, 'atk': 2, 'def': 2, 'mag': 2, 'mnd': 2, 'spd': 2
}
statOrder = [
'hp', 'sp', 'tp', 'atk', 'def', 'mag', 'mnd', 'spd', 'eva',
'fir', 'cld', 'wnd', 'ntr', 'mys', 'spi'
]
def readAsInteger(f, nbytes):
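    # Read nbytes from f and interpret them as a big-endian unsigned integer.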
data = f.read(nbytes)
result = 0
for i, c in enumerate(data):
power = nbytes - i - 1
result += (256**power) * c
return result
class Character:
def __init__(self, name, growths, affinities, expValue):
self.name = name
self.expValue = expValue
self.filename = name
if (name == "Meiling"): self.filename = "Meirin"
elif (name == "Tenshi"): self.filename = "Tenko"
elif (name == "Reisen"): self.filename = "Udonge"
elif (name == "Rinnosuke"): self.filename = "KourinB"
elif (name == "Maribel"): self.filename = "MaribelB"
elif (name == "Shikieiki"): self.filename = "Eiki"
self.growths = growths
self.affinities = affinities
self.computeMap = {
"hp": self.computeHP,
"sp": self.computeSP,
"atk": self.computeATK,
"def": self.computeDEF,
"mag": self.computeMAG,
"mnd": self.computeMND,
"spd": self.computeSPD,
}
def computeStat(self, stat):
if (stat in self.computeMap):
return self.computeMap[stat]()
return self.computeAffinity(stat)
def computeHP(self):
mult = self.computeMultiplier('hp')
return int((((self.level + 6) * self.growths['hp']) + 12) * mult / 100)
def computeSP(self):
mult = self.computeMultiplier('sp')
return int(((self.level * self.growths['sp'] / 8) + 100) * mult / 100)
def computeATK(self):
mult = self.computeMultiplier('atk')
return int((((self.level + 4) * self.growths['atk']) + 3) * mult / 100)
def computeDEF(self):
mult = self.computeMultiplier('def')
return int((((self.level + 4) * self.growths['def']) + 1) * mult / 100)
def computeMAG(self):
mult = self.computeMultiplier('mag')
return int((((self.level + 4) * self.growths['mag']) + 2) * mult / 100)
def computeMND(self):
mult = self.computeMultiplier('mnd')
return int((((self.level + 4) * self.growths['mnd']) + 1) * mult / 100)
def computeSPD(self):
mult = self.computeMultiplier('spd')
return int((self.level * self.growths['spd'] / 32) * mult / 100) + 100
def computeAffinity(self, name):
base = self.affinities[name]
equips = 0
for i in range(3):
if (self.equips[i] < 150 and name in equipMultipliers[self.equips[i]]):
equips += equipMultipliers[self.equips[i]][name]
return base + equips + (2 * self.bonus[name]) + (3 * self.skills[name])
def computeMultiplier(self, stat):
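        # Percentage multiplier: base 100 (84 for Remilia's ATK) plus level,
        # bonus points, library (skill) levels and equipment contributions.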
base = 100 if (self.name != 'Remilia' or stat != 'atk') else 84
base += (self.level - 1) * levelMultiplier[stat]
base += self.bonus[stat] * (2 if stat != 'sp' else 1)
base += self.skills[stat] * (4 if stat != 'sp' else 1)
for i in range(3):
if (self.equips[i] < 150 and stat in equipMultipliers[self.equips[i]]):
base += equipMultipliers[self.equips[i]][stat]
return base
def reverseStatBonus(self, multiplier, stat):
base = 100 if (self.name != 'Remilia' or stat != 'atk') else 84
base += (self.level - 1) * levelMultiplier[stat]
base += self.skills[stat] * (4 if stat != 'sp' else 1)
for i in range(3):
if (self.equips[i] < 150 and stat in equipMultipliers[self.equips[i]]):
base += equipMultipliers[self.equips[i]][stat]
divider = 2 if (stat != 'sp') else 1
return int((multiplier - base) / divider)
def reverseAffinityBonus(self, multiplier, name):
result = multiplier
result -= self.affinities[name]
result -= (3 * self.skills[name])
for i in range(3):
if (self.equips[i] < 150 and name in equipMultipliers[self.equips[i]]):
result -= equipMultipliers[self.equips[i]][name]
return int(result / 2)
def loadSave(self, data):
self.level = readAsInteger(data, 2)
self.exp = readAsInteger(data, 4)
self.totalExp = self.exp
for i in range(1, self.level):
base = (i + 1) * 24.0
levelBonus = 1.0 + (i * 0.5)
required = int(int(base * levelBonus) * self.expValue / 100.0)
self.totalExp += required
self.tp = readAsInteger(data, 1)
multipliers = [readAsInteger(data, 2) for i in range(8)]
self.evaMultiplier = multipliers[5]
affinities = [readAsInteger(data, 2) for i in range(6)]
skills = [readAsInteger(data, 2) for i in range(15)]
self.garbage = data.read(8)
self.bp = readAsInteger(data, 2)
self.sprec = readAsInteger(data, 1)
self.resistances = data.read(5)
self.equips = [readAsInteger(data, 1) for i in range(3)]
self.skills = {}
for i, s in enumerate(statOrder):
self.skills[s] = skills[i] - 1
self.bonus = {
'hp': self.reverseStatBonus(multipliers[6], 'hp'),
'sp': self.reverseStatBonus(multipliers[7], 'sp'),
'atk': self.reverseStatBonus(multipliers[0], 'atk'),
'def': self.reverseStatBonus(multipliers[1], 'def'),
'mag': self.reverseStatBonus(multipliers[2], 'mag'),
'mnd': self.reverseStatBonus(multipliers[3], 'mnd'),
'spd': self.reverseStatBonus(multipliers[4], 'spd'),
'fir': self.reverseAffinityBonus(affinities[0], 'fir'),
'cld': self.reverseAffinityBonus(affinities[1], 'cld'),
'wnd': self.reverseAffinityBonus(affinities[2], 'wnd'),
'ntr': self.reverseAffinityBonus(affinities[3], 'ntr'),
'mys': self.reverseAffinityBonus(affinities[4], 'mys'),
'spi': self.reverseAffinityBonus(affinities[5], 'spi')
}
def saveSave(self, data):
data.write(self.level.to_bytes(2, 'big'))
data.write(self.exp.to_bytes(4, 'big'))
data.write(self.tp.to_bytes(1, 'big'))
multipliers = [
self.computeMultiplier('atk'),
self.computeMultiplier('def'),
self.computeMultiplier('mag'),
self.computeMultiplier('mnd'),
self.computeMultiplier('spd'),
self.evaMultiplier,
self.computeMultiplier('hp'),
self.computeMultiplier('sp'),
]
for m in multipliers:
data.write(m.to_bytes(2, 'big'))
affinities = [
self.computeAffinity('fir'),
self.computeAffinity('cld'),
self.computeAffinity('wnd'),
self.computeAffinity('ntr'),
self.computeAffinity('mys'),
self.computeAffinity('spi'),
]
for a in affinities:
data.write(a.to_bytes(2, 'big'))
for s in statOrder:
data.write((self.skills[s] + 1).to_bytes(2, 'big'))
data.write(self.garbage)
data.write(self.bp.to_bytes(2, 'big'))
data.write(self.sprec.to_bytes(1, 'big'))
data.write(self.resistances)
for e in self.equips:
data.write(e.to_bytes(1, 'big'))
def computeRemaining(self):
remaining = self.level - 1
for stat in self.bonus:
remaining -= self.bonus[stat]
return remaining
def exportAsString(self):
result = "===== {} =====\n".format(self.name)
result += "Level: {}\n".format(self.level)
result += "EXP: {}\n".format(self.exp)
result += "BP: {}\n".format(self.bp)
result += "Bonus HP: {}\n".format(self.bonus['hp'])
result += "Bonus SP: {}\n".format(self.bonus['sp'])
result += "Bonus ATK: {}\n".format(self.bonus['atk'])
result += "Bonus DEF: {}\n".format(self.bonus['def'])
result += "Bonus MAG: {}\n".format(self.bonus['mag'])
result += "Bonus MND: {}\n".format(self.bonus['mnd'])
result += "Bonus SPD: {}\n".format(self.bonus['spd'])
result += "Bonus FIR: {}\n".format(self.bonus['fir'])
result += "Bonus CLD: {}\n".format(self.bonus['cld'])
result += "Bonus WND: {}\n".format(self.bonus['wnd'])
result += "Bonus NTR: {}\n".format(self.bonus['ntr'])
result += "Bonus MYS: {}\n".format(self.bonus['mys'])
result += "Bonus SPI: {}\n".format(self.bonus['spi'])
result += "Library HP: {}\n".format(self.skills['hp'] + 1)
result += "Library SP: {}\n".format(self.skills['sp'] + 1)
result += "Library ATK: {}\n".format(self.skills['atk'] + 1)
result += "Library DEF: {}\n".format(self.skills['def'] + 1)
result += "Library MAG: {}\n".format(self.skills['mag'] + 1)
result += "Library MND: {}\n".format(self.skills['mnd'] + 1)
result += "Library SPD: {}\n".format(self.skills['spd'] + 1)
result += "Library FIR: {}\n".format(self.skills['fir'] + 1)
result += "Library CLD: {}\n".format(self.skills['cld'] + 1)
result += "Library WND: {}\n".format(self.skills['wnd'] + 1)
result += "Library NTR: {}\n".format(self.skills['ntr'] + 1)
result += "Library MYS: {}\n".format(self.skills['mys'] + 1)
result += "Library SPI: {}\n".format(self.skills['spi'] + 1)
result += "Equip 1: {}\n".format(self.equips[0])
result += "Equip 2: {}\n".format(self.equips[1])
result += "Equip 3: {}\n".format(self.equips[2])
return result
characterArray = [
Character(
'Reimu',
{'hp': 12, 'sp': 20, 'atk': 8, 'def': 6, 'mag': 9, 'mnd': 9, 'spd': 8},
{'fir': 110, 'cld': 106, 'wnd': 114, 'ntr': 105, 'mys': 77, 'spi': 148},
90
),
Character(
'Marisa',
{'hp': 9, 'sp': 26, 'atk': 3, 'def': 5, 'mag': 13, 'mnd': 12, 'spd': 11},
{'fir': 89, 'cld': 95, 'wnd': 96, 'ntr': 93, 'mys': 167, 'spi': 144},
110
),
Character(
'Remilia',
{'hp': 19, 'sp': 8, 'atk': 16, 'def': 10, 'mag': 4, 'mnd': 9, 'spd': 12},
{'fir': 121, 'cld': 122, 'wnd': 128, 'ntr': 125, 'mys': 96, 'spi': 77},
132
),
Character(
'Sakuya',
{'hp': 15, 'sp': 14, 'atk': 11, 'def': 8, 'mag': 5, 'mnd': 7, 'spd': 10},
{'fir': 110, 'cld': 110, 'wnd': 110, 'ntr': 110, 'mys': 110, 'spi': 110},
104
),
Character(
'Patchouli',
{'hp': 6, 'sp': 30, 'atk': 2, 'def': 2, 'mag': 16, 'mnd': 17, 'spd': 5},
{'fir': 132, 'cld': 136, 'wnd': 138, 'ntr': 134, 'mys': 173, 'spi': 102},
128
),
Character(
'Chen',
{'hp': 8, 'sp': 5, 'atk': 8, 'def': 4, 'mag': 4, 'mnd': 4, 'spd': 13},
{'fir': 98, 'cld': 51, 'wnd': 96, 'ntr': 97, 'mys': 105, 'spi': 103},
70
),
Character(
'Meiling',
{'hp': 17, 'sp': 6, 'atk': 7, 'def': 9, 'mag': 4, 'mnd': 6, 'spd': 7},
{'fir': 136, 'cld': 144, 'wnd': 138, 'ntr': 140, 'mys': 105, 'spi': 103},
94
),
Character(
'Cirno',
{'hp': 11, 'sp': 15, 'atk': 8, 'def': 5, 'mag': 8, 'mnd': 4, 'spd': 9},
{'fir': 35, 'cld': 176, 'wnd': 114, 'ntr': 105, 'mys': 95, 'spi': 91},
84
),
Character(
'Minoriko',
{'hp': 10, 'sp': 16, 'atk': 3, 'def': 3, 'mag': 8, 'mnd': 9, 'spd': 7},
{'fir': 50, 'cld': 56, 'wnd': 163, 'ntr': 196, 'mys': 100, 'spi': 104},
88
),
Character(
'Youmu',
{'hp': 16, 'sp': 5, 'atk': 12, 'def': 9, 'mag': 2, 'mnd': 2, 'spd': 7},
{'fir': 110, 'cld': 106, 'wnd': 114, 'ntr': 105, 'mys': 84, 'spi': 132},
105
),
Character(
'Alice',
{'hp': 12, 'sp': 22, 'atk': 6, 'def': 7, 'mag': 12, 'mnd': 10, 'spd': 8},
{'fir': 118, 'cld': 114, 'wnd': 117, 'ntr': 113, 'mys': 126, 'spi': 112},
106
),
Character(
'Rumia',
{'hp': 9, 'sp': 16, 'atk': 4, 'def': 5, 'mag': 9, 'mnd': 7, 'spd': 6},
{'fir': 96, 'cld': 102, 'wnd': 103, 'ntr': 99, 'mys': 192, 'spi': 67},
86
),
Character(
'Wriggle',
{'hp': 14, 'sp': 16, 'atk': 10, 'def': 7, 'mag': 6, 'mnd': 7, 'spd': 8},
{'fir': 61, 'cld': 73, 'wnd': 145, 'ntr': 157, 'mys': 110, 'spi': 109},
94
),
Character(
'Yuugi',
{'hp': 16, 'sp': 7, 'atk': 17, 'def': 12, 'mag': 1, 'mnd': 3, 'spd': 7},
{'fir': 138, 'cld': 72, 'wnd': 136, 'ntr': 75, 'mys': 137, 'spi': 73},
115
),
Character(
'Aya',
{'hp': 12, 'sp': 16, 'atk': 13, 'def': 7, 'mag': 6, 'mnd': 6, 'spd': 14},
{'fir': 102, 'cld': 104, 'wnd': 201, 'ntr': 106, 'mys': 77, 'spi': 80},
108
),
Character(
'Iku',
{'hp': 13, 'sp': 16, 'atk': 6, 'def': 6, 'mag': 11, 'mnd': 11, 'spd': 7},
{'fir': 114, 'cld': 112, 'wnd': 181, 'ntr': 109, 'mys': 101, 'spi': 102},
104
),
Character(
'Komachi',
{'hp': 28, 'sp': 10, 'atk': 14, 'def': 3, 'mag': 3, 'mnd': 2, 'spd': 8},
{'fir': 87, 'cld': 82, 'wnd': 84, 'ntr': 90, 'mys': 126, 'spi': 141},
120
),
Character(
'Suwako',
{'hp': 9, 'sp': 11, 'atk': 14, 'def': 6, 'mag': 14, 'mnd': 6, 'spd': 9},
{'fir': 71, 'cld': 141, 'wnd': 68, 'ntr': 142, 'mys': 102, 'spi': 98},
128
),
Character(
'Sanae',
{'hp': 10, 'sp': 21, 'atk': 4, 'def': 5, 'mag': 12, 'mnd': 8, 'spd': 7},
{'fir': 123, 'cld': 133, 'wnd': 101, 'ntr': 94, 'mys': 84, 'spi': 146},
102
),
Character(
'Nitori',
{'hp': 11, 'sp': 15, 'atk': 10, 'def': 5, 'mag': 4, 'mnd': 7, 'spd': 8},
{'fir': 75, 'cld': 169, 'wnd': 113, 'ntr': 167, 'mys': 104, 'spi': 72},
98
),
Character(
'Ran',
{'hp': 14, 'sp': 18, 'atk': 10, 'def': 8, 'mag': 13, 'mnd': 10, 'spd': 10},
{'fir': 173, 'cld': 66, 'wnd': 165, 'ntr': 170, 'mys': 61, 'spi': 192},
116
),
Character(
'Reisen',
{'hp': 12, 'sp': 16, 'atk': 4, 'def': 6, 'mag': 10, 'mnd': 4, 'spd': 9},
{'fir': 118, 'cld': 125, 'wnd': 84, 'ntr': 90, 'mys': 198, 'spi': 50},
108
),
Character(
'Eirin',
{'hp': 15, 'sp': 19, 'atk': 9, 'def': 9, 'mag': 13, 'mnd': 10, 'spd': 8},
{'fir': 147, 'cld': 150, 'wnd': 151, 'ntr': 145, 'mys': 101, 'spi': 168},
136
),
Character(
'Tenshi',
{'hp': 11, 'sp': 14, 'atk': 10, 'def': 12, 'mag': 6, 'mnd': 12, 'spd': 6},
{'fir': 118, 'cld': 125, 'wnd': 119, 'ntr': 114, 'mys': 126, 'spi': 124},
110
),
Character(
'Mokou',
{'hp': 14, 'sp': 14, 'atk': 4, 'def': 7, 'mag': 12, 'mnd': 7, 'spd': 9},
{'fir': 176, 'cld': 71, 'wnd': 140, 'ntr': 136, 'mys': 89, 'spi': 92},
112
),
Character(
'Flandre',
{'hp': 16, 'sp': 16, 'atk': 22, 'def': 5, 'mag': 14, 'mnd': 1, 'spd': 11},
{'fir': 342, 'cld': 28, 'wnd': 61, 'ntr': 54, 'mys': 90, 'spi': 46},
128
),
Character(
'Rin',
{'hp': 10, 'sp': 17, 'atk': 11, 'def': 5, 'mag': 9, 'mnd': 10, 'spd': 11},
{'fir': 180, 'cld': 52, 'wnd': 102, 'ntr': 60, 'mys': 99, 'spi': 160},
106
),
Character(
'Kaguya',
{'hp': 7, 'sp': 20, 'atk': 4, 'def': 6, 'mag': 14, 'mnd': 11, 'spd': 7},
{'fir': 149, 'cld': 156, 'wnd': 151, 'ntr': 146, 'mys': 158, 'spi': 155},
118
),
Character(
'Suika',
{'hp': 16, 'sp': 10, 'atk': 16, 'def': 6, 'mag': 6, 'mnd': 10, 'spd': 9},
{'fir': 69, 'cld': 165, 'wnd': 159, 'ntr': 162, 'mys': 160, 'spi': 60},
130
),
Character(
'Yuyuko',
{'hp': 14, 'sp': 17, 'atk': 4, 'def': 5, 'mag': 12, 'mnd': 13, 'spd': 6},
{'fir': 73, 'cld': 168, 'wnd': 75, 'ntr': 151, 'mys': 99, 'spi': 234},
136
),
Character(
'Yukari',
{'hp': 14, 'sp': 21, 'atk': 6, 'def': 9, 'mag': 13, 'mnd': 12, 'spd': 7},
{'fir': 87, 'cld': 144, 'wnd': 149, 'ntr': 143, 'mys': 89, 'spi': 181},
140
),
Character(
'Rinnosuke',
{'hp': 16, 'sp': 16, 'atk': 16, 'def': 10, 'mag': 12, 'mnd': 10, 'spd': 11},
{'fir': 163, 'cld': 169, 'wnd': 165, 'ntr': 166, 'mys': 167, 'spi': 69},
144
),
Character(
'Renko',
{'hp': 13, 'sp': 11, 'atk': 4, 'def': 6, 'mag': 5, 'mnd': 7, 'spd': 9},
{'fir': 102, 'cld': 105, 'wnd': 100, 'ntr': 101, 'mys': 107, 'spi': 108},
98
),
Character(
'Maribel',
{'hp': 11, 'sp': 16, 'atk': 7, 'def': 5, 'mag': 11, 'mnd': 8, 'spd': 8},
{'fir': 78, 'cld': 132, 'wnd': 135, 'ntr': 132, 'mys': 65, 'spi': 156},
103
),
Character(
'Utsuho',
{'hp': 12, 'sp': 15, 'atk': 8, 'def': 8, 'mag': 14, 'mnd': 8, 'spd': 10},
{'fir': 282, 'cld': 54, 'wnd': 187, 'ntr': 76, 'mys': 186, 'spi': 61},
130
),
Character(
'Kanako',
{'hp': 13, 'sp': 16, 'atk': 8, 'def': 9, 'mag': 13, 'mnd': 9, 'spd': 8},
{'fir': 112, 'cld': 123, 'wnd': 238, 'ntr': 145, 'mys': 100, 'spi': 126},
134
),
Character(
'Yuuka',
{'hp': 14, 'sp': 14, 'atk': 11, 'def': 10, 'mag': 14, 'mnd': 7, 'spd': 8},
{'fir': 76, 'cld': 78, 'wnd': 142, 'ntr': 254, 'mys': 176, 'spi': 120},
134
),
Character(
'Mystia',
{'hp': 12, 'sp': 14, 'atk': 10, 'def': 6, 'mag': 4, 'mnd': 6, 'spd': 10},
{'fir': 90, 'cld': 91, 'wnd': 143, 'ntr': 127, 'mys': 76, 'spi': 88},
85
),
Character(
'Keine',
{'hp': 15, 'sp': 19, 'atk': 11, 'def': 8, 'mag': 7, 'mnd': 7, 'spd': 9},
{'fir': 133, 'cld': 133, 'wnd': 129, 'ntr': 131, 'mys': 140, 'spi': 139},
106
),
Character(
'Shikieiki',
{'hp': 15, 'sp': 15, 'atk': 14, 'def': 5, 'mag': 14, 'mnd': 10, 'spd': 7},
{'fir': 108, 'cld': 171, 'wnd': 105, 'ntr': 112, 'mys': 125, 'spi': 169},
130
),
]
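# Usage sketch (illustrative): compute a stat once a character's save state
# has been filled in. The level/bonus/skills/equips values below are made-up
# examples, not data read from a real save file.
if __name__ == '__main__':
    reimu = characterArray[0]
    reimu.level = 10
    reimu.equips = [150, 150, 150]  # values >= 150 are ignored by computeMultiplier above
    reimu.bonus = {s: 0 for s in statOrder}
    reimu.skills = {s: 0 for s in statOrder}
    print(reimu.name, 'HP at level 10:', reimu.computeStat('hp'))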
|
from GroupCreator import GroupCreator
from Filter import Filter
from Filter import IsIn
from Filter import NotIn
from Filter import IsNot
from Filter import GT
from Filter import GTE
from Filter import LT
from Filter import LTE
from Utils import WILDCARD
# Goal types
from Goal import GroupFilterGoal
from Goal import MinSimilarGoal
from Goal import MaxSimilarGoal
from Goal import MustMatchGoal
from Goal import PodGoal
# Only for testing
from DataBox import DataBox
from Student import Student
# Utilities for user
from Helpers import * |
#!/usr/bin/env python
'''
Python WebSocket library with support for "wss://" encryption.
Copyright 2011 Joel Martin
Licensed under LGPL version 3 (see docs/LICENSE.LGPL-3)
Supports following protocol versions:
- http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-07
- http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-10
- http://tools.ietf.org/html/rfc6455
You can make a cert/key with openssl using:
openssl req -new -x509 -days 365 -nodes -out self.pem -keyout self.pem
as taken from http://docs.python.org/dev/library/ssl.html#certificates
'''
import os, sys, time, errno, signal, socket, select, logging
import array, struct
from base64 import b64encode, b64decode
# Imports that vary by python version
# python 3.0 differences
if sys.hexversion > 0x3000000:
b2s = lambda buf: buf.decode('latin_1')
s2b = lambda s: s.encode('latin_1')
s2a = lambda s: s
else:
b2s = lambda buf: buf # No-op
s2b = lambda s: s # No-op
s2a = lambda s: [ord(c) for c in s]
try: from io import StringIO
except: from cStringIO import StringIO
try: from http.server import SimpleHTTPRequestHandler
except: from SimpleHTTPServer import SimpleHTTPRequestHandler
# python 2.6 differences
try: from hashlib import sha1
except: from sha import sha as sha1
# python 2.5 differences
try:
from struct import pack, unpack_from
except:
from struct import pack
def unpack_from(fmt, buf, offset=0):
slice = buffer(buf, offset, struct.calcsize(fmt))
return struct.unpack(fmt, slice)
# Degraded functionality if these imports are missing
for mod, msg in [('numpy', 'HyBi protocol will be slower'),
('ssl', 'TLS/SSL/wss is disabled'),
('multiprocessing', 'Multi-Processing is disabled'),
('resource', 'daemonizing is disabled')]:
try:
globals()[mod] = __import__(mod)
except ImportError:
globals()[mod] = None
print("WARNING: no '%s' module, %s" % (mod, msg))
if multiprocessing and sys.platform == 'win32':
# make sockets pickle-able/inheritable
import multiprocessing.reduction
# HTTP handler with WebSocket upgrade support
class WebSocketRequestHandler(SimpleHTTPRequestHandler):
"""
WebSocket Request Handler Class, derived from SimpleHTTPRequestHandler.
Must be sub-classed with new_websocket_client method definition.
The request handler can be configured by setting optional
attributes on the server object:
* only_upgrade: If true, SimpleHTTPRequestHandler will not be enabled,
only websocket is allowed.
* verbose: If true, verbose logging is activated.
* daemon: Running as daemon, do not write to console etc
* record: Record raw frame data as JavaScript array into specified filename
* run_once: Handle a single request
* handler_id: A sequence number for this connection, appended to record filename
"""
buffer_size = 65536
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
server_version = "WebSockify"
protocol_version = "HTTP/1.1"
# An exception while the WebSocket client was connected
class CClose(Exception):
pass
def __init__(self, req, addr, server):
# Retrieve a few configuration variables from the server
self.only_upgrade = getattr(server, "only_upgrade", False)
self.verbose = getattr(server, "verbose", False)
self.daemon = getattr(server, "daemon", False)
self.record = getattr(server, "record", False)
self.run_once = getattr(server, "run_once", False)
self.rec = None
self.handler_id = getattr(server, "handler_id", False)
self.file_only = getattr(server, "file_only", False)
self.traffic = getattr(server, "traffic", False)
self.auto_pong = getattr(server, "auto_pong", False)
self.strict_mode = getattr(server, "strict_mode", True)
self.logger = getattr(server, "logger", None)
if self.logger is None:
self.logger = WebSocketServer.get_logger()
SimpleHTTPRequestHandler.__init__(self, req, addr, server)
def log_message(self, format, *args):
self.logger.info("%s - - [%s] %s" % (self.address_string(), self.log_date_time_string(), format % args))
@staticmethod
def unmask(buf, hlen, plen):
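        # XOR the payload with the 4-byte mask that follows the frame header;
        # uses numpy for bulk XOR when available, with a pure-Python fallback.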
pstart = hlen + 4
pend = pstart + plen
if numpy:
b = c = s2b('')
if plen >= 4:
dtype=numpy.dtype('<u4')
if sys.byteorder == 'big':
dtype = dtype.newbyteorder('>')
mask = numpy.frombuffer(buf, dtype, offset=hlen, count=1)
data = numpy.frombuffer(buf, dtype, offset=pstart,
count=int(plen / 4))
#b = numpy.bitwise_xor(data, mask).data
b = numpy.bitwise_xor(data, mask).tostring()
if plen % 4:
#self.msg("Partial unmask")
dtype=numpy.dtype('B')
if sys.byteorder == 'big':
dtype = dtype.newbyteorder('>')
mask = numpy.frombuffer(buf, dtype, offset=hlen,
count=(plen % 4))
data = numpy.frombuffer(buf, dtype,
offset=pend - (plen % 4), count=(plen % 4))
c = numpy.bitwise_xor(data, mask).tostring()
return b + c
else:
# Slower fallback
mask = buf[hlen:hlen+4]
data = array.array('B')
mask = s2a(mask)
data.fromstring(buf[pstart:pend])
for i in range(len(data)):
data[i] ^= mask[i % 4]
return data.tostring()
@staticmethod
def encode_hybi(buf, opcode, base64=False):
""" Encode a HyBi style WebSocket frame.
Optional opcode:
0x0 - continuation
0x1 - text frame (base64 encode buf)
0x2 - binary frame (use raw buf)
0x8 - connection close
0x9 - ping
0xA - pong
"""
if base64:
buf = b64encode(buf)
b1 = 0x80 | (opcode & 0x0f) # FIN + opcode
payload_len = len(buf)
if payload_len <= 125:
header = pack('>BB', b1, payload_len)
elif payload_len > 125 and payload_len < 65536:
header = pack('>BBH', b1, 126, payload_len)
elif payload_len >= 65536:
header = pack('>BBQ', b1, 127, payload_len)
#self.msg("Encoded: %s", repr(header + buf))
return header + buf, len(header), 0
@staticmethod
def decode_hybi(buf, base64=False, logger=None, strict=True):
""" Decode HyBi style WebSocket packets.
Returns:
{'fin' : 0_or_1,
'opcode' : number,
'masked' : boolean,
'hlen' : header_bytes_number,
'length' : payload_bytes_number,
'payload' : decoded_buffer,
'left' : bytes_left_number,
'close_code' : number,
'close_reason' : string}
"""
f = {'fin' : 0,
'opcode' : 0,
'masked' : False,
'hlen' : 2,
'length' : 0,
'payload' : None,
'left' : 0,
'close_code' : 1000,
'close_reason' : ''}
if logger is None:
logger = WebSocketServer.get_logger()
blen = len(buf)
f['left'] = blen
if blen < f['hlen']:
return f # Incomplete frame header
b1, b2 = unpack_from(">BB", buf)
f['opcode'] = b1 & 0x0f
f['fin'] = (b1 & 0x80) >> 7
f['masked'] = (b2 & 0x80) >> 7
f['length'] = b2 & 0x7f
if f['length'] == 126:
f['hlen'] = 4
if blen < f['hlen']:
return f # Incomplete frame header
(f['length'],) = unpack_from('>xxH', buf)
elif f['length'] == 127:
f['hlen'] = 10
if blen < f['hlen']:
return f # Incomplete frame header
(f['length'],) = unpack_from('>xxQ', buf)
full_len = f['hlen'] + f['masked'] * 4 + f['length']
if blen < full_len: # Incomplete frame
return f # Incomplete frame header
# Number of bytes that are part of the next frame(s)
f['left'] = blen - full_len
# Process 1 frame
if f['masked']:
# unmask payload
f['payload'] = WebSocketRequestHandler.unmask(buf, f['hlen'],
f['length'])
else:
logger.debug("Unmasked frame: %s" % repr(buf))
if strict:
raise WebSocketRequestHandler.CClose(1002, "The client sent an unmasked frame.")
f['payload'] = buf[(f['hlen'] + f['masked'] * 4):full_len]
if base64 and f['opcode'] in [1, 2]:
try:
f['payload'] = b64decode(f['payload'])
except:
logger.exception("Exception while b64decoding buffer: %s" %
(repr(buf)))
raise
if f['opcode'] == 0x08:
if f['length'] >= 2:
f['close_code'] = unpack_from(">H", f['payload'])[0]
if f['length'] > 3:
f['close_reason'] = f['payload'][2:]
return f
#
# WebSocketRequestHandler logging/output functions
#
def print_traffic(self, token="."):
""" Show traffic flow mode. """
if self.traffic:
sys.stdout.write(token)
sys.stdout.flush()
def msg(self, msg, *args, **kwargs):
""" Output message with handler_id prefix. """
prefix = "% 3d: " % self.handler_id
self.logger.log(logging.INFO, "%s%s" % (prefix, msg), *args, **kwargs)
def vmsg(self, msg, *args, **kwargs):
""" Same as msg() but as debug. """
prefix = "% 3d: " % self.handler_id
self.logger.log(logging.DEBUG, "%s%s" % (prefix, msg), *args, **kwargs)
def warn(self, msg, *args, **kwargs):
""" Same as msg() but as warning. """
prefix = "% 3d: " % self.handler_id
self.logger.log(logging.WARN, "%s%s" % (prefix, msg), *args, **kwargs)
#
# Main WebSocketRequestHandler methods
#
def send_frames(self, bufs=None):
""" Encode and send WebSocket frames. Any frames already
queued will be sent first. If buf is not set then only queued
frames will be sent. Returns the number of pending frames that
could not be fully sent. If returned pending frames is greater
than 0, then the caller should call again when the socket is
ready. """
tdelta = int(time.time()*1000) - self.start_time
if bufs:
for buf in bufs:
if self.base64:
encbuf, lenhead, lentail = self.encode_hybi(buf, opcode=1, base64=True)
else:
encbuf, lenhead, lentail = self.encode_hybi(buf, opcode=2, base64=False)
if self.rec:
self.rec.write("%s,\n" %
repr("{%s{" % tdelta
+ encbuf[lenhead:len(encbuf)-lentail]))
self.send_parts.append(encbuf)
while self.send_parts:
# Send pending frames
buf = self.send_parts.pop(0)
sent = self.request.send(buf)
if sent == len(buf):
self.print_traffic("<")
else:
self.print_traffic("<.")
self.send_parts.insert(0, buf[sent:])
break
return len(self.send_parts)
def recv_frames(self):
""" Receive and decode WebSocket frames.
Returns:
(bufs_list, closed_string)
"""
closed = False
bufs = []
tdelta = int(time.time()*1000) - self.start_time
buf = self.request.recv(self.buffer_size)
if len(buf) == 0:
closed = {'code': 1000, 'reason': "Client closed abruptly"}
return bufs, closed
if self.recv_part:
# Add partially received frames to current read buffer
buf = self.recv_part + buf
self.recv_part = None
while buf:
frame = self.decode_hybi(buf, base64=self.base64,
logger=self.logger,
strict=self.strict_mode)
#self.msg("Received buf: %s, frame: %s", repr(buf), frame)
            if frame['payload'] is None:
# Incomplete/partial frame
self.print_traffic("}.")
if frame['left'] > 0:
self.recv_part = buf[-frame['left']:]
break
else:
if frame['opcode'] == 0x8: # connection close
closed = {'code': frame['close_code'],
'reason': frame['close_reason']}
break
elif self.auto_pong and frame['opcode'] == 0x9: # ping
self.print_traffic("} ping %s\n" %
repr(frame['payload']))
self.send_pong(frame['payload'])
return [], False
elif frame['opcode'] == 0xA: # pong
self.print_traffic("} pong %s\n" %
repr(frame['payload']))
return [], False
self.print_traffic("}")
if self.rec:
start = frame['hlen']
end = frame['hlen'] + frame['length']
if frame['masked']:
recbuf = WebSocketRequestHandler.unmask(buf, frame['hlen'],
frame['length'])
else:
recbuf = buf[frame['hlen']:frame['hlen'] +
frame['length']]
self.rec.write("%s,\n" %
repr("}%s}" % tdelta + recbuf))
bufs.append(frame['payload'])
if frame['left']:
buf = buf[-frame['left']:]
else:
buf = ''
return bufs, closed
def send_close(self, code=1000, reason=''):
""" Send a WebSocket orderly close frame. """
msg = pack(">H%ds" % len(reason), code, s2b(reason))
buf, h, t = self.encode_hybi(msg, opcode=0x08, base64=False)
self.request.send(buf)
def send_pong(self, data=''):
""" Send a WebSocket pong frame. """
buf, h, t = self.encode_hybi(s2b(data), opcode=0x0A, base64=False)
self.request.send(buf)
def send_ping(self, data=''):
""" Send a WebSocket ping frame. """
buf, h, t = self.encode_hybi(s2b(data), opcode=0x09, base64=False)
self.request.send(buf)
def do_websocket_handshake(self):
h = self.headers
prot = 'WebSocket-Protocol'
protocols = h.get('Sec-'+prot, h.get(prot, '')).split(',')
ver = h.get('Sec-WebSocket-Version')
if ver:
# HyBi/IETF version of the protocol
# HyBi-07 report version 7
# HyBi-08 - HyBi-12 report version 8
# HyBi-13 reports version 13
if ver in ['7', '8', '13']:
self.version = "hybi-%02d" % int(ver)
else:
self.send_error(400, "Unsupported protocol version %s" % ver)
return False
key = h['Sec-WebSocket-Key']
# Choose binary if client supports it
if 'binary' in protocols:
self.base64 = False
elif 'base64' in protocols:
self.base64 = True
else:
self.send_error(400, "Client must support 'binary' or 'base64' protocol")
return False
# Generate the hash value for the accept header
accept = b64encode(sha1(s2b(key + self.GUID)).digest())
self.send_response(101, "Switching Protocols")
self.send_header("Upgrade", "websocket")
self.send_header("Connection", "Upgrade")
self.send_header("Sec-WebSocket-Accept", b2s(accept))
if self.base64:
self.send_header("Sec-WebSocket-Protocol", "base64")
else:
self.send_header("Sec-WebSocket-Protocol", "binary")
self.end_headers()
return True
else:
self.send_error(400, "Missing Sec-WebSocket-Version header. Hixie protocols not supported.")
return False
def handle_websocket(self):
"""Upgrade a connection to Websocket, if requested. If this succeeds,
new_websocket_client() will be called. Otherwise, False is returned.
"""
if (self.headers.get('upgrade') and
self.headers.get('upgrade').lower() == 'websocket'):
# ensure connection is authorized, and determine the target
self.validate_connection()
if not self.do_websocket_handshake():
return False
# Indicate to server that a Websocket upgrade was done
self.server.ws_connection = True
# Initialize per client settings
self.send_parts = []
self.recv_part = None
self.start_time = int(time.time()*1000)
# client_address is empty with, say, UNIX domain sockets
client_addr = ""
is_ssl = False
try:
client_addr = self.client_address[0]
is_ssl = self.client_address[2]
except IndexError:
pass
if is_ssl:
self.stype = "SSL/TLS (wss://)"
else:
self.stype = "Plain non-SSL (ws://)"
self.log_message("%s: %s WebSocket connection", client_addr,
self.stype)
self.log_message("%s: Version %s, base64: '%s'", client_addr,
self.version, self.base64)
if self.path != '/':
self.log_message("%s: Path: '%s'", client_addr, self.path)
if self.record:
# Record raw frame data as JavaScript array
fname = "%s.%s" % (self.record,
self.handler_id)
self.log_message("opening record file: %s", fname)
self.rec = open(fname, 'w+')
encoding = "binary"
if self.base64: encoding = "base64"
self.rec.write("var VNC_frame_encoding = '%s';\n"
% encoding)
self.rec.write("var VNC_frame_data = [\n")
try:
self.new_websocket_client()
except self.CClose:
# Close the client
_, exc, _ = sys.exc_info()
self.send_close(exc.args[0], exc.args[1])
return True
else:
return False
def do_GET(self):
"""Handle GET request. Calls handle_websocket(). If unsuccessful,
and web server is enabled, SimpleHTTPRequestHandler.do_GET will be called."""
if not self.handle_websocket():
if self.only_upgrade:
self.send_error(405, "Method Not Allowed")
else:
SimpleHTTPRequestHandler.do_GET(self)
def list_directory(self, path):
if self.file_only:
self.send_error(404, "No such file")
else:
return SimpleHTTPRequestHandler.list_directory(self, path)
def new_websocket_client(self):
""" Do something with a WebSockets client connection. """
raise Exception("WebSocketRequestHandler.new_websocket_client() must be overloaded")
def validate_connection(self):
""" Ensure that the connection is a valid connection, and set the target. """
pass
def do_HEAD(self):
if self.only_upgrade:
self.send_error(405, "Method Not Allowed")
else:
SimpleHTTPRequestHandler.do_HEAD(self)
def finish(self):
if self.rec:
self.rec.write("'EOF'];\n")
self.rec.close()
def handle(self):
# When using run_once, we have a single process, so
# we cannot loop in BaseHTTPRequestHandler.handle; we
# must return and handle new connections
if self.run_once:
self.handle_one_request()
else:
SimpleHTTPRequestHandler.handle(self)
def log_request(self, code='-', size='-'):
if self.verbose:
SimpleHTTPRequestHandler.log_request(self, code, size)
class WebSocketServer(object):
"""
WebSockets server class.
As an alternative, the standard library SocketServer can be used
"""
policy_response = """<cross-domain-policy><allow-access-from domain="*" to-ports="*" /></cross-domain-policy>\n"""
log_prefix = "websocket"
# An exception before the WebSocket connection was established
class EClose(Exception):
pass
class Terminate(Exception):
pass
def __init__(self, RequestHandlerClass, listen_host='',
listen_port=None, source_is_ipv6=False,
verbose=False, cert='', key='', ssl_only=None,
daemon=False, record='', web='',
file_only=False,
run_once=False, timeout=0, idle_timeout=0, traffic=False,
tcp_keepalive=True, tcp_keepcnt=None, tcp_keepidle=None,
tcp_keepintvl=None, auto_pong=False, strict_mode=True):
# settings
self.RequestHandlerClass = RequestHandlerClass
self.verbose = verbose
self.listen_host = listen_host
self.listen_port = listen_port
self.prefer_ipv6 = source_is_ipv6
self.ssl_only = ssl_only
self.daemon = daemon
self.run_once = run_once
self.timeout = timeout
self.idle_timeout = idle_timeout
self.traffic = traffic
self.file_only = file_only
self.strict_mode = strict_mode
self.launch_time = time.time()
self.ws_connection = False
self.handler_id = 1
self.logger = self.get_logger()
self.tcp_keepalive = tcp_keepalive
self.tcp_keepcnt = tcp_keepcnt
self.tcp_keepidle = tcp_keepidle
self.tcp_keepintvl = tcp_keepintvl
self.auto_pong = auto_pong
# Make paths settings absolute
self.cert = os.path.abspath(cert)
self.key = self.web = self.record = ''
if key:
self.key = os.path.abspath(key)
if web:
self.web = os.path.abspath(web)
if record:
self.record = os.path.abspath(record)
if self.web:
os.chdir(self.web)
self.only_upgrade = not self.web
# Sanity checks
if not ssl and self.ssl_only:
raise Exception("No 'ssl' module and SSL-only specified")
if self.daemon and not resource:
raise Exception("Module 'resource' required to daemonize")
# Show configuration
self.msg("WebSocket server settings:")
self.msg(" - Listen on %s:%s",
self.listen_host, self.listen_port)
self.msg(" - Flash security policy server")
if self.web:
if self.file_only:
self.msg(" - Web server (no directory listings). Web root: %s", self.web)
else:
self.msg(" - Web server. Web root: %s", self.web)
if ssl:
if os.path.exists(self.cert):
self.msg(" - SSL/TLS support")
if self.ssl_only:
self.msg(" - Deny non-SSL/TLS connections")
else:
self.msg(" - No SSL/TLS support (no cert file)")
else:
self.msg(" - No SSL/TLS support (no 'ssl' module)")
if self.daemon:
self.msg(" - Backgrounding (daemon)")
if self.record:
self.msg(" - Recording to '%s.*'", self.record)
#
# WebSocketServer static methods
#
@staticmethod
def get_logger():
return logging.getLogger("%s.%s" % (
WebSocketServer.log_prefix,
WebSocketServer.__class__.__name__))
@staticmethod
def socket(host, port=None, connect=False, prefer_ipv6=False,
unix_socket=None, use_ssl=False, tcp_keepalive=True,
tcp_keepcnt=None, tcp_keepidle=None, tcp_keepintvl=None):
""" Resolve a host (and optional port) to an IPv4 or IPv6
address. Create a socket. Bind to it if listen is set,
otherwise connect to it. Return the socket.
"""
flags = 0
if host == '':
host = None
if connect and not (port or unix_socket):
raise Exception("Connect mode requires a port")
if use_ssl and not ssl:
raise Exception("SSL socket requested but Python SSL module not loaded.");
if not connect and use_ssl:
raise Exception("SSL only supported in connect mode (for now)")
if not connect:
flags = flags | socket.AI_PASSIVE
if not unix_socket:
addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM,
socket.IPPROTO_TCP, flags)
if not addrs:
raise Exception("Could not resolve host '%s'" % host)
addrs.sort(key=lambda x: x[0])
if prefer_ipv6:
addrs.reverse()
sock = socket.socket(addrs[0][0], addrs[0][1])
if tcp_keepalive:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if tcp_keepcnt:
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT,
tcp_keepcnt)
if tcp_keepidle:
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE,
tcp_keepidle)
if tcp_keepintvl:
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL,
tcp_keepintvl)
if connect:
sock.connect(addrs[0][4])
if use_ssl:
sock = ssl.wrap_socket(sock)
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addrs[0][4])
sock.listen(100)
else:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(unix_socket)
return sock
@staticmethod
def daemonize(keepfd=None, chdir='/'):
if keepfd is None:
keepfd = []
os.umask(0)
if chdir:
os.chdir(chdir)
else:
os.chdir('/')
os.setgid(os.getgid()) # relinquish elevations
os.setuid(os.getuid()) # relinquish elevations
# Double fork to daemonize
if os.fork() > 0: os._exit(0) # Parent exits
os.setsid() # Obtain new process group
if os.fork() > 0: os._exit(0) # Parent exits
# Signal handling
signal.signal(signal.SIGTERM, signal.SIG_IGN)
signal.signal(signal.SIGINT, signal.SIG_IGN)
# Close open files
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if maxfd == resource.RLIM_INFINITY: maxfd = 256
for fd in reversed(range(maxfd)):
try:
if fd not in keepfd:
os.close(fd)
except OSError:
_, exc, _ = sys.exc_info()
if exc.errno != errno.EBADF: raise
# Redirect I/O to /dev/null
os.dup2(os.open(os.devnull, os.O_RDWR), sys.stdin.fileno())
os.dup2(os.open(os.devnull, os.O_RDWR), sys.stdout.fileno())
os.dup2(os.open(os.devnull, os.O_RDWR), sys.stderr.fileno())
def do_handshake(self, sock, address):
"""
do_handshake does the following:
- Peek at the first few bytes from the socket.
- If the connection is Flash policy request then answer it,
close the socket and return.
- If the connection is an HTTPS/SSL/TLS connection then SSL
wrap the socket.
- Read from the (possibly wrapped) socket.
- If we have received a HTTP GET request and the webserver
functionality is enabled, answer it, close the socket and
return.
- Assume we have a WebSockets connection, parse the client
handshake data.
- Send a WebSockets handshake server response.
- Return the socket for this WebSocket client.
"""
ready = select.select([sock], [], [], 3)[0]
if not ready:
raise self.EClose("ignoring socket not ready")
# Peek, but do not read the data so that we have a opportunity
# to SSL wrap the socket first
handshake = sock.recv(1024, socket.MSG_PEEK)
#self.msg("Handshake [%s]" % handshake)
if not handshake:
raise self.EClose("ignoring empty handshake")
elif handshake.startswith(s2b("<policy-file-request/>")):
# Answer Flash policy request
handshake = sock.recv(1024)
sock.send(s2b(self.policy_response))
raise self.EClose("Sending flash policy response")
elif handshake[0] in ("\x16", "\x80", 22, 128):
# SSL wrap the connection
if not ssl:
raise self.EClose("SSL connection but no 'ssl' module")
if not os.path.exists(self.cert):
raise self.EClose("SSL connection but '%s' not found"
% self.cert)
retsock = None
try:
retsock = ssl.wrap_socket(
sock,
server_side=True,
certfile=self.cert,
keyfile=self.key)
except ssl.SSLError:
_, x, _ = sys.exc_info()
if x.args[0] == ssl.SSL_ERROR_EOF:
if len(x.args) > 1:
raise self.EClose(x.args[1])
else:
raise self.EClose("Got SSL_ERROR_EOF")
else:
raise
elif self.ssl_only:
raise self.EClose("non-SSL connection received but disallowed")
else:
retsock = sock
# If the address is like (host, port), we are extending it
# with a flag indicating SSL. Not many other options
# available...
if len(address) == 2:
address = (address[0], address[1], (retsock != sock))
self.RequestHandlerClass(retsock, address, self)
# Return the WebSockets socket which may be SSL wrapped
return retsock
#
# WebSocketServer logging/output functions
#
def msg(self, *args, **kwargs):
""" Output message as info """
self.logger.log(logging.INFO, *args, **kwargs)
def vmsg(self, *args, **kwargs):
""" Same as msg() but as debug. """
self.logger.log(logging.DEBUG, *args, **kwargs)
def warn(self, *args, **kwargs):
""" Same as msg() but as warning. """
self.logger.log(logging.WARN, *args, **kwargs)
#
# Events that can/should be overridden in sub-classes
#
def started(self):
""" Called after WebSockets startup """
self.vmsg("WebSockets server started")
def poll(self):
""" Run periodically while waiting for connections. """
#self.vmsg("Running poll()")
pass
def terminate(self):
raise self.Terminate()
def multiprocessing_SIGCHLD(self, sig, stack):
# TODO: figure out a way to actually log this information without
# calling `log` in the signal handlers
multiprocessing.active_children()
def fallback_SIGCHLD(self, sig, stack):
# Reap zombies when using os.fork() (python 2.4)
# TODO: figure out a way to actually log this information without
# calling `log` in the signal handlers
try:
result = os.waitpid(-1, os.WNOHANG)
while result[0]:
self.vmsg("Reaped child process %s" % result[0])
result = os.waitpid(-1, os.WNOHANG)
except (OSError):
pass
def do_SIGINT(self, sig, stack):
# TODO: figure out a way to actually log this information without
# calling `log` in the signal handlers
self.terminate()
def do_SIGTERM(self, sig, stack):
# TODO: figure out a way to actually log this information without
# calling `log` in the signal handlers
self.terminate()
def top_new_client(self, startsock, address):
""" Do something with a WebSockets client connection. """
# handler process
client = None
try:
try:
client = self.do_handshake(startsock, address)
except self.EClose:
_, exc, _ = sys.exc_info()
# Connection was not a WebSockets connection
if exc.args[0]:
self.msg("%s: %s" % (address[0], exc.args[0]))
except WebSocketServer.Terminate:
raise
except Exception:
_, exc, _ = sys.exc_info()
self.msg("handler exception: %s" % str(exc))
self.vmsg("exception", exc_info=True)
finally:
if client and client != startsock:
# Close the SSL wrapped socket
# Original socket closed by caller
client.close()
def get_log_fd(self):
"""
Get file descriptors for the loggers.
They should not be closed when the process is forked.
"""
descriptors = []
for handler in self.logger.parent.handlers:
if isinstance(handler, logging.FileHandler):
descriptors.append(handler.stream.fileno())
return descriptors
def start_server(self):
"""
Daemonize if requested. Listen for for connections. Run
do_handshake() method for each connection. If the connection
is a WebSockets client then call new_websocket_client() method (which must
be overridden) for each new client connection.
"""
lsock = self.socket(self.listen_host, self.listen_port, False,
self.prefer_ipv6,
tcp_keepalive=self.tcp_keepalive,
tcp_keepcnt=self.tcp_keepcnt,
tcp_keepidle=self.tcp_keepidle,
tcp_keepintvl=self.tcp_keepintvl)
if self.daemon:
keepfd = self.get_log_fd()
keepfd.append(lsock.fileno())
self.daemonize(keepfd=keepfd, chdir=self.web)
self.started() # Some things need to happen after daemonizing
# Allow override of signals
original_signals = {
signal.SIGINT: signal.getsignal(signal.SIGINT),
signal.SIGTERM: signal.getsignal(signal.SIGTERM),
signal.SIGCHLD: signal.getsignal(signal.SIGCHLD),
}
signal.signal(signal.SIGINT, self.do_SIGINT)
signal.signal(signal.SIGTERM, self.do_SIGTERM)
if not multiprocessing:
# os.fork() (python 2.4) child reaper
signal.signal(signal.SIGCHLD, self.fallback_SIGCHLD)
else:
# make sure that _cleanup is called when children die
# by calling active_children on SIGCHLD
signal.signal(signal.SIGCHLD, self.multiprocessing_SIGCHLD)
last_active_time = self.launch_time
try:
while True:
try:
try:
startsock = None
pid = err = 0
child_count = 0
if multiprocessing:
# Collect zombie child processes
child_count = len(multiprocessing.active_children())
time_elapsed = time.time() - self.launch_time
if self.timeout and time_elapsed > self.timeout:
self.msg('listener exit due to --timeout %s'
% self.timeout)
break
if self.idle_timeout:
idle_time = 0
if child_count == 0:
idle_time = time.time() - last_active_time
else:
idle_time = 0
last_active_time = time.time()
if idle_time > self.idle_timeout and child_count == 0:
self.msg('listener exit due to --idle-timeout %s'
% self.idle_timeout)
break
try:
self.poll()
ready = select.select([lsock], [], [], 1)[0]
if lsock in ready:
startsock, address = lsock.accept()
else:
continue
except self.Terminate:
raise
except Exception:
_, exc, _ = sys.exc_info()
if hasattr(exc, 'errno'):
err = exc.errno
elif hasattr(exc, 'args'):
err = exc.args[0]
else:
err = exc[0]
if err == errno.EINTR:
self.vmsg("Ignoring interrupted syscall")
continue
else:
raise
if self.run_once:
# Run in same process if run_once
self.top_new_client(startsock, address)
if self.ws_connection :
self.msg('%s: exiting due to --run-once'
% address[0])
break
elif multiprocessing:
self.vmsg('%s: new handler Process' % address[0])
p = multiprocessing.Process(
target=self.top_new_client,
args=(startsock, address))
p.start()
# child will not return
else:
# python 2.4
self.vmsg('%s: forking handler' % address[0])
pid = os.fork()
if pid == 0:
# child handler process
self.top_new_client(startsock, address)
break # child process exits
# parent process
self.handler_id += 1
except (self.Terminate, SystemExit, KeyboardInterrupt):
self.msg("In exit")
# terminate all child processes
if multiprocessing and not self.run_once:
children = multiprocessing.active_children()
for child in children:
self.msg("Terminating child %s" % child.pid)
child.terminate()
break
except Exception:
exc = sys.exc_info()[1]
self.msg("handler exception: %s", str(exc))
self.vmsg("exception", exc_info=True)
finally:
if startsock:
startsock.close()
finally:
# Close listen port
self.vmsg("Closing socket listening at %s:%s",
self.listen_host, self.listen_port)
lsock.close()
# Restore signals
for sig, func in original_signals.items():
signal.signal(sig, func)
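# Usage sketch (illustrative, not part of the original library): a minimal
# echo server built on the classes above; handler name and port are assumptions.
if __name__ == '__main__':
    class EchoHandler(WebSocketRequestHandler):
        def new_websocket_client(self):
            # Echo every received frame back until the client closes.
            while True:
                bufs, closed = self.recv_frames()
                if closed:
                    break
                self.send_frames(bufs)

    logging.basicConfig(level=logging.INFO)
    WebSocketServer(EchoHandler, listen_host='localhost',
                    listen_port=6080).start_server()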
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2014 Mag. Christian Tanzer All rights reserved
# Glasauergasse 32, A--1130 Wien, Austria. [email protected]
# #*** <License> ************************************************************#
# This module is part of the package CNDB.OMP.__test__.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# #*** </License> ***********************************************************#
#
#++
# Name
# CNDB.OMP.__test__.Nodes
#
# Purpose
# Test Node and associations
#
# Revision Dates
# 19-Sep-2012 (RS) Creation
# 24-Sep-2012 (RS) More tests, up to `Net_Interface_in_IP4_Network`
# 11-Oct-2012 (RS) Fix missing `raw` parameter
# 12-Oct-2012 (RS) Add tests for `Node` in role `Subject`
# 16-Oct-2012 (CT) Add tracebacks triggered by `CNDB.Node.refuse_links`
# 17-Dec-2012 (RS) Add tests for attributes of `belongs_to_node`
# 5-Mar-2013 (CT) Adapt to changes in `Net_Interface_in_IP4_Network`
# 7-Mar-2013 (RS) Add test for duplicate network allocation
# 16-Apr-2013 (CT) Add test `auto_children`,
# remove `Node_has_Phone`, `Node_has_Email`
# 17-Apr-2013 (CT) Add tests `owner` and `refuse_e_types`
# 18-Apr-2013 (CT) Add test for `eligible_e_types`,
# `selectable_e_types_unique_epk`
# 7-Aug-2013 (CT) Adapt to major surgery of GTW.OMP.NET.Attr_Type
# 30-Sep-2013 (CT) Adapt to uplift of `belongs_to_node`
# 14-Apr-2014 (CT) Rename `belongs_to_node` to `my_node`
# 13-Jun-2014 (RS) Fixes for new `PAP` objects, `Node` no longer derived
# from `Subject`, addition of `Node.desc`, `ui_name`
# for `desc`
# ««revision-date»»···
#--
from __future__ import absolute_import, division, print_function, unicode_literals
from _CNDB._OMP.__test__.model import *
from _MOM.inspect import children_trans_iter
from datetime import datetime
from rsclib.IP_Address import IP4_Address as R_IP4_Address
from rsclib.IP_Address import IP6_Address as R_IP6_Address
_test_code = """
>>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS
Creating new scope MOMT__...
>>> CNDB = scope.CNDB
>>> PAP = scope.PAP
>>> Adr = str ### XXX CNDB.IP4_Network.net_address.P_Type
>>> mgr = PAP.Person \\
... (first_name = 'Ralf', last_name = 'Schlatterbeck', raw = True)
>>> comp = PAP.Company (name = "Open Source Consulting", raw = True)
>>> node1 = CNDB.Node \\
... (name = "nogps", manager = mgr, position = None, raw = True)
>>> gps1 = dict (lat = "48 d 17 m 9.64 s", lon = "15 d 52 m 27.84 s")
>>> node2 = CNDB.Node \\
... (name = "node2", manager = mgr, position = gps1, raw = True)
>>> adr = PAP.Address \\
... ( street = 'Example 23'
... , zip = '1010'
... , city = 'Wien'
... , country = 'Austria'
... )
>>> node1.address = adr
>>> node1.address
PAP.Address ('example 23', '1010', 'wien', 'austria')
>>> gps2 = dict (lat = "48.367088", lon = "16.187672")
>>> node3 = CNDB.Node \\
... (name = "node3", manager = mgr, owner = comp, position = gps2)
>>> fmt = '%%Y-%%m-%%d %%H:%%M:%%S'
>>> t1 = datetime.strptime ("2009-05-05 17:17:17", fmt)
>>> t2 = datetime.strptime ("2010-05-05 23:23:23", fmt)
>>> scope.ems.convert_creation_change (node3.pid, c_time = t1, time = t2)
>>> node3.creation_date
datetime.datetime(2009, 5, 5, 17, 17, 17)
>>> node3.last_changed
datetime.datetime(2010, 5, 5, 23, 23, 23)
>>> net = CNDB.IP4_Network ('192.168.23.0/24', owner = mgr, raw = True)
>>> a1 = net.reserve (Adr ('192.168.23.1/32'))
>>> a2 = net.reserve (Adr ('192.168.23.2/32'))
>>> a3 = net.reserve (Adr ('192.168.23.3/32'))
>>> a4 = net.reserve (Adr ('192.168.23.4/32'))
>>> ax = net.reserve ('192.168.23.42/32')
>>> ax
CNDB.IP4_Network ("192.168.23.42")
>>> devtype = CNDB.Net_Device_Type.instance_or_new \\
... (name = 'Generic', raw = True)
>>> dev = CNDB.Net_Device \\
... (left = devtype, node = node3, name = 'dev', raw = True)
>>> wr = CNDB.Wired_Interface (left = dev, name = 'wr', raw = True)
>>> wl = CNDB.Wireless_Interface (left = dev, name = 'wl', raw = True)
>>> ir1 = CNDB.Net_Interface_in_IP4_Network (wr, a1, mask_len = 24)
>>> il1 = CNDB.Net_Interface_in_IP4_Network (wl, a2, mask_len = 32)
>>> ir2 = CNDB.Net_Interface_in_IP4_Network (wr, a3, mask_len = 24)
>>> il2 = CNDB.Net_Interface_in_IP4_Network (wl, a4, mask_len = 24)
>>> with expect_except (MOM.Error.Invariants) :
... irx = CNDB.Net_Interface_in_IP4_Network (wr, ax, mask_len = 22) # doctest:+ELLIPSIS
Invariants: Condition `valid_mask_len` : The `mask_len` must match the one of `right` or of any
network containing `right`. (mask_len in possible_mask_lens)
mask_len = 22
possible_mask_lens = [24, 25, 26, 27, 28, 29, 30, 31, 32] << sorted ( right.ETM.query ( (Q.net_address.CONTAINS (right.net_address))).attr ("net_address.mask_len"))
right = 192.168.23.42
right.net_address = ...
>>> net2 = CNDB.IP4_Network (net_address = '10.0.0.0/8', owner = mgr, raw = True)
>>> a2_1 = net2.reserve (Adr ('10.139.187.0/27'))
>>> a2_2 = net2.reserve (Adr ('10.139.187.2'))
>>> with expect_except (CNDB.OMP.Error.Address_Already_Used) :
... a2_f = net2.reserve (Adr ('10.139.187.0/27'))
Address_Already_Used: Address 10.139.187.0/27 already in use by 'Schlatterbeck Ralf'
>>> at1 = CNDB.Antenna_Type \\
... ( name = "Yagi1"
... , desc = "A Yagi"
... , gain = 17.5
... , polarization = "vertical"
... , raw = True
... )
>>> args = dict (left = at1, azimuth = "180", elevation_angle = 0, raw = True)
>>> a = CNDB.Antenna (name = "1", ** args)
>>> wia = CNDB.Wireless_Interface_uses_Antenna (wl, a)
>>> CNDB.Antenna.query (Q.my_node == node3).count ()
1
>>> CNDB.Belongs_to_Node.query (Q.my_node == node3).count ()
10
>>> CNDB.Net_Device.query (Q.my_node == node3).count ()
1
>>> CNDB.Net_Interface.query (Q.my_node == node3).count ()
2
>>> CNDB.Node.query (Q.my_node == node3).count ()
1
>>> CNDB.Wired_Interface.query (Q.my_node == node3).count ()
1
>>> CNDB.Wireless_Interface.query (Q.my_node == node3).count ()
1
>>> CNDB.Wireless_Interface_uses_Antenna.query (Q.my_node == node3).count ()
1
>>> CNDB.Net_Device.query (Q.my_node.manager == mgr).count ()
1
>>> CNDB.Net_Device.query (Q.my_node != node3).count ()
0
"""
_test_auto_children = """
>>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS
Creating new scope MOMT__...
>>> CNDB = scope.CNDB
>>> PAP = scope.PAP
>>> for T, l in children_trans_iter (scope.PAP.Subject_has_Property) :
... print ("%%-30s %%s" %% ("%%s%%s" %% (" " * l, T.type_name), sorted (T.children_np_transitive)))
PAP.Subject_has_Property ['PAP.Adhoc_Group_has_Address', 'PAP.Adhoc_Group_has_Email', 'PAP.Adhoc_Group_has_IM_Handle', 'PAP.Adhoc_Group_has_Nickname', 'PAP.Adhoc_Group_has_Phone', 'PAP.Adhoc_Group_has_Url', 'PAP.Association_has_Address', 'PAP.Association_has_Email', 'PAP.Association_has_IM_Handle', 'PAP.Association_has_Nickname', 'PAP.Association_has_Phone', 'PAP.Association_has_Url', 'PAP.Company_has_Address', 'PAP.Company_has_Email', 'PAP.Company_has_IM_Handle', 'PAP.Company_has_Nickname', 'PAP.Company_has_Phone', 'PAP.Company_has_Url', 'PAP.Person_has_Address', 'PAP.Person_has_Email', 'PAP.Person_has_IM_Handle', 'PAP.Person_has_Nickname', 'PAP.Person_has_Phone', 'PAP.Person_has_Url']
PAP.Subject_has_IM_Handle ['PAP.Adhoc_Group_has_IM_Handle', 'PAP.Association_has_IM_Handle', 'PAP.Company_has_IM_Handle', 'PAP.Person_has_IM_Handle']
PAP.Association_has_IM_Handle ['PAP.Association_has_IM_Handle']
PAP.Adhoc_Group_has_IM_Handle ['PAP.Adhoc_Group_has_IM_Handle']
PAP.Person_has_IM_Handle ['PAP.Person_has_IM_Handle']
PAP.Company_has_IM_Handle ['PAP.Company_has_IM_Handle']
PAP.Subject_has_Nickname ['PAP.Adhoc_Group_has_Nickname', 'PAP.Association_has_Nickname', 'PAP.Company_has_Nickname', 'PAP.Person_has_Nickname']
PAP.Association_has_Nickname ['PAP.Association_has_Nickname']
PAP.Adhoc_Group_has_Nickname ['PAP.Adhoc_Group_has_Nickname']
PAP.Person_has_Nickname ['PAP.Person_has_Nickname']
PAP.Company_has_Nickname ['PAP.Company_has_Nickname']
PAP.Subject_has_Address ['PAP.Adhoc_Group_has_Address', 'PAP.Association_has_Address', 'PAP.Company_has_Address', 'PAP.Person_has_Address']
PAP.Association_has_Address ['PAP.Association_has_Address']
PAP.Adhoc_Group_has_Address ['PAP.Adhoc_Group_has_Address']
PAP.Person_has_Address ['PAP.Person_has_Address']
PAP.Company_has_Address ['PAP.Company_has_Address']
PAP.Subject_has_Email ['PAP.Adhoc_Group_has_Email', 'PAP.Association_has_Email', 'PAP.Company_has_Email', 'PAP.Person_has_Email']
PAP.Association_has_Email ['PAP.Association_has_Email']
PAP.Adhoc_Group_has_Email ['PAP.Adhoc_Group_has_Email']
PAP.Person_has_Email ['PAP.Person_has_Email']
PAP.Company_has_Email ['PAP.Company_has_Email']
PAP.Subject_has_Phone ['PAP.Adhoc_Group_has_Phone', 'PAP.Association_has_Phone', 'PAP.Company_has_Phone', 'PAP.Person_has_Phone']
PAP.Association_has_Phone ['PAP.Association_has_Phone']
PAP.Adhoc_Group_has_Phone ['PAP.Adhoc_Group_has_Phone']
PAP.Person_has_Phone ['PAP.Person_has_Phone']
PAP.Company_has_Phone ['PAP.Company_has_Phone']
PAP.Subject_has_Url ['PAP.Adhoc_Group_has_Url', 'PAP.Association_has_Url', 'PAP.Company_has_Url', 'PAP.Person_has_Url']
PAP.Association_has_Url ['PAP.Association_has_Url']
PAP.Adhoc_Group_has_Url ['PAP.Adhoc_Group_has_Url']
PAP.Person_has_Url ['PAP.Person_has_Url']
PAP.Company_has_Url ['PAP.Company_has_Url']
>>> for T, l in children_trans_iter (scope.PAP.Subject_has_Property) :
... rr = T.relevant_root.type_name if T.relevant_root else sorted (T.relevant_roots)
... print ("%%-30s %%-5s %%s" %% ("%%s%%s" %% (" " * l, T.type_name), T.is_partial, rr))
PAP.Subject_has_Property True PAP.Subject_has_Property
PAP.Subject_has_IM_Handle True PAP.Subject_has_Property
PAP.Association_has_IM_Handle False PAP.Subject_has_Property
PAP.Adhoc_Group_has_IM_Handle False PAP.Subject_has_Property
PAP.Person_has_IM_Handle False PAP.Subject_has_Property
PAP.Company_has_IM_Handle False PAP.Subject_has_Property
PAP.Subject_has_Nickname True PAP.Subject_has_Property
PAP.Association_has_Nickname False PAP.Subject_has_Property
PAP.Adhoc_Group_has_Nickname False PAP.Subject_has_Property
PAP.Person_has_Nickname False PAP.Subject_has_Property
PAP.Company_has_Nickname False PAP.Subject_has_Property
PAP.Subject_has_Address True PAP.Subject_has_Property
PAP.Association_has_Address False PAP.Subject_has_Property
PAP.Adhoc_Group_has_Address False PAP.Subject_has_Property
PAP.Person_has_Address False PAP.Subject_has_Property
PAP.Company_has_Address False PAP.Subject_has_Property
PAP.Subject_has_Email True PAP.Subject_has_Property
PAP.Association_has_Email False PAP.Subject_has_Property
PAP.Adhoc_Group_has_Email False PAP.Subject_has_Property
PAP.Person_has_Email False PAP.Subject_has_Property
PAP.Company_has_Email False PAP.Subject_has_Property
PAP.Subject_has_Phone True PAP.Subject_has_Property
PAP.Association_has_Phone False PAP.Subject_has_Property
PAP.Adhoc_Group_has_Phone False PAP.Subject_has_Property
PAP.Person_has_Phone False PAP.Subject_has_Property
PAP.Company_has_Phone False PAP.Subject_has_Property
PAP.Subject_has_Url True PAP.Subject_has_Property
PAP.Association_has_Url False PAP.Subject_has_Property
PAP.Adhoc_Group_has_Url False PAP.Subject_has_Property
PAP.Person_has_Url False PAP.Subject_has_Property
PAP.Company_has_Url False PAP.Subject_has_Property
"""
_test_owner = """
>>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS
Creating new scope MOMT__...
>>> CNDB = scope.CNDB
>>> PAP = scope.PAP
>>> Adr = CNDB.IP4_Network.net_address.P_Type
>>> mgr = PAP.Person \\
... (first_name = 'Ralf', last_name = 'Schlatterbeck', raw = True)
>>> node1 = CNDB.Node (name = "nogps", manager = mgr, position = None, raw = True)
>>> node1.owner
PAP.Person ('schlatterbeck', 'ralf', '', '')
>>> with expect_except (MOM.Error.Wrong_Type) :
... node4 = CNDB.Node (name = "node4", manager = mgr, owner = node1)
Wrong_Type: Node 'nogps' not eligible for attribute owner,
must be instance of Subject
"""
_test_refuse_e_types = """
>>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS
Creating new scope MOMT__...
>>> CNDB = scope.CNDB
>>> PAP = scope.PAP
>>> for ET in scope.app_type._T_Extension :
... for a in ET.id_entity_attr :
... if getattr (a, "refuse_e_types", None) :
... print (ET.type_name, a.name, sorted (a.refuse_e_types))
>>> for ET in scope.app_type._T_Extension :
... for a in ET.id_entity_attr :
... if getattr (a, "refuse_e_types", None) :
... print (ET.type_name, a.name, sorted (a.refuse_e_types_transitive))
>>> sorted (CNDB.Node.manager.eligible_e_types)
['PAP.Adhoc_Group', 'PAP.Association', 'PAP.Company', 'PAP.Person']
>>> sorted (CNDB.Node.owner.eligible_e_types)
['PAP.Adhoc_Group', 'PAP.Association', 'PAP.Company', 'PAP.Person']
>>> sorted (CNDB.Node.owner.selectable_e_types)
['PAP.Adhoc_Group', 'PAP.Association', 'PAP.Company', 'PAP.Person']
>>> sorted (PAP.Subject_has_Property.left.eligible_e_types)
['PAP.Adhoc_Group', 'PAP.Association', 'PAP.Company', 'PAP.Person']
>>> sorted (PAP.Subject_has_Phone.left.eligible_e_types)
['PAP.Adhoc_Group', 'PAP.Association', 'PAP.Company', 'PAP.Person']
>>> AQ = CNDB.Node.AQ
>>> print (formatted (AQ.As_Template_Elem))
[ Record
( attr = String `name`
, full_name = 'name'
, id = 'name'
, name = 'name'
, sig_key = 3
, ui_name = 'Name'
)
, Record
( Class = 'Entity'
, attr = Entity `manager`
, children_np =
[ Record
( Class = 'Entity'
, attr = Entity `manager`
, attrs =
[ Record
( attr = String `name`
, full_name = 'manager.name'
, id = 'manager__name'
, name = 'name'
, sig_key = 3
, ui_name = 'Manager[Adhoc_Group]/Name'
)
]
, full_name = 'manager'
, id = 'manager'
, name = 'manager'
, sig_key = 2
, type_name = 'PAP.Adhoc_Group'
, ui_name = 'Manager[Adhoc_Group]'
, ui_type_name = 'Adhoc_Group'
)
, Record
( Class = 'Entity'
, attr = Entity `manager`
, attrs =
[ Record
( attr = String `name`
, full_name = 'manager.name'
, id = 'manager__name'
, name = 'name'
, sig_key = 3
, ui_name = 'Manager[Association]/Name'
)
]
, full_name = 'manager'
, id = 'manager'
, name = 'manager'
, sig_key = 2
, type_name = 'PAP.Association'
, ui_name = 'Manager[Association]'
, ui_type_name = 'Association'
)
, Record
( Class = 'Entity'
, attr = Entity `manager`
, attrs =
[ Record
( attr = String `name`
, full_name = 'manager.name'
, id = 'manager__name'
, name = 'name'
, sig_key = 3
, ui_name = 'Manager[Company]/Name'
)
, Record
( attr = String `registered_in`
, full_name = 'manager.registered_in'
, id = 'manager__registered_in'
, name = 'registered_in'
, sig_key = 3
, ui_name = 'Manager[Company]/Registered in'
)
]
, full_name = 'manager'
, id = 'manager'
, name = 'manager'
, sig_key = 2
, type_name = 'PAP.Company'
, ui_name = 'Manager[Company]'
, ui_type_name = 'Company'
)
, Record
( Class = 'Entity'
, attr = Entity `manager`
, attrs =
[ Record
( attr = String `last_name`
, full_name = 'manager.last_name'
, id = 'manager__last_name'
, name = 'last_name'
, sig_key = 3
, ui_name = 'Manager[Person]/Last name'
)
, Record
( attr = String `first_name`
, full_name = 'manager.first_name'
, id = 'manager__first_name'
, name = 'first_name'
, sig_key = 3
, ui_name = 'Manager[Person]/First name'
)
, Record
( attr = String `middle_name`
, full_name = 'manager.middle_name'
, id = 'manager__middle_name'
, name = 'middle_name'
, sig_key = 3
, ui_name = 'Manager[Person]/Middle name'
)
, Record
( attr = String `title`
, full_name = 'manager.title'
, id = 'manager__title'
, name = 'title'
, sig_key = 3
, ui_name = 'Manager[Person]/Academic title'
)
]
, full_name = 'manager'
, id = 'manager'
, name = 'manager'
, sig_key = 2
, type_name = 'PAP.Person'
, ui_name = 'Manager[Person]'
, ui_type_name = 'Person'
)
]
, default_child = 'PAP.Person'
, full_name = 'manager'
, id = 'manager'
, name = 'manager'
, sig_key = 2
, type_name = 'PAP.Subject'
, ui_name = 'Manager'
, ui_type_name = 'Subject'
)
, Record
( Class = 'Entity'
, attr = Entity `address`
, attrs =
[ Record
( attr = String `street`
, full_name = 'address.street'
, id = 'address__street'
, name = 'street'
, sig_key = 3
, ui_name = 'Address/Street'
)
, Record
( attr = String `zip`
, full_name = 'address.zip'
, id = 'address__zip'
, name = 'zip'
, sig_key = 3
, ui_name = 'Address/Zip code'
)
, Record
( attr = String `city`
, full_name = 'address.city'
, id = 'address__city'
, name = 'city'
, sig_key = 3
, ui_name = 'Address/City'
)
, Record
( attr = String `country`
, full_name = 'address.country'
, id = 'address__country'
, name = 'country'
, sig_key = 3
, ui_name = 'Address/Country'
)
, Record
( attr = String `desc`
, full_name = 'address.desc'
, id = 'address__desc'
, name = 'desc'
, sig_key = 3
, ui_name = 'Address/Description'
)
, Record
( attr = String `region`
, full_name = 'address.region'
, id = 'address__region'
, name = 'region'
, sig_key = 3
, ui_name = 'Address/Region'
)
]
, full_name = 'address'
, id = 'address'
, name = 'address'
, sig_key = 2
, type_name = 'PAP.Address'
, ui_name = 'Address'
, ui_type_name = 'Address'
)
, Record
( attr = Text `desc`
, full_name = 'desc'
, id = 'desc'
, name = 'desc'
, sig_key = 3
, ui_name = 'Description'
)
, Record
( Class = 'Entity'
, attr = Entity `owner`
, children_np =
[ Record
( Class = 'Entity'
, attr = Entity `owner`
, attrs =
[ Record
( attr = String `name`
, full_name = 'owner.name'
, id = 'owner__name'
, name = 'name'
, sig_key = 3
, ui_name = 'Owner[Adhoc_Group]/Name'
)
]
, full_name = 'owner'
, id = 'owner'
, name = 'owner'
, sig_key = 2
, type_name = 'PAP.Adhoc_Group'
, ui_name = 'Owner[Adhoc_Group]'
, ui_type_name = 'Adhoc_Group'
)
, Record
( Class = 'Entity'
, attr = Entity `owner`
, attrs =
[ Record
( attr = String `name`
, full_name = 'owner.name'
, id = 'owner__name'
, name = 'name'
, sig_key = 3
, ui_name = 'Owner[Association]/Name'
)
]
, full_name = 'owner'
, id = 'owner'
, name = 'owner'
, sig_key = 2
, type_name = 'PAP.Association'
, ui_name = 'Owner[Association]'
, ui_type_name = 'Association'
)
, Record
( Class = 'Entity'
, attr = Entity `owner`
, attrs =
[ Record
( attr = String `name`
, full_name = 'owner.name'
, id = 'owner__name'
, name = 'name'
, sig_key = 3
, ui_name = 'Owner[Company]/Name'
)
, Record
( attr = String `registered_in`
, full_name = 'owner.registered_in'
, id = 'owner__registered_in'
, name = 'registered_in'
, sig_key = 3
, ui_name = 'Owner[Company]/Registered in'
)
]
, full_name = 'owner'
, id = 'owner'
, name = 'owner'
, sig_key = 2
, type_name = 'PAP.Company'
, ui_name = 'Owner[Company]'
, ui_type_name = 'Company'
)
, Record
( Class = 'Entity'
, attr = Entity `owner`
, attrs =
[ Record
( attr = String `last_name`
, full_name = 'owner.last_name'
, id = 'owner__last_name'
, name = 'last_name'
, sig_key = 3
, ui_name = 'Owner[Person]/Last name'
)
, Record
( attr = String `first_name`
, full_name = 'owner.first_name'
, id = 'owner__first_name'
, name = 'first_name'
, sig_key = 3
, ui_name = 'Owner[Person]/First name'
)
, Record
( attr = String `middle_name`
, full_name = 'owner.middle_name'
, id = 'owner__middle_name'
, name = 'middle_name'
, sig_key = 3
, ui_name = 'Owner[Person]/Middle name'
)
, Record
( attr = String `title`
, full_name = 'owner.title'
, id = 'owner__title'
, name = 'title'
, sig_key = 3
, ui_name = 'Owner[Person]/Academic title'
)
]
, full_name = 'owner'
, id = 'owner'
, name = 'owner'
, sig_key = 2
, type_name = 'PAP.Person'
, ui_name = 'Owner[Person]'
, ui_type_name = 'Person'
)
]
, default_child = 'PAP.Person'
, full_name = 'owner'
, id = 'owner'
, name = 'owner'
, sig_key = 2
, type_name = 'PAP.Subject'
, ui_name = 'Owner'
, ui_type_name = 'Subject'
)
, Record
( attr = Position `position`
, attrs =
[ Record
( attr = Angle `lat`
, full_name = 'position.lat'
, id = 'position__lat'
, name = 'lat'
, sig_key = 4
, ui_name = 'Position/Latitude'
)
, Record
( attr = Angle `lon`
, full_name = 'position.lon'
, id = 'position__lon'
, name = 'lon'
, sig_key = 4
, ui_name = 'Position/Longitude'
)
, Record
( attr = Float `height`
, full_name = 'position.height'
, id = 'position__height'
, name = 'height'
, sig_key = 0
, ui_name = 'Position/Height'
)
]
, full_name = 'position'
, id = 'position'
, name = 'position'
, ui_name = 'Position'
)
, Record
( attr = Boolean `show_in_map`
, choices =
[ 'no'
, 'yes'
]
, full_name = 'show_in_map'
, id = 'show_in_map'
, name = 'show_in_map'
, sig_key = 1
, ui_name = 'Show in map'
)
, Record
( Class = 'Entity'
, attr = Rev_Ref `creation`
, attrs =
[ Record
( attr = Date-Time `c_time`
, full_name = 'creation.c_time'
, id = 'creation__c_time'
, name = 'c_time'
, sig_key = 0
, ui_name = 'Creation/C time'
)
, Record
( Class = 'Entity'
, attr = Entity `c_user`
, children_np =
[ Record
( Class = 'Entity'
, attr = Entity `c_user`
, attrs =
[ Record
( attr = Email `name`
, full_name = 'c_user.name'
, id = 'c_user__name'
, name = 'name'
, sig_key = 3
, ui_name = 'C user[Account]/Name'
)
]
, full_name = 'c_user'
, id = 'c_user'
, name = 'c_user'
, sig_key = 2
, type_name = 'Auth.Account'
, ui_name = 'C user[Account]'
, ui_type_name = 'Account'
)
, Record
( Class = 'Entity'
, attr = Entity `c_user`
, attrs =
[ Record
( attr = String `last_name`
, full_name = 'c_user.last_name'
, id = 'c_user__last_name'
, name = 'last_name'
, sig_key = 3
, ui_name = 'C user[Person]/Last name'
)
, Record
( attr = String `first_name`
, full_name = 'c_user.first_name'
, id = 'c_user__first_name'
, name = 'first_name'
, sig_key = 3
, ui_name = 'C user[Person]/First name'
)
, Record
( attr = String `middle_name`
, full_name = 'c_user.middle_name'
, id = 'c_user__middle_name'
, name = 'middle_name'
, sig_key = 3
, ui_name = 'C user[Person]/Middle name'
)
, Record
( attr = String `title`
, full_name = 'c_user.title'
, id = 'c_user__title'
, name = 'title'
, sig_key = 3
, ui_name = 'C user[Person]/Academic title'
)
]
, full_name = 'c_user'
, id = 'c_user'
, name = 'c_user'
, sig_key = 2
, type_name = 'PAP.Person'
, ui_name = 'C user[Person]'
, ui_type_name = 'Person'
)
]
, full_name = 'creation.c_user'
, id = 'creation__c_user'
, name = 'c_user'
, sig_key = 2
, type_name = 'MOM.Id_Entity'
, ui_name = 'Creation/C user'
, ui_type_name = 'Id_Entity'
)
, Record
( attr = String `kind`
, full_name = 'creation.kind'
, id = 'creation__kind'
, name = 'kind'
, sig_key = 3
, ui_name = 'Creation/Kind'
)
, Record
( attr = Date-Time `time`
, full_name = 'creation.time'
, id = 'creation__time'
, name = 'time'
, sig_key = 0
, ui_name = 'Creation/Time'
)
, Record
( Class = 'Entity'
, attr = Entity `user`
, children_np =
[ Record
( Class = 'Entity'
, attr = Entity `user`
, attrs =
[ Record
( attr = Email `name`
, full_name = 'user.name'
, id = 'user__name'
, name = 'name'
, sig_key = 3
, ui_name = 'User[Account]/Name'
)
]
, full_name = 'user'
, id = 'user'
, name = 'user'
, sig_key = 2
, type_name = 'Auth.Account'
, ui_name = 'User[Account]'
, ui_type_name = 'Account'
)
, Record
( Class = 'Entity'
, attr = Entity `user`
, attrs =
[ Record
( attr = String `last_name`
, full_name = 'user.last_name'
, id = 'user__last_name'
, name = 'last_name'
, sig_key = 3
, ui_name = 'User[Person]/Last name'
)
, Record
( attr = String `first_name`
, full_name = 'user.first_name'
, id = 'user__first_name'
, name = 'first_name'
, sig_key = 3
, ui_name = 'User[Person]/First name'
)
, Record
( attr = String `middle_name`
, full_name = 'user.middle_name'
, id = 'user__middle_name'
, name = 'middle_name'
, sig_key = 3
, ui_name = 'User[Person]/Middle name'
)
, Record
( attr = String `title`
, full_name = 'user.title'
, id = 'user__title'
, name = 'title'
, sig_key = 3
, ui_name = 'User[Person]/Academic title'
)
]
, full_name = 'user'
, id = 'user'
, name = 'user'
, sig_key = 2
, type_name = 'PAP.Person'
, ui_name = 'User[Person]'
, ui_type_name = 'Person'
)
]
, full_name = 'creation.user'
, id = 'creation__user'
, name = 'user'
, sig_key = 2
, type_name = 'MOM.Id_Entity'
, ui_name = 'Creation/User'
, ui_type_name = 'Id_Entity'
)
]
, full_name = 'creation'
, id = 'creation'
, name = 'creation'
, sig_key = 2
, type_name = 'MOM.MD_Change'
, ui_name = 'Creation'
, ui_type_name = 'MD_Change'
)
, Record
( Class = 'Entity'
, attr = Rev_Ref `last_change`
, attrs =
[ Record
( attr = Date-Time `c_time`
, full_name = 'last_change.c_time'
, id = 'last_change__c_time'
, name = 'c_time'
, sig_key = 0
, ui_name = 'Last change/C time'
)
, Record
( Class = 'Entity'
, attr = Entity `c_user`
, children_np =
[ Record
( Class = 'Entity'
, attr = Entity `c_user`
, attrs =
[ Record
( attr = Email `name`
, full_name = 'c_user.name'
, id = 'c_user__name'
, name = 'name'
, sig_key = 3
, ui_name = 'C user[Account]/Name'
)
]
, full_name = 'c_user'
, id = 'c_user'
, name = 'c_user'
, sig_key = 2
, type_name = 'Auth.Account'
, ui_name = 'C user[Account]'
, ui_type_name = 'Account'
)
, Record
( Class = 'Entity'
, attr = Entity `c_user`
, attrs =
[ Record
( attr = String `last_name`
, full_name = 'c_user.last_name'
, id = 'c_user__last_name'
, name = 'last_name'
, sig_key = 3
, ui_name = 'C user[Person]/Last name'
)
, Record
( attr = String `first_name`
, full_name = 'c_user.first_name'
, id = 'c_user__first_name'
, name = 'first_name'
, sig_key = 3
, ui_name = 'C user[Person]/First name'
)
, Record
( attr = String `middle_name`
, full_name = 'c_user.middle_name'
, id = 'c_user__middle_name'
, name = 'middle_name'
, sig_key = 3
, ui_name = 'C user[Person]/Middle name'
)
, Record
( attr = String `title`
, full_name = 'c_user.title'
, id = 'c_user__title'
, name = 'title'
, sig_key = 3
, ui_name = 'C user[Person]/Academic title'
)
]
, full_name = 'c_user'
, id = 'c_user'
, name = 'c_user'
, sig_key = 2
, type_name = 'PAP.Person'
, ui_name = 'C user[Person]'
, ui_type_name = 'Person'
)
]
, full_name = 'last_change.c_user'
, id = 'last_change__c_user'
, name = 'c_user'
, sig_key = 2
, type_name = 'MOM.Id_Entity'
, ui_name = 'Last change/C user'
, ui_type_name = 'Id_Entity'
)
, Record
( attr = String `kind`
, full_name = 'last_change.kind'
, id = 'last_change__kind'
, name = 'kind'
, sig_key = 3
, ui_name = 'Last change/Kind'
)
, Record
( attr = Date-Time `time`
, full_name = 'last_change.time'
, id = 'last_change__time'
, name = 'time'
, sig_key = 0
, ui_name = 'Last change/Time'
)
, Record
( Class = 'Entity'
, attr = Entity `user`
, children_np =
[ Record
( Class = 'Entity'
, attr = Entity `user`
, attrs =
[ Record
( attr = Email `name`
, full_name = 'user.name'
, id = 'user__name'
, name = 'name'
, sig_key = 3
, ui_name = 'User[Account]/Name'
)
]
, full_name = 'user'
, id = 'user'
, name = 'user'
, sig_key = 2
, type_name = 'Auth.Account'
, ui_name = 'User[Account]'
, ui_type_name = 'Account'
)
, Record
( Class = 'Entity'
, attr = Entity `user`
, attrs =
[ Record
( attr = String `last_name`
, full_name = 'user.last_name'
, id = 'user__last_name'
, name = 'last_name'
, sig_key = 3
, ui_name = 'User[Person]/Last name'
)
, Record
( attr = String `first_name`
, full_name = 'user.first_name'
, id = 'user__first_name'
, name = 'first_name'
, sig_key = 3
, ui_name = 'User[Person]/First name'
)
, Record
( attr = String `middle_name`
, full_name = 'user.middle_name'
, id = 'user__middle_name'
, name = 'middle_name'
, sig_key = 3
, ui_name = 'User[Person]/Middle name'
)
, Record
( attr = String `title`
, full_name = 'user.title'
, id = 'user__title'
, name = 'title'
, sig_key = 3
, ui_name = 'User[Person]/Academic title'
)
]
, full_name = 'user'
, id = 'user'
, name = 'user'
, sig_key = 2
, type_name = 'PAP.Person'
, ui_name = 'User[Person]'
, ui_type_name = 'Person'
)
]
, full_name = 'last_change.user'
, id = 'last_change__user'
, name = 'user'
, sig_key = 2
, type_name = 'MOM.Id_Entity'
, ui_name = 'Last change/User'
, ui_type_name = 'Id_Entity'
)
]
, full_name = 'last_change'
, id = 'last_change'
, name = 'last_change'
, sig_key = 2
, type_name = 'MOM.MD_Change'
, ui_name = 'Last change'
, ui_type_name = 'MD_Change'
)
, Record
( attr = Int `last_cid`
, full_name = 'last_cid'
, id = 'last_cid'
, name = 'last_cid'
, sig_key = 0
, ui_name = 'Last cid'
)
, Record
( attr = Surrogate `pid`
, full_name = 'pid'
, id = 'pid'
, name = 'pid'
, sig_key = 0
, ui_name = 'Pid'
)
, Record
( attr = String `type_name`
, full_name = 'type_name'
, id = 'type_name'
, name = 'type_name'
, sig_key = 3
, ui_name = 'Type name'
)
, Record
( Class = 'Entity'
, attr = Link_Ref_List `documents`
, attrs =
[ Record
( attr = Url `url`
, full_name = 'documents.url'
, id = 'documents__url'
, name = 'url'
, sig_key = 3
, ui_name = 'Documents/Url'
)
, Record
( attr = String `type`
, full_name = 'documents.type'
, id = 'documents__type'
, name = 'type'
, sig_key = 3
, ui_name = 'Documents/Type'
)
, Record
( attr = String `desc`
, full_name = 'documents.desc'
, id = 'documents__desc'
, name = 'desc'
, sig_key = 3
, ui_name = 'Documents/Description'
)
]
, full_name = 'documents'
, id = 'documents'
, name = 'documents'
, sig_key = 2
, type_name = 'MOM.Document'
, ui_name = 'Documents'
, ui_type_name = 'Document'
)
]
"""
__test__ = Scaffold.create_test_dict \
( dict
( main = _test_code
, auto_children = _test_auto_children
, owner = _test_owner
, refuse_e_types = _test_refuse_e_types
)
)
### __END__ CNDB.OMP.__test__.Nodes
|
# Generated by Django 3.1.4 on 2020-12-21 19:01
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('aptlist', '0008_auto_20201221_1855'),
]
operations = [
migrations.AlterField(
model_name='apartment',
name='date_built',
field=models.DateTimeField(default=datetime.datetime(2020, 12, 21, 19, 1, 4, 7284, tzinfo=utc), verbose_name='date built'),
),
migrations.AlterField(
model_name='lease',
name='lease_expires',
field=models.DateTimeField(default=datetime.datetime(2020, 12, 21, 19, 1, 4, 30309, tzinfo=utc), verbose_name='lease expires'),
),
]
|
"""Top-level package for opytional."""
__author__ = """Matthew Andres Moreno"""
__email__ = '[email protected]'
__version__ = '0.1.0'
from .apply_if_or_else import apply_if_or_else
from .apply_if_or_value import apply_if_or_value
from .apply_if import apply_if
from .or_else import or_else
from .or_value import or_value
# adapted from https://stackoverflow.com/a/31079085
__all__ = [
'apply_if_or_else',
'apply_if_or_value',
'apply_if',
'or_else',
'or_value',
]
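# Illustrative usage sketch (not part of the package): the calls below assume
# signatures implied by the helper names -- apply_if(value, fn) applying fn
# only when value is not None, or_value(value, default) falling back to a
# plain default, and apply_if_or_value(value, fn, default) combining the two.
# These signatures are assumptions; check the individual modules before use.
#
#   >>> import opytional as opyt
#   >>> opyt.apply_if(3, lambda x: x + 1)          # 4
#   >>> opyt.apply_if(None, lambda x: x + 1)       # None
#   >>> opyt.or_value(None, "fallback")            # 'fallback'
#   >>> opyt.apply_if_or_value(None, str, "n/a")   # 'n/a'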
|
import os
import shutil
from pathlib import Path
from django.core.management.base import BaseCommand
from ._config import GrpcFrameworkSettings
from ._utils import LoggingMixIn
class Command(LoggingMixIn, BaseCommand):
help = 'Compile protobuf files'
settings = GrpcFrameworkSettings()
force = False
verbosity = 1
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument('-y', action='store_true', dest='force')
def rmdir(self, target: Path):
sub_items = set(x for x in target.iterdir())
init_file = target.joinpath('__init__.py')
cache_dir = target.joinpath('__pycache__')
re_list = False
if sub_items == {init_file} or sub_items == {init_file, cache_dir}:
if init_file.lstat().st_size == 0 and self.prompt(f'delete {init_file}'):
os.remove(init_file)
re_list = True
if re_list:
sub_items = set(x for x in target.iterdir())
if (not sub_items or sub_items == {cache_dir}) and self.prompt(f'delete {target}/'):
shutil.rmtree(target)
def clear(self):
prefix = self.settings.temporary_dir
for root, dirs, files in os.walk(prefix, topdown=False):
for file in files:
source = Path(root).joinpath(file).absolute()
target = self.settings.base_dir.joinpath(source.relative_to(prefix))
if target.exists() and self.prompt(f'delete {target}'):
os.remove(target)
for d in dirs:
source = Path(root).joinpath(d).absolute()
target = self.settings.base_dir.joinpath(source.relative_to(prefix))
self.rmdir(target)
if self.prompt(f'delete {prefix}'):
shutil.rmtree(prefix)
def handle(self, *args, **options):
self.verbosity = options.get('verbosity')
self.force = options.get('force', False)
self.clear()
|
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="uscalendar",
version="0.0.1",
author="Matthew McElhaney",
author_email="[email protected]",
description="Package that contains modules for US Federal Holidays and US Market Opens",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mmcelhan/us_calendar_source",
project_urls={
"blog post": "https://lamplightlab.com/?p=61",
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
package_dir={"": "src"},
packages=setuptools.find_packages(where="src"),
python_requires=">=3.6",
)
|
from ._train import add_args
|
#! /usr/bin/python3
#
# Copyright (c) 2017 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
"""
Test Target Broker Daemon HW Health Check Monitor
Monitor the kernel's journal output looking for telltale signs of some
piece of hardware gone unresponsive and take action to remediate it.
This has to be configured via config files in
/etc/ttbd-hw-healthmonitor/conf_*.py
"""
import argparse
import bisect
import collections
import logging
import os
import pprint
import re
import select
import subprocess
import time
import systemd.journal
import systemd.daemon
import ttbl._install
import commonl
usb_root_regex = re.compile("^(?P<bus>[0-9]+)-(?P<port>[0-9]+)$")
def _usb_special_case(path):
if not path.startswith("/sys/bus/usb/drivers"):
return path
filename = os.path.basename(path)
    # we can only work around root ports, which look like
# /sys/bus/usb/drivers/usb/3-2
match = usb_root_regex.match(filename)
if not match:
return path
# Sometimes /sys/bus/usb/drivers/usb/3-2 (for example) doesn't
# exist because it has been blown to pieces somehow, but there is
# a:
#
# $ find /sys/ -iname usb3-port2
# /sys/devices/pci0000:80/0000:80:01.0/0000:81:00.0/0000:82:00.2/usb3/3-0:1.0/usb3-port2
#
# so if it doesn't exist, we are going to use that one
if os.path.exists(path):
return path
# $ readlink -e /sys/bus/usb/drivers/usb/3-2
# /sys/devices/pci0000:80/0000:80:01.0/0000:81:00.0/0000:82:00.2/usb3/3-2
#
# and when it doesn't exist
#
# $ find /sys/ -iname usb3-port2
# /sys/devices/pci0000:80/0000:80:01.0/0000:81:00.0/0000:82:00.2/usb3/3-0:1.0/usb3-port2
def _find(filename):
for parent, dirs, _files in os.walk("/sys"):
if filename in dirs: # usb3-port2 is a dir
# return just
# /sys/devices/pci0000:80/0000:80:01.0/0000:81:00.0/0000:82:00.2/usb3/3-0:1.0,
# so it is at the same level as
# /sys/devices/pci0000:80/0000:80:01.0/0000:81:00.0/0000:82:00.2/usb3/3-2.
logging.info("%s: doesn't exist, but %s does, dead controller",
path, parent)
return parent
return None
gd = match.groupdict()
return _find("usb" + gd['bus'] + "-port" + gd['port'])
def _driver_rebind(bus_name, driver_name, device_name, strip_generations):
global _ttbd_hw_health_monitor_driver_rebind_path
    # let's start by composing the /sys path from the arguments
path = os.path.join("/", "sys", "bus", bus_name, "drivers", driver_name,
device_name)
_path = _usb_special_case(path)
    if _path is None:
logging.error("%s: doesn't exist, can't rebind", path)
return
path = _path
if strip_generations:
# Strip children from the device path up because we want to
# rebind a parent device, not the children
#
# For example, for /sys/bus/usb/devices/3-4.1:1.0
# parent is 3-4.1
# grandpa is 3-4
# great-grandpa is usb3
# great-great-grandpa is 0000:05.00.2
#
# we know because
#
# $ readlink -e /sys/bus/usb/devices/3-4.1:1.0
# /sys/devices/pci0000:00/0000:00:03.0/0000:04:00.0/0000:05:00.2/usb3/3-4/3-4.1/3-4.1:1.0
assert strip_generations > 0
# Now see where that points to, that's the
# $ readlink -e /sys/bus/usb/devices/3-4.1:1.0
# /sys/devices/pci0000:00/0000:00:03.0/0000:04:00.0/0000:05:00.2/usb3/3-4/3-4.1/3-4.1:1.0
real_path = os.path.realpath(path).split(os.path.sep)
# So now chop strip-generations on the right, that's our new device
# /sys/devices/pci0000:00/0000:00:03.0/0000:04:00.0/0000:05:00.2
        stripped_path = real_path[:-strip_generations]
# got device name
new_device_name = stripped_path[-1]
# Now let's get what bus and driver this device is attached to
# by following /DEVICEPATH/driver
#
# /sys/devices/pci0000:00/0000:00:03.0/0000:04:00.0/0000:05:00.2/driver
driver_path = os.path.realpath(os.path.join(*(
[ "/" ] + stripped_path +[ "driver" ])))
# this will give /sys/bus/BUSNAME/drivers/DRIVERNAME, so let's split
# it and extract the data
driver_path_components = driver_path.split("/")
new_bus_name = driver_path_components[3]
new_driver_name = driver_path_components[5]
logging.info("%s/%s/%s: stripped %d generations yields %s/%s/%s",
bus_name, driver_name, device_name,
strip_generations,
new_bus_name, new_driver_name, new_device_name)
device_name = new_device_name
driver_name = new_driver_name
bus_name = new_bus_name
cmdline = [
"sudo", "-n",
_ttbd_hw_health_monitor_driver_rebind_path,
bus_name, driver_name, device_name
]
try:
logging.info("%s/%s/%s: rebinding with command '%s'",
bus_name, driver_name, device_name,
" ".join(cmdline))
output = subprocess.check_output(cmdline, stderr = subprocess.STDOUT)
except subprocess.CalledProcessError as cpe:
logging.error("%s/%s/%s: rebinding with command '%s' failed: %s",
bus_name, driver_name, device_name,
" ".join(cpe.cmd), cpe.output)
return # well, nothing we can really do...
logging.warning("%s/%s/%s: rebound with command '%s': %s",
bus_name, driver_name, device_name,
" ".join(cmdline), output)
def action_driver_rebind(bus_name, driver_name, device_name,
condition, entry, strip_generations = 0):
"""
    Rebind a device to its driver to reset it
A device that is in a hosed state will be re-bound to its driver
to try to reset it and bring it back to life.
:param str bus_name: name of bus in */sys/bus*
:param str driver_name: name of driver in
*/sys/bus/BUS_NAME/drivers*
:param str device_name: name of the device in
*/sys/bus/BUS_NAME/drivers/DRIVER_NAME*
:param str condition: condition in the configuration given to
:func:`config_watch_add` that caused this call
:param dict entry: Systemd journal entry that caused this call
"""
ts = entry.get('__REALTIME_TIMESTAMP', None)
logging.error("%s: ACTION: reloading driver due to '%s' @ %s",
device_name, condition, ts)
_driver_rebind(bus_name, driver_name, device_name, strip_generations)
_thresholds = collections.defaultdict(list)
def action_driver_rebind_threshold(bus_name, driver_name, device_name,
condition, entry,
max_hits, period, strip_generations = 0):
"""
Rebind a device to its driver to reset it if a condition happens often
    When the condition is reported more than *max_hits* times in
*period* seconds, then the device will be reset via driver
rebinding.
See :func:`action_driver_rebind` for information on the common
    parameters
    :param int period: (in seconds) amount of time to monitor
:param int max_hits: maximum number of occurrences of the
      condition that can happen in a period after which we'd rebind
the device.
"""
global _thresholds
logging.debug("%s/%s/%s rebind_threshold: considering entry %s",
bus_name, driver_name, device_name,
entry)
ts = entry.get('__REALTIME_TIMESTAMP', None)
threshold_name = device_name + "/" + condition
threshold = _thresholds[threshold_name]
bisect.insort(threshold, ts)
ts0 = threshold[0]
tse = threshold[-1]
while (tse - ts0).total_seconds() > period:
# the current list of thresholds we have in the list is longer
# than the period, so remove the older ones until we are
# within the period
threshold.pop(0)
ts0 = threshold[0]
logging.warning(
"%s/%s/%s: current queue has %d (max %d) hits "
"in %.1f minutes (max %.1f) for '%s'",
bus_name, driver_name, device_name, len(threshold), max_hits,
(tse - ts0).total_seconds() / 60, period / 60, condition)
if len(threshold) > max_hits:
logging.error("%s/%s/%s: ACTION: reload driver due to: '%s' @ %s "
"causing %d (max %d) hits in %.1f minutes (max %.1f)",
bus_name, driver_name, device_name,
condition, ts,
len(threshold), max_hits,
(tse - ts0).total_seconds() / 60, period / 60)
_driver_rebind(bus_name, driver_name, device_name, strip_generations)
# we start checking from scratch
_thresholds[threshold_name] = []
_kernel_device_regex = re.compile(r"^\+usb:(?P<busno>[0-9]+)-(?P<devno>[0-9]+)(\.[0-9]+)*$")
def action_driver_rebind_threshold_kernel_device(
bus_name, driver_name, device_name,
condition, entry,
max_hits, period, strip_generations = 0):
"""
This is the same as action_driver_rebind_threshold(), but adapted
    to the case when the actual /sys/bus/usb/devices/M-N disappears
due to a root port failure.
In this case we get a kernel device name +usb:BUSNUMBER-DEVICENO
(eg: +usb:3-2) which we have to translate to controller
/sys/bus/usb/devices/usb3.
    Now we can't just replace with 3-2 because in some cases, that
    sysfs node has disappeared.
Note the slight change in configuration language:
>>> config_watch_add("usb", "usb", re.compile("[0-9]+-[0-9]+$"), {
    >>> # Case happened where /sys/bus/usb/devices/3-2 disappeared but:
>>>
>>> # May 03 20:44:51 HOSTNAME kernel: usb 3-2: device descriptor read/64, error -110
>>> # Apr 27 22:44:02 ... kernel: usb 3-2: clear tt 4 (85c0) error -71
>>> # Just reload the thing if we get more than five in a minute
>>> 'device descriptor read/64, error -110': (
>>> # 2 is the number of generations we want to strip from the
>>> # device path; because 3-2's parent is usb3, whose
>>> # parent is the actual PCI device we need to reset
    >>>         action_driver_rebind_threshold_kernel_device, 5, 60, 2
>>> )},
>>> kernel_device = re.compile("\+usb:[0-9]+-[0-9]+$"))
Note the trailing *kernel_device* argument, a regex used to latch
on a kernel device name dynamically.
"""
match = _kernel_device_regex.match(device_name)
if not match:
raise AssertionError("device name %s does not match +usb:M-N[.O]*"
% device_name)
busno = match.groupdict()['busno']
# so now we have /sys/bus/usb/devices/usbBUSNO
realpath = os.path.realpath("/sys/bus/usb/devices/usb" + busno)
if not os.path.exists(realpath):
logging.error("%s: doesn't exist -- can't do anything", realpath)
return
# which is a symlink to /sys/devices/pci0000:00/0000:00:14.0/usb3
parent_dev = os.path.dirname(realpath)
# which is a symlink to /sys/devices/pci0000:00/0000:00:14.0 and
# it's driver is
driver_path = os.path.realpath(parent_dev + "/driver")
# /sys/bus/pci/drivers/xhci_hcd
# ok, so extract now to [ '', 'sys, 'bus', 'usb', 'drivers', 'xhci_hcd' # ... ]
_driver_path_parts = driver_path.split('/')
# bus_name = pci, driver_name = xhci_hcd, device_name #
# 0000:00:14.0
_bus_name = _driver_path_parts[3]
_driver_name = _driver_path_parts[5]
_device_name = os.path.basename(parent_dev)
logging.warning("%s/%s/%s mapped to %s/%s/%s",
bus_name, driver_name, device_name,
_bus_name, _driver_name, _device_name)
# and let the other function do it for us
action_driver_rebind_threshold(_bus_name, _driver_name, _device_name,
condition, entry, max_hits, period)
_watch_rules = []
def config_watch_add(bus_name, driver_name, device_name, actions):
r"""
:param str bus_name: name of bus in */sys/bus* to watch
:param str driver_name: name of driver in
*/sys/bus/BUS_NAME/drivers* to watch
:param str device_name: device under
/sys/bus/BUS_NAME/drivers/DRIVER_NAME to watch; if *None*, watch
all of them
:param dict actions: dictionary describing actions to do; key is a
substring of a message, value is a function to call or a tuple
that starts with a function to call and the rest are arguments
to add
The action function has to follow this prototype:
    >>> def action_function(bus_name, driver_name, device_name,
    ...                     condition, entry, *args, **kwargs):
thus, when called, bus_name, driver_name and device_name are all
the names of the entity that is causing it; condition is the
condition string that was matched (the key) and *entry* is the
journal entry which matched. *\*args* and *\*\*kwargs* are the
extra arguments given in the *actions* value tuple.
"""
assert isinstance(bus_name, str)
assert isinstance(driver_name, str)
if device_name:
if isinstance(device_name, str):
_device_name = "/" + device_name
elif isinstance(device_name, re.Pattern):
_device_name = "/" + device_name.pattern
else:
            raise AssertionError(
                "'device_name' must be string or regex, found %s"
                % type(device_name).__name__)
else:
_device_name = ""
assert isinstance(actions, dict)
global _watch_rules
_actions = {}
origin = commonl.origin_get(2)
# verify arguments and transform all the actions to a unique
# form (all have to be a list)
for condition, action in actions.items():
assert isinstance(condition, str), \
"Key passed as condition is not a string"
try:
action_fn = action[0]
_actions[condition] = action
except TypeError:
action_fn = action
_actions[condition] = [ action_fn ]
assert callable(action_fn), \
"Argument passed as action function to condition '%s' " \
"is not callable" % condition
driver_path = os.path.join("/sys/bus", bus_name, "drivers", driver_name)
if not os.path.isdir(driver_path):
logging.warning(
"%s/%s%s @%s: driver path does not exist, will not monitor",
bus_name, driver_name, _device_name, origin)
return
_watch_rules.append((
bus_name, driver_name, device_name, _actions, origin
))
logging.info("%s/%s%s @%s: will monitor",
bus_name, driver_name, _device_name, origin)
# Given a journal entry, check it against the list of stuff we have to
# watch for. note an entry comes as:
#
# {'MESSAGE': u'usb 2-1.2.4: reset full-speed USB device number 17 using ehci-pci',
# 'PRIORITY': 6,
# 'SYSLOG_FACILITY': 0,
# 'SYSLOG_IDENTIFIER': u'kernel',
# '_BOOT_ID': UUID('dc527a86-fa21-4085-bac2-ed4eccf83d0b'),
# '_HOSTNAME': u'some.host.domain',
# '_KERNEL_DEVICE': u'c189:144',
# '_KERNEL_SUBSYSTEM': u'usb',
# '_MACHINE_ID': UUID('2c766c91-79da-41ab-bb1a-2c903adf2211'),
# '_SOURCE_MONOTONIC_TIMESTAMP': datetime.timedelta(2, 43626, 293600),
# '_TRANSPORT': u'kernel',
# '_UDEV_DEVNODE': u'/dev/bus/usb/002/000',
# '_UDEV_SYSNAME': u'2-1.2.4',
# '__CURSOR': 's=9228bb40b9d140a585632aaeaf6c60e5;i=1987771;b=dc527a86fa214085bac2ed4eccf83d0b;m=3263f58acd;t=56c7f257761cc;x=d1b8e5236bc5e591',
# '__MONOTONIC_TIMESTAMP': (datetime.timedelta(2, 43625, 401037),
# UUID('dc527a86-fa21-4085-bac2-ed4eccf83d0b')),
# '__REALTIME_TIMESTAMP': datetime.datetime(2018, 5, 19, 0, 0, 28, 780492)}
#
def _entry_matched(entry, bus_name, driver_name, devname, actions, origin):
msg = entry['MESSAGE']
if '__REALTIME_TIMESTAMP' in entry:
ts = " " + str(entry['__REALTIME_TIMESTAMP'])
else:
ts = ""
# Device messages usually start with 'DRIVERNAME DEVICE: msg', so
# if we have a driver name, we try to match against that
_driver_name = msg.split(None, 1)[0]
if driver_name:
if isinstance(driver_name, str) \
and driver_name == _driver_name:
logging.debug("%s/%s: match on driver name @%s",
driver_name, devname, origin)
elif isinstance(driver_name, re.Pattern) \
and driver_name.match(_driver_name):
logging.debug("%s/%s: match on driver name @%s",
driver_name, devname, origin)
else:
# No driver match
logging.debug("%s: mismatch on driver name (%s vs %s requested) "
"@%s", devname, _driver_name, driver_name, origin)
return
else:
driver_name = _driver_name
found_actions = False
for condition, action in actions.items():
if condition in msg:
action_fn = action[0]
_args = action[1:]
try:
if logging.getLogger().getEffectiveLevel() < logging.DEBUG:
entry_info = ": %s" % pprint.pformat(entry)
else:
entry_info = ""
found_actions = True
if args.dry_run:
logging.error(
"[dry run]%s ACTION %s (%s, %s, %s, %s) @%s%s",
ts, action_fn, bus_name, devname, condition, _args,
origin, entry_info)
else:
logging.info("%s/%s/%s:%s matched entry%s",
bus_name, driver_name, devname, ts,
entry_info)
action_fn(bus_name, driver_name, devname,
condition, entry, *_args)
except Exception as e: # pylint: disable = broad-except
logging.exception(
"%s/%s/%s:%s action function raised uncaught "
"exception: %s",
bus_name, driver_name, devname, ts, e)
if not found_actions:
logging.debug("%s/%s/%s: mismatch on actions @%s",
bus_name, driver_name, devname, origin)
# Given a journal entry, check it against the list of stuff we have to
# watch for. note an entry comes as:
#
# {'MESSAGE': u'usb 2-1.2.4: reset full-speed USB device number 17 using ehci-pci',
# 'PRIORITY': 6,
# 'SYSLOG_FACILITY': 0,
# 'SYSLOG_IDENTIFIER': u'kernel',
# '_BOOT_ID': UUID('dc527a86-fa21-4085-bac2-ed4eccf83d0b'),
# '_HOSTNAME': u'some.host.domain',
# '_KERNEL_DEVICE': u'c189:144',
# '_KERNEL_SUBSYSTEM': u'usb',
# '_MACHINE_ID': UUID('2c766c91-79da-41ab-bb1a-2c903adf2211'),
# '_SOURCE_MONOTONIC_TIMESTAMP': datetime.timedelta(2, 43626, 293600),
# '_TRANSPORT': u'kernel',
# '_UDEV_DEVNODE': u'/dev/bus/usb/002/000',
# '_UDEV_SYSNAME': u'2-1.2.4',
# '__CURSOR': 's=9228bb40b9d140a585632aaeaf6c60e5;i=1987771;b=dc527a86fa214085bac2ed4eccf83d0b;m=3263f58acd;t=56c7f257761cc;x=d1b8e5236bc5e591',
# '__MONOTONIC_TIMESTAMP': (datetime.timedelta(2, 43625, 401037),
# UUID('dc527a86-fa21-4085-bac2-ed4eccf83d0b')),
# '__REALTIME_TIMESTAMP': datetime.datetime(2018, 5, 19, 0, 0, 28, 780492)}
#
def _check_entry(entry):
msg = entry['MESSAGE']
_device_name = entry.get('_UDEV_SYSNAME', None)
_kernel_name = entry.get('_KERNEL_DEVICE', None)
bus_name = None
driver_name = None
device_name = None
actions = None
origin = None
while not _device_name and not _kernel_name:
# If the entry has no device message, then let's try to
# extract it from the message, things like:
#
# usb 3-2-port1: cannot reset (err = -110)',
regex_usb = re.compile("usb (?P<devname>[0-9]+-[0-9]+)-.*:")
m = regex_usb.match(msg)
if m:
_device_name = m.groupdict()['devname']
if _device_name:
logging.warning("guessed USB device %s from message (had "
"no entry for it)", _device_name)
break
logging.debug("ignored deviceless entry: %s",
pprint.pformat(entry))
return
for bus_name, driver_name, device_name, actions, origin \
in _watch_rules:
if device_name and _device_name:
if isinstance(device_name, str) \
and device_name == _device_name:
logging.debug("%s: match on device name @%s",
_device_name, origin)
devname = _device_name
_entry_matched(entry, bus_name, driver_name,
devname, actions, origin)
continue
elif isinstance(device_name, re.Pattern) \
and device_name.match(_device_name):
logging.debug("%s: match on device name @%s",
_device_name, origin)
devname = _device_name
_entry_matched(entry, bus_name, driver_name,
devname, actions, origin)
continue
if device_name and _kernel_name:
# lookup by kernel device name (for example, for USB
# they look like +usb:3-2
if isinstance(device_name, str) \
and device_name == _kernel_name:
logging.debug("%s: match on kernel name @%s",
_kernel_name, origin)
devname = _kernel_name
_entry_matched(entry, bus_name, driver_name,
devname, actions, origin)
continue
elif isinstance(device_name, re.Pattern) \
and device_name.match(_kernel_name):
logging.debug("%s: match on kernel name @%s",
_kernel_name, origin)
devname = _kernel_name
_entry_matched(entry, bus_name, driver_name,
devname, actions, origin)
continue
# Support for -v option to increase verbosity
def _logging_verbosity_inc(level):
if level == 0:
return
if level > logging.DEBUG:
delta = 10
else:
delta = 1
return level - delta
class _action_increase_level(argparse.Action):
def __init__(self, option_strings, dest, default = None, required = False,
nargs = None, **kwargs):
super(_action_increase_level, self).__init__(
option_strings, dest, nargs = 0, required = required,
**kwargs)
#
# Python levels are 50, 40, 30, 20, 10 ... (debug) 9 8 7 6 5 ... :)
def __call__(self, parser, namespace, values, option_string = None):
        if namespace.level is None:
namespace.level = logging.ERROR
namespace.level = _logging_verbosity_inc(namespace.level)
logging.addLevelName(50, "C")
logging.addLevelName(40, "E")
logging.addLevelName(30, "W")
logging.addLevelName(20, "I")
logging.addLevelName(10, "D")
# Initialize command line argument parser
arg_parser = argparse.ArgumentParser(
description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
arg_parser.set_defaults(level = logging.ERROR)
arg_parser.add_argument("-v", "--verbose",
dest = "level",
action = _action_increase_level, nargs = 0,
help = "Increase verbosity")
arg_parser.add_argument("--config-path",
action = "store", dest = "config_path",
default = "/etc/ttbd-hw-healthmonitor",
help = "Path from where to load conf_*.py "
"configuration files (in alphabetic order)")
arg_parser.add_argument("-b", "--bootid",
action = 'store', default = None,
help = "select bootid (from journalctl --list-boots)")
arg_parser.add_argument("--seek-realtime",
action = 'store', default = False,
help = "check from the given time")
arg_parser.add_argument("--seek-head",
action = 'store_true', default = False,
                        help = "check from the beginning of the boot")
arg_parser.add_argument("-n", "--dry-run",
action = 'store_true', default = False,
                        help = "only show what it would do")
args = arg_parser.parse_args()
logging.basicConfig(
level = args.level,
format = "%(levelname)s: %(message)s")
#
# Read configuration and decide what to watch
#
_ttbd_hw_health_monitor_driver_rebind_path = \
commonl.ttbd_locate_helper("ttbd-hw-healthmonitor-driver-rebind.py",
ttbl._install.share_path,
log = logging)
logging.debug("Found helper %s", _ttbd_hw_health_monitor_driver_rebind_path)
args.config_path = os.path.expanduser(args.config_path)
if args.config_path != "":
commonl.config_import([ args.config_path ], re.compile("^conf[-_].*.py$"))
journal = systemd.journal.Reader()
journal.log_level(systemd.journal.LOG_INFO)
logging.debug("opened journal")
systemd.daemon.notify("READY=1")
journal.this_boot(args.bootid)
journal.this_machine()
logging.debug("journal: filtering for kernel messages")
journal.add_match(_TRANSPORT = "kernel")
# We don't filter per-subsystem, because some of the messages (like
# USB's 'cannot reset') are not bound to it
poller = select.poll()
poller.register(journal, journal.get_events())
# Enter directly to iterate to consume all the records since we booted
if args.seek_head:
journal.seek_head()
elif args.seek_realtime:
journal.seek_realtime(time.mktime(time.strptime(
args.seek_realtime, "%Y-%m-%d %H:%M:%S")))
else:
journal.seek_tail()
_bark_ts0 = time.time()
def _bark_periodically(period, msg):
global _bark_ts0
ts = time.time()
if ts - _bark_ts0 > period: # every five seconds, bark
_bark_ts0 = ts
systemd.daemon.notify("WATCHDOG=1")
if msg:
logging.debug("currently checking: %s", msg)
else:
logging.debug("currently checking")
first_run = True
while True:
if not first_run:
poller.poll(5000)
if journal.process() != systemd.journal.APPEND:
continue
first_run = False
logging.debug("polled")
_bark_periodically(5, "main loop")
for _entry in journal:
logging.log(8, "entry %s", pprint.pformat(_entry))
_check_entry(_entry)
if '__REALTIME_TIMESTAMP' in _entry:
_bark_periodically(5, _entry.get('__REALTIME_TIMESTAMP'))
else:
_bark_periodically(5, _entry)
|
import torch
import torchvision
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
import wandb
import os
def authorize_wandb(project, name, config):
"""Authorizes Weights and Biases for the project
:param project: Name of the project.
:type project: str
:param name: Name for the experiment.
:type name: str
:param config: Configuration for the run.
:type config: dict
"""
wandb.init(project=project, name=name, config=config)
def log_wandb(logs, step):
"""Helper function to save logs to a particular step.
:param logs: A Python dictionary of the parameters and their values.
:type logs: dict
:param step: The current step.
:type step: int
"""
wandb.log(logs, step)
def save_output(epoch, output_path, fixed_noise, generator, fixed_labels=None):
"""Save the output of the generator into a specified folder or on Weights and Biases.
:param epoch: Current epoch.
:type epoch: int
:param output_path: Directory to which the image would be saved. \
If output_path is set to wandb, it will save to your wandb project.
    :type output_path: str
:param fixed_noise: The fixed noise created before training.
:type fixed_noise: torch.Tensor
:param generator: The generator model.
:type generator: Generator
:param fixed_labels: Labels for conditional generation.
:type fixed_labels: torch.Tensor
"""
plt.clf()
generator.eval()
with torch.no_grad():
if fixed_labels is not None:
test_images = generator(fixed_noise, fixed_labels)
else:
test_images = generator(fixed_noise)
generator.train()
grid = torchvision.utils.make_grid(test_images.cpu(), normalize=True)
if output_path == "wandb":
wandb.log(
{"output": wandb.Image(grid, caption=f"Output for epoch: {epoch}")},
step=epoch,
)
else:
image = transforms.ToPILImage()(grid)
if not os.path.exists(output_path):
os.mkdir(output_path)
image.save(f"./{output_path}/epoch_{epoch}.jpeg")
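# Minimal usage sketch (illustrative only; the project name, run name, config
# keys and training loop are placeholders, and save_output additionally needs
# the fixed noise tensor and generator documented in its docstring):
#
#   authorize_wandb("my-project", "baseline-run", {"lr": 2e-4, "batch_size": 64})
#   for epoch in range(num_epochs):
#       ...  # train generator / discriminator for one epoch
#       log_wandb({"g_loss": g_loss, "d_loss": d_loss}, step=epoch)
#       save_output(epoch, "wandb", fixed_noise, generator)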
|
import re
import Regex
def Name(string,pattern = Regex.NamePattern):
if re.findall(pattern,string):
return True
else:
return False
def City(string,pattern = Regex.CityPattern):
if re.findall(pattern,string):
return True
else:
return False
def Number(string,pattern = Regex.NumberPattern):
if re.findall(pattern,string):
return True
else:
        return False
|
from django.contrib import admin
from import_export.admin import ImportExportModelAdmin
from recommender.models import Item
@admin.register(Item)
class ItemAdmin(ImportExportModelAdmin):
search_fields = ['id']
|
import sys
import time
import traceback
from ccapi import EventHandler, SessionOptions, SessionConfigs, Session, Subscription, Event
class MyEventHandler(EventHandler):
def __init__(self):
super().__init__()
def processEvent(self, event: Event, session: Session) -> bool:
try:
raise Exception('oops')
return True # This line is needed.
except Exception:
print(traceback.format_exc())
sys.exit(1)
if __name__ == '__main__':
eventHandler = MyEventHandler()
option = SessionOptions()
config = SessionConfigs()
session = Session(option, config, eventHandler)
subscription = Subscription('coinbase', 'BTC-USD', 'MARKET_DEPTH')
session.subscribe(subscription)
time.sleep(10)
session.stop()
print('Bye')
|
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate an HTML file containing license info for all installed packages.
Documentation on this script is also available here:
http://www.chromium.org/chromium-os/licensing-for-chromiumos-developers
End user (i.e. package owners) documentation is here:
http://www.chromium.org/chromium-os/licensing-for-chromiumos-package-owners
Usage:
For this script to work, you must have built the architecture
this is being run against, _after_ you've last run repo sync.
Otherwise, it will query newer source code and then fail to work on packages
that are out of date in your build.
Recommended build:
cros_sdk
export BOARD=x86-alex
sudo rm -rf /build/$BOARD
cd ~/trunk/src/scripts
# If you wonder why we need to build Chromium OS just to run
# `emerge -p -v virtual/target-os` on it, we don't.
# However, later we run ebuild unpack, and this will apply patches and run
# configure. Configure will fail due to aclocal macros missing in
# /build/x86-alex/usr/share/aclocal (those are generated during build).
# This will take about 10 min on a Z620.
./build_packages --board=$BOARD --nowithautotest --nowithtest --nowithdev
--nowithfactory
cd ~/trunk/chromite/licensing
# This removes left over packages from an earlier build that could cause
# conflicts.
eclean-$BOARD packages
%(prog)s [--debug] [--all-packages] --board $BOARD [-o o.html] 2>&1 | tee out
The workflow above is what you would do to generate a licensing file by hand
given a chromeos tree.
Note that building packages now creates a license.yaml fork in the package
which you can see with
qtbz2 -x -O /build/x86-alex/packages/dev-util/libc-bench-0.0.1-r8.tbz2 |
qxpak -x -O - license.yaml
This gets automatically installed in
/build/x86-alex/var/db/pkg/dev-util/libc-bench-0.0.1-r8/license.yaml
Unless you run with --generate, the script will now gather those license
bits and generate a license file from there.
License bits for each package are generated by default from
src/scripts/hooks/install/gen-package-licenses.sh which gets run automatically
by emerge as part of a package build (by running this script with
--hook /path/to/tmp/portage/build/tree/for/that/package)
If license bits are missing, they are generated on the fly if you were running
with sudo. If you didn't use sudo, this on-the-fly generation will fail
and act as a warning that your prebuilts were missing package build time
licenses.
You can check the licenses and/or generate a HTML file for a list of
packages using --package or -p:
%(prog)s --package "dev-libs/libatomic_ops-7.2d" --package
"net-misc/wget-1.14" --board $BOARD -o out.html
Note that you'll want to use --generate to force regeneration of the licensing
bits from a package source you may have just modified but not rebuilt.
If you want to check licensing against all ChromeOS packages, you should
run ./build_packages --board=$BOARD to build everything and then run
this script with --all-packages.
By default, when no package is specified, this script processes all
packages for $BOARD. The output HTML file is meant to update
http://src.chromium.org/viewvc/chrome/trunk/src/chrome/browser/resources/ +
chromeos/about_os_credits.html?view=log
(gclient config svn://svn.chromium.org/chrome/trunk/src)
For an example CL, see https://codereview.chromium.org/13496002/
The detailed process is listed below.
* Check out the branch you intend to generate the HTML file for. Use
the internal manifest for this purpose.
repo init -b <branch_name> -u <URL>
The list of branches (e.g. release-R33-5116.B) are available here:
https://chromium.googlesource.com/chromiumos/manifest/+refs
* Generate the HTML file by following the steps mentioned
previously. Check whether your changes are valid with:
bin/diff_license_html output.html-M33 output.html-M34
and review the diff.
* Update the about_os_credits.html in the svn repository. Create a CL
and upload it for review.
gcl change <change_name>
gcl upload <change_name>
When uploading, you may get a warning for file being too large to
upload. In this case, your CL can still be reviewed. Always include
the diff in your commit message so that the reviewers know what the
changes are. You can add reviewers on the review page by clicking on
"Edit issue". (A quick reference:
http://www.chromium.org/developers/quick-reference)
Make sure you click on 'Publish+Mail Comments' after adding reviewers
(the review URL looks like this https://codereview.chromium.org/183883018/ ).
* After receiving LGTMs, commit your change with 'gcl commit <change_name>'.
If you don't get this in before the freeze window, it'll need to be merged into
the branch being released, which is done by adding a Merge-Requested label.
Once it's been updated to "Merge-Approved" by a TPM, please merge into the
required release branch. You can ask karen@ for merge approve help.
Example: http://crbug.com/221281
Note however that this is only during the transition period.
build-image will be modified to generate the license for each board and save
the file in /opt/google/chrome/resources/about_os_credits.html or as defined
in http://crbug.com/271832 .
"""
from __future__ import print_function
import os
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.licensing import licenses_lib
EXTRA_LICENSES_DIR = os.path.join(licenses_lib.SCRIPT_DIR,
'extra_package_licenses')
# These packages exist as workarounds....
EXTRA_PACKAGES = (
('sys-kernel/Linux-2.6',
['http://www.kernel.org/'], ['GPL-2'], []),
('app-arch/libarchive-3.1.2',
['http://www.libarchive.org/'], ['BSD', 'public-domain'],
['libarchive-3.1.2.LICENSE']),
)
def LoadPackageInfo(board, all_packages, generateMissing, packages):
"""Do the work when we're not called as a hook."""
logging.info("Using board %s.", board)
builddir = os.path.join(cros_build_lib.GetSysroot(board=board),
'tmp', 'portage')
if not os.path.exists(builddir):
raise AssertionError(
"FATAL: %s missing.\n"
"Did you give the right board and build that tree?" % builddir)
detect_packages = not packages
if detect_packages:
# If no packages were specified, we look up the full list.
packages = licenses_lib.ListInstalledPackages(board, all_packages)
if not packages:
raise AssertionError('FATAL: Could not get any packages for board %s' %
board)
logging.debug("Initial Package list to work through:\n%s",
'\n'.join(sorted(packages)))
licensing = licenses_lib.Licensing(board, packages, generateMissing)
licensing.LoadPackageInfo()
logging.debug("Package list to skip:\n%s",
'\n'.join([p for p in sorted(packages)
if licensing.packages[p].skip]))
logging.debug("Package list left to work through:\n%s",
'\n'.join([p for p in sorted(packages)
if not licensing.packages[p].skip]))
licensing.ProcessPackageLicenses()
if detect_packages:
# If we detected 'all' packages, we have to add in these extras.
for fullnamewithrev, homepages, names, files in EXTRA_PACKAGES:
license_texts = [osutils.ReadFile(os.path.join(EXTRA_LICENSES_DIR, f))
for f in files]
licensing.AddExtraPkg(fullnamewithrev, homepages, names, license_texts)
return licensing
def main(args):
parser = commandline.ArgumentParser(usage=__doc__)
parser.add_argument("-b", "--board",
help="which board to run for, like x86-alex")
parser.add_argument("-p", "--package", action="append", default=[],
dest="packages",
help="check the license of the package, e.g.,"
"dev-libs/libatomic_ops-7.2d")
parser.add_argument("-a", "--all-packages", action="store_true",
dest="all_packages",
help="Run licensing against all packages in the "
"build tree, instead of just virtual/target-os "
"dependencies.")
parser.add_argument("-g", "--generate-licenses", action="store_true",
dest="gen_licenses",
help="Generate license information, if missing.")
parser.add_argument("-o", "--output", type="path",
help="which html file to create with output")
opts = parser.parse_args(args)
if not opts.board:
raise AssertionError("No board given (--board)")
if not opts.output and not opts.gen_licenses:
raise AssertionError("You must specify --output and/or --generate-licenses")
if opts.gen_licenses and os.geteuid() != 0:
raise AssertionError("Run with sudo if you use --generate-licenses.")
licensing = LoadPackageInfo(
opts.board, opts.all_packages, opts.gen_licenses, opts.packages)
if opts.output:
licensing.GenerateHTMLLicenseOutput(opts.output)
|
#!/usr/bin/env python3
import os
import pytest
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../lib"))
import dartsense.organisation
organisation_list = None
def test_organisation_list_init(setup_db):
organisation_list = dartsense.organisation.OrganisationList()
assert isinstance(organisation_list, dartsense.organisation.OrganisationList)
assert len(organisation_list) == 2
i = 0
for organisation in organisation_list:
i = i + 1
assert isinstance(organisation, dartsense.organisation.Organisation)
assert i == 2
|
from envs import ShippingFacilityEnvironment, rewards
from envs.network_flow_env import (
EnvironmentParameters,
)
from envs.order_generators import (
ActualOrderGenerator,
BiasedOrderGenerator,
NormalOrderGenerator,
)
from envs.inventory_generators import DirichletInventoryGenerator
from envs.shipping_assignment_env import ShippingAssignmentEnvironment
from network.physical_network import PhysicalNetwork
def build_next_gen_network_flow_environment(
environment_config, episode_length, order_gen: str, reward_function_name: str
):
"""
Second generation of shipping point assignment environment.
Args:
environment_config: A dictionary with the config to build the environment, see any PTL run for an example.
episode_length:
order_gen:
reward_function:
Returns:
"""
physical_network = PhysicalNetwork(
num_dcs=environment_config["num_dcs"],
num_customers=environment_config["num_customers"],
dcs_per_customer=environment_config["dcs_per_customer"],
demand_mean=environment_config["demand_mean"],
demand_var=environment_config["demand_var"],
big_m_factor=environment_config["big_m_factor"],
num_commodities=environment_config["num_commodities"],
)
order_generator = order_generator_chooser(
physical_network, order_gen, environment_config["orders_per_day"]
)
inventory_generator = DirichletInventoryGenerator(physical_network)
reward_function = rewards.reward_chooser(reward_function_name)
return ShippingAssignmentEnvironment(
physical_network,
order_generator,
inventory_generator,
reward_function,
num_steps=episode_length,
)
def build_network_flow_env_parameters( # TODO receive individuals instead of all dict?
environment_config, episode_length, order_gen: str
):
"""
Deprecated
Old way of creating environment (first step, parameters).
"""
physical_network = PhysicalNetwork(
num_dcs=environment_config["num_dcs"],
num_customers=environment_config["num_customers"],
dcs_per_customer=environment_config["dcs_per_customer"],
demand_mean=environment_config["demand_mean"],
demand_var=environment_config["demand_var"],
big_m_factor=environment_config["big_m_factor"],
num_commodities=environment_config["num_commodities"],
)
order_generator = order_generator_chooser(
physical_network, order_gen, environment_config["orders_per_day"]
)
generator = DirichletInventoryGenerator(physical_network)
environment_parameters = EnvironmentParameters(
physical_network, order_generator, generator, episode_length
)
return environment_parameters
def order_generator_chooser(physical_network, order_gen, orders_per_day):
"""
Picks an order generator based on configuraiton
Args:
orders_per_day:
order_gen:
physical_network:
Returns:
"""
if order_gen == "original": #
order_generator = ActualOrderGenerator(physical_network, orders_per_day)
elif order_gen == "biased":
order_generator = BiasedOrderGenerator(physical_network, orders_per_day)
elif order_gen == "normal_multivariate":
order_generator = NormalOrderGenerator(physical_network, orders_per_day)
else:
        raise NotImplementedError("alternatives are original, biased and normal_multivariate")
return order_generator
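# Hedged illustration (added, not part of the original module): a minimal `environment_config`
# containing every key the builders above read. The numeric values are placeholders, not tuned
# experiment settings, and the reward name must be one accepted by rewards.reward_chooser.
def _example_environment_config():
    return {
        "num_dcs": 3,
        "num_customers": 10,
        "dcs_per_customer": 2,
        "demand_mean": 100,
        "demand_var": 20,
        "big_m_factor": 10000,
        "num_commodities": 4,
        "orders_per_day": 2,
    }
# e.g. build_next_gen_network_flow_environment(_example_environment_config(), 30, "biased", <reward name>)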
|
name = input("Enter your name")
print("Hello" + name)
print("Long Live India!")
|
"""
The tool to check the availability or syntax of domain, IP or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
Provides the list helpers
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/#/special-thanks
Contributors:
https://pyfunceble.github.io/#/contributors
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/dev/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2021 Nissar Chababy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import copy
from typing import Any, List, Optional
class ListHelper:
"""
Simplify the list manipulation.
:param subject:
The list to work with.
"""
_subject: Optional[List[Any]] = None
def __init__(self, subject: Optional[List[Any]] = None):
if subject is not None:
self.subject = subject
@property
def subject(self):
"""
Provides the current state of the :code:`_subject` attribute.
"""
return self._subject
@subject.setter
def subject(self, value: List[Any]) -> None:
"""
Sets the subject to work with.
:param value:
The subject to work with.
:raise TypeError:
When :code:`value` is not a :py:class:`list`.
"""
if not isinstance(value, list):
raise TypeError(f"<value> should be {list}, {type(value)} given.")
self._subject = copy.deepcopy(value)
def set_subject(self, value: List[Any]) -> "ListHelper":
"""
Sets the subject to work with.
:param value:
The subject to work with.
"""
self.subject = value
return self
def remove_empty(self) -> "ListHelper":
"""
Removes the empty entries of the given list.
"""
self.subject = [x for x in self.subject if x is None or x]
return self
def remove_duplicates(self) -> "ListHelper":
"""
Removes the duplicates of the current list.
"""
result = []
for element in self.subject:
if element not in result:
result.append(element)
self.subject = result
return self
def sort(self, *, reverse: bool = False) -> "ListHelper":
"""
        Sorts the given list (preferably a list of strings).
:param bool reverse: Tell us if we have to reverse the list.
"""
self.custom_sort(str.lower, reverse=reverse)
return self
def custom_sort(self, key_method: Any, *, reverse: bool = False) -> "ListHelper":
"""
Sorts the list with the given key method.
:param key_method:
            A function or method used to format each
            element before sorting.
:type key_method: function|method
:param bool reverse: Tell us if we have to reverse the list.
"""
self.subject = sorted(self.subject, key=key_method, reverse=reverse)
return self
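if __name__ == "__main__":
    # Minimal chaining sketch (added for illustration; not part of PyFunceble itself).
    # Note that remove_empty() drops "" but keeps None entries (see the `x is None or x` filter).
    demo = ListHelper(["b", "", "a", "b"]).remove_empty().remove_duplicates().sort()
    print(demo.subject)  # ['a', 'b']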
|
#!/usr/bin/env python3
import sys
import psycopg2
import datetime
from psycopg2 import Error
from termcolor import colored, cprint
class postgres_cursor_print:
def __init__ ( self, cursor ):
self.cursor = cursor
self.rows = None
self.query = None
self.col_widths = None
self.col_name_mappings = {}
self.header_color = 'green'
self.row_color = 'yellow'
self.column_padding = 0
def info(self):
        info = self.cursor.connection.get_dsn_parameters()
for key in info.keys():
print('{0:>24} : {1:<}'.format(key, info[key]))
return
def map_column_name(self, from_name, to_name):
self.col_name_mappings[from_name] = to_name
return
def map_column_name_dict(self, name_dict):
self.col_name_mappings.update(name_dict)
return
def get_mapped_column_name(self, col_name):
if col_name in self.col_name_mappings.keys():
return self.col_name_mappings[col_name]
return col_name
def exec_query ( self, query ):
self.query = query
self.cursor.execute(query)
rows = self.cursor.fetchall()
self.rows = rows
return
def get_column_names ( self ):
colnames = [desc[0] for desc in self.cursor.description]
return colnames
def calc_columns_widths( self ):
col_names = self.get_column_names()
self.col_widths = [0] * len(col_names)
for idx, col_name in enumerate(col_names):
col_name = self.get_mapped_column_name(col_name)
self.col_widths[idx] = len(col_name)
        for row in self.rows:
            for idx, col_value in enumerate(row):
                if len(str(col_value or '')) > self.col_widths[idx]:
                    self.col_widths[idx] = len(str(col_value or ''))
        # widen each column by the configured padding (in place)
        for idx in range(len(self.col_widths)):
            self.col_widths[idx] += self.column_padding
        return
def formatted_column_name(self, idx ):
return f"{idx}"
def output_rows ( self , header_color = None, row_color = None ):
        if header_color is not None:
            self.header_color = header_color
        if row_color is not None:
            self.row_color = row_color
self.calc_columns_widths()
print(f"\nThere are {len(self.rows)} rows to output.")
for idx, col_name in enumerate(self.get_column_names()):
fline = f"{{0:{self.col_widths[idx]}}}{' ' * self.column_padding}|"
            cprint(fline.format(self.get_mapped_column_name(col_name)), self.header_color, 'on_blue', end=' ')
print()
for idx,row in enumerate(self.rows):
for cidx,col in enumerate(row):
alignment_char = '<'
                if col is None:
                    col = ''
                if isinstance(col, float):
                    col = "{0:.2f}".format(col)
                    alignment_char = '>'
                elif isinstance(col, int):
                    alignment_char = '>'
                elif isinstance(col, datetime.date):
                    col = col.strftime('%m/%d/%y')
fline = f"{{0:{alignment_char}{self.col_widths[cidx]}}}{' ' * self.column_padding}|"
cprint ( fline.format(str(col)), self.row_color, end = ' ')
print()
return
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
connection = psycopg2.connect( user="postgres",
password="",
host="127.0.0.1",
port="5432",
database="Northwind")
cursor = connection.cursor()
pcp = postgres_cursor_print(cursor)
pcp.info()
limit_amount = '5'
pcp.exec_query(f"""SELECT ord.order_id as "id"
, concat(emp.last_name, ', ', emp.first_name) as "employee"
, concat(ord.customer_id, ', ', cst.company_name) as "customer"
-- , cst.company_name as "customer"
, ord.ship_name as "ship to"
, ord.order_date as "ordered"
, ord.required_date as "required"
, ord.shipped_date as "shipped"
, shp.company_name as "shipper"
, ord.freight as "shipping cost"
, cst.address as "ship address"
, ord.ship_address as "ship address"
, ord.ship_city as "city"
, ord.ship_region as "region"
, ord.ship_postal_code as "postal code"
, ord.ship_country as "country"
FROM orders ord
JOIN employees emp ON emp.employee_id = ord.employee_id
JOIN shippers shp ON shp.shipper_id = ord.ship_via
JOIN customers cst ON cst.customer_id = ord.customer_id
ORDER BY employee
LIMIT {limit_amount}
;""")
pcp.column_padding = 0
pcp.output_rows()
|
from spidermon import Monitor, MonitorSuite, monitors
class DummyMonitor(Monitor):
def runTest(self):
pass
class DummyMonitorSuite(MonitorSuite):
monitors = [DummyMonitor]
# ----------------------------------
# Monitors ordering
# ----------------------------------
class Unordered:
class A(DummyMonitor):
pass
class B(DummyMonitor):
pass
class C(DummyMonitorSuite):
pass
class D(DummyMonitorSuite):
pass
class Ordered:
@monitors.order(1)
class A(DummyMonitor):
pass
@monitors.order(2)
class B(DummyMonitor):
pass
@monitors.order(3)
class C(DummyMonitorSuite):
pass
@monitors.order(4)
class D(DummyMonitorSuite):
pass
# ----------------------------------
# Methods ordering
# ----------------------------------
class UnorderedMethodsMonitor(Monitor):
def test_a(self):
pass
def test_b(self):
pass
def test_c(self):
pass
class OrderedMethodsMonitor(Monitor):
@monitors.order(3)
def test_a(self):
pass
@monitors.order(2)
def test_b(self):
pass
@monitors.order(1)
def test_c(self):
pass
class EqualOrderedMethodsMonitor(Monitor):
@monitors.order(5)
def test_a(self):
pass
@monitors.order(5)
def test_b(self):
pass
@monitors.order(5)
def test_c(self):
pass
|
from django.contrib.auth.models import User
from django.urls import reverse
from django.db import models
from PIL import Image
# Create your models here.
class Prof(models.Model):
user = models.OneToOneField(
User, null=True, blank=True, on_delete=models.CASCADE)
nickname = models.CharField(
max_length=255, null=True)
work = models.CharField(max_length=255, blank=True, default="studiant")
phone = models.CharField(max_length=12, blank=True, null=True)
linkedin_link = models.URLField(max_length=255, blank=True)
instagram_link = models.URLField(max_length=255, blank=True)
facebook_link = models.URLField(max_length=255, blank=True)
youtube_link = models.URLField(max_length=255, blank=True)
twitter_link = models.URLField(max_length=255, blank=True)
bio = models.TextField(max_length=255, null=True, blank=True)
prof_views = models.IntegerField(default=0)
image = models.ImageField(
null=True, blank=True, upload_to="images/", default="images/default.jpg")
def __str__(self):
return str(self.nickname)
|
import cv2 as cv
import numpy as np
import matplotlib.pyplot as plt
import json
from random import randint
def get_img_from_dataset(no):
img = cv.imread(f"dataset\image_{no}.jpg")
return img
def resize_picture(img, scale_percent=20):
    print('Original dimensions:', img.shape)
width = int(img.shape[1] * scale_percent / 100)
height = int(img.shape[0] * scale_percent / 100)
dim = (width, height)
    # INTER_AREA interpolation is a good choice when shrinking images
resized = cv.resize(img, dim, interpolation = cv.INTER_AREA)
return resized
def print_img(img, title="domyslny", gray_scale_flag=False, BGR=True):
if not gray_scale_flag:
if BGR is True:
plt.title(title)
plt.imshow(img)
plt.show()
else:
plt.title(title)
plt.imshow(img, cmap="gray")
plt.show()
def create_bbox(img, p_min, p_max, thickness=2, color=(255, 0, 0)):
for i in range(p_min[0], p_max[0]):
for j in range(p_min[1], p_min[1] + thickness):
img[j][i] = color
for i in range(p_min[0], p_max[0]):
for j in range(p_max[1] - thickness, p_max[1]):
img[j][i] = color
for i in range(p_min[1], p_max[1]):
for j in range(p_min[0], p_min[0] + thickness):
img[i][j] = color
for i in range(p_min[1], p_max[1]):
for j in range(p_max[0] - thickness, p_max[0]):
img[i][j] = color
return img
def import_vector_for_cls():
try:
f = open('config.json')
data = json.load(f)
f.close()
return data
except FileNotFoundError:
return None
|
import RPi.GPIO as GPIO
from tuxdroid.tuxdroid import TuxDroid
tux = TuxDroid("config.yaml")
input("Press Enter to stop...")
tux.stop()
|
#!/usr/bin/python3
# coding: utf-8
"""
This module is to login Disney+
"""
import logging
import re
import json
from getpass import getpass
import sys
import requests
from configs.config import Config
from utils.helper import get_locale
class Login(object):
def __init__(self, email, password, ip_info, locale):
self.logger = logging.getLogger(__name__)
self._ = get_locale(__name__, locale)
self.config = Config()
self.email = email
self.password = password
location = ip_info['loc'].split(',')
self.latitude = location[0]
self.longitude = location[1]
self.user_agent = self.config.get_user_agent()
self.session = requests.Session()
self.session.headers = {
'user-agent': self.user_agent
}
self.proxy = ip_info['proxy']
if self.proxy:
self.session.proxies.update(self.proxy)
self.api = {
'login_page': 'https://www.disneyplus.com/login',
'devices': 'https://global.edge.bamgrid.com/devices',
'login': 'https://global.edge.bamgrid.com/idp/login',
'token': 'https://global.edge.bamgrid.com/token',
'grant': 'https://global.edge.bamgrid.com/accounts/grant',
'current_account': 'https://global.edge.bamgrid.com/accounts/me',
'session': 'https://disney.api.edge.bamgrid.com/session'
}
def client_info(self):
res = self.session.get(self.api['login_page'])
match = re.search('window.server_path = ({.*});', res.text)
data = json.loads(match.group(1))
client_id = data['sdk']['clientId']
client_apikey = data['sdk']['clientApiKey']
self.logger.debug("client_id: %s\nclient_apikey: %s",
client_id, client_apikey)
return client_id, client_apikey
def assertion(self, client_apikey):
postdata = {
'applicationRuntime': 'chrome',
'attributes': {},
'deviceFamily': 'browser',
'deviceProfile': 'macintosh'
}
header = {'authorization': f'Bearer {client_apikey}',
'Origin': 'https://www.disneyplus.com'}
res = self.session.post(url=self.api['devices'],
headers=header, json=postdata)
assertion = res.json()['assertion']
self.logger.debug("assertion: %s", assertion)
return assertion
def access_token(self, client_apikey, assertion_):
header = {'authorization': f'Bearer {client_apikey}',
'Origin': 'https://www.disneyplus.com'}
postdata = {
'grant_type': 'urn:ietf:params:oauth:grant-type:token-exchange',
'latitude': self.latitude,
'longitude': self.longitude,
'platform': 'browser',
'subject_token': assertion_,
'subject_token_type': 'urn:bamtech:params:oauth:token-type:device'
}
res = self.session.post(
url=self.api['token'], headers=header, data=postdata)
if res.status_code == 200:
access_token = res.json()['access_token']
self.logger.debug("access_token: %s", access_token)
return access_token
if 'unreliable-location' in str(res.text):
self.logger.error(
"Make sure you use NL proxy/vpn, or your proxy/vpn is blacklisted.")
sys.exit(1)
else:
try:
self.logger.error("Error: %s", res.json()[
'errors']['error_description'])
sys.exit(0)
except Exception:
self.logger.error("Error: %s", res.text)
sys.exit(0)
def login(self, access_token):
if self.email and self.password:
email = self.email.strip()
password = self.password.strip()
else:
email = input(self._("Disney+ email: "))
password = getpass(self._("Disney+ password: "))
headers = {
'accept': 'application/json; charset=utf-8',
'authorization': f'Bearer {access_token}',
'content-type': 'application/json; charset=UTF-8',
'Origin': 'https://www.disneyplus.com',
'Referer': 'https://www.disneyplus.com/login/password',
'Sec-Fetch-Mode': 'cors',
'User-Agent': self.user_agent,
'x-bamsdk-platform': 'macintosh',
'x-bamsdk-version': '3.10',
}
data = {'email': email, 'password': password}
res = self.session.post(
url=self.api['login'], data=json.dumps(data), headers=headers)
if res.status_code == 200:
id_token = res.json()['id_token']
self.logger.debug("id_token: %s", id_token)
return id_token
try:
self.logger.error("Error: %s", res.json()['errors'])
sys.exit(0)
except Exception:
self.logger.error("Error: %s", res.text)
sys.exit(0)
def grant(self, id_token, access_token):
headers = {
'accept': 'application/json; charset=utf-8',
'authorization': f'Bearer {access_token}',
'content-type': 'application/json; charset=UTF-8',
'Origin': 'https://www.disneyplus.com',
'Referer': 'https://www.disneyplus.com/login/password',
'Sec-Fetch-Mode': 'cors',
'User-Agent': self.user_agent,
'x-bamsdk-platform': 'macintosh',
'x-bamsdk-version': '3.10',
}
data = {'id_token': id_token}
res = self.session.post(
url=self.api['grant'], data=json.dumps(data), headers=headers)
if res.ok:
return res.json()['assertion']
else:
self.logger.error(res.text)
sys.exit(1)
def final_token(self, subject_token, client_apikey):
header = {'authorization': f'Bearer {client_apikey}',
'Origin': 'https://www.disneyplus.com'}
postdata = {
'grant_type': 'urn:ietf:params:oauth:grant-type:token-exchange',
'latitude': self.latitude,
'longitude': self.longitude,
'platform': 'browser',
'subject_token': subject_token,
'subject_token_type': 'urn:bamtech:params:oauth:token-type:account'
}
res = self.session.post(
url=self.api['token'], headers=header, data=postdata)
if res.status_code == 200:
self.logger.debug(res.json())
access_token = res.json()['access_token']
self.logger.debug("access_token: %s", access_token)
# expires_in = res.json()['expires_in']
refresh_token = res.json()['refresh_token']
# return access_token
return access_token, refresh_token
try:
self.logger.error("Error: %s", res.json()['errors'])
sys.exit(0)
except Exception:
self.logger.error("Error: %s", res.text)
sys.exit(0)
def get_profile_name(self, client_id, token):
headers = {
'accept': 'application/json; charset=utf-8',
'authorization': f'Bearer {token}',
'content-type': 'application/json; charset=UTF-8',
'Sec-Fetch-Mode': 'cors',
'User-Agent': self.user_agent,
'x-bamsdk-client-id': client_id,
'x-bamsdk-platform': 'macintosh',
'x-bamsdk-version': '3.10',
}
res = self.session.get(
url=self.api['current_account'], headers=headers)
if res.ok:
self.logger.debug(res.json())
user = res.json()
profile = dict()
profile['name'] = user['activeProfile']['profileName']
profile['language'] = user['activeProfile']['attributes']['languagePreferences']['appLanguage']
self.logger.info(
self._("\nSuccessfully logged in. Welcome %s!"), profile['name'])
return profile
else:
self.logger.error(res.text)
sys.exit(1)
def get_region(self, token):
headers = {
"Accept": "application/vnd.session-service+json; version=1",
"Authorization": token,
"Content-Type": "application/json",
'User-Agent': self.user_agent
}
session_url = self.api['session']
res = self.session.get(url=session_url, headers=headers)
if res.ok:
return res.json()['location']['country_code']
else:
self.logger.error(res.text)
sys.exit(1)
def get_auth_token(self):
client_id, client_apikey = self.client_info()
        self.logger.debug("client_apikey: %s", client_apikey)
assertion = self.assertion(client_apikey)
access_token = self.access_token(client_apikey, assertion)
id_token = self.login(access_token)
user_assertion = self.grant(id_token, access_token)
final_access_token, refresh_token = self.final_token(
user_assertion, client_apikey)
profile = self.get_profile_name(client_id, final_access_token)
region = self.get_region(final_access_token)
profile['region'] = region
return profile, final_access_token
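if __name__ == '__main__':
    # Hedged usage sketch (added for illustration only). `ip_info` mirrors the keys read in
    # __init__: 'loc' is a "latitude,longitude" string and 'proxy' is an optional proxies dict.
    # The credentials, locale value and the commented network calls are placeholders.
    example_ip_info = {'loc': '52.37,4.89', 'proxy': None}
    # login = Login('user@example.com', 'password', example_ip_info, 'en')
    # profile, access_token = login.get_auth_token()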
|
import boto3
import zsec_aws_tools.aws_lambda as zaws_lambda
import zsec_aws_tools.iam as zaws_iam
import io
import zipfile
import textwrap
import json
import logging
import uuid
import pytest
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('boto3').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
logging.basicConfig(level=logging.WARNING)
zaws_lambda.logger.setLevel(logging.INFO)
def create_test_lambda_code(code):
output = io.BytesIO()
with zipfile.ZipFile(output, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr(zinfo_or_arcname='main.py', data=code)
# set permissions
zf.filelist[0].external_attr = 0o0666 << 16
return output.getvalue()
@pytest.fixture
def session():
yield boto3.Session(profile_name='test', region_name='us-east-1')
@pytest.fixture
def role_for_lambda(session):
assume_role_policy_document = {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Service": "lambda.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
policy = zaws_iam.Policy(index_id="arn:aws:iam::aws:policy/ReadOnlyAccess", session=session)
role = zaws_iam.Role(
name='test_lambda_1_role',
ztid=uuid.UUID('1b761bcf-eaef-b927-ca02-cc6c927b228d'),
session=session,
config=dict(Path='/test/',
AssumeRolePolicyDocument=json.dumps(assume_role_policy_document, ),
Policies=[policy]))
role.put(wait=True)
yield role
role.detach_all_policies()
role.delete()
role.wait_until_not_exists()
@pytest.fixture
def fn(session, role_for_lambda):
test_code = textwrap.dedent("""
def lambda_handler(event, context):
print(event)
return "147306"
""")
code: bytes = create_test_lambda_code(test_code)
fn = zaws_lambda.FunctionResource(
name='test_lambda_1',
ztid=uuid.UUID('6db733ed-c2f0-ac73-78ec-8ab2bdffd124'),
session=session,
config=dict(
Code={'ZipFile': code},
Runtime='python3.7',
Role=role_for_lambda,
Handler='main.lambda_handler',
Timeout=3,
))
yield fn
fn.delete()
fn.wait_until_not_exists()
def test_aws_lambda(session, role_for_lambda, fn):
# print(list(role.boto3_resource().policies.all()))
# print(list(role.boto3_resource().attached_policies.all()))
# attached_policies = list(role.list_role_policies())
fn.put(force=True, wait=True)
arn = fn.arn
assert arn.endswith(fn.name)
assert arn.startswith("arn:aws:lambda:")
resp = fn.invoke(json_codec=True, Payload={'a': 'a'})
assert resp == "147306"
|
import os
import time
import uuid
from flask import Flask, request, make_response, render_template, redirect
from google.cloud import storage
from peewee import *
db = SqliteDatabase("core.db")
class User(Model): # mapping from user token to their background pic url
id = AutoField()
token = CharField()
url = TextField()
class Meta:
database = db
@db.connection_context()
def initialize():
db.create_tables([User])
User.create(token=os.environ["ADMIN_TOKEN"], url=os.environ["FLAG_URL"])
initialize()
app = Flask(__name__)
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "auth.json" # set up Google Cloud credentials
CLOUD_STORAGE_BUCKET = "gallery-wectf21" # Google Cloud Storage bucket name
DEFAULT_PIC = "/static/default.gif"
CSP = "script-src 'nonce-%s'; connect-src 'self'; base-uri 'self'; object-src 'none'; frame-ancestors 'none'; "
def uuid4() -> str:
return str(uuid.uuid4())
@app.route('/')
@db.connection_context()
def index():
token = request.cookies.get("token") # get token from cookies
if not token: token = uuid4() # user has no token, generate one for them
nonce = uuid4() # generate a random nonce
user_obj = User.select().where(User.token == token)
resp = make_response(render_template("index.html", background=user_obj[-1].url if len(user_obj) > 0 else DEFAULT_PIC,
nonce=nonce)) # render the template with background & CSP nonce
resp.set_cookie("token", token, samesite="strict") # set cookie to the token
resp.headers['Content-Security-Policy'] = CSP % nonce # wanna be safe
resp.headers['X-Frame-Options'] = 'DENY' # ensure no one is putting our site in iframe
return resp
def is_bad_content_type(content_type):
return content_type and "html" in content_type # uploading a html picture? seriously?
@app.route('/upload', methods=['POST'])
@db.connection_context()
def upload():
token = request.cookies.get("token")
if not token: return redirect("/") # no token, go to home page
uploaded_file = request.files.get('file') # the file uploaded by user
if not uploaded_file:
return 'No file uploaded.', 400 # dumbass user uploads nothing
if is_bad_content_type(uploaded_file.content_type):
return "Don't hack me :(", 400 # hacker uploading html
gcs = storage.Client() # do some Google Cloud Storage bs copied from their docs
bucket = gcs.get_bucket(CLOUD_STORAGE_BUCKET)
blob = bucket.blob(uuid4() + uuid4()) # use uuid + uuid as file name
blob.upload_from_string(uploaded_file.read(), content_type=uploaded_file.content_type) # upload it
    # get a signed url that expires 600000 seconds (~10000 minutes) from now
url = blob.generate_signed_url(expiration=int(time.time()) + 600000)\
.replace("https://storage.googleapis.com/gallery-wectf21/", "https://gallery-img-cdn.ctf.so/")
User.create(token=token, url=url)
return redirect("/") # go back home
if __name__ == '__main__':
app.run(host='127.0.0.1', port=8083, debug=True)
|
import torch
from torch.jit.annotations import List
from torch import Tensor
def _new_empty_tensor(x: Tensor, shape: List[int]) -> Tensor:
"""
Arguments:
        x (Tensor): input tensor
        shape (List[int]): the new empty tensor shape
Returns:
output (Tensor)
"""
return torch.ops.torchvision._new_empty_tensor_op(x, shape)
|
from mptt.managers import TreeManager
class MenuItemManager(TreeManager):
def enabled(self, *args, **kwargs):
return self.filter(*args, enabled=True, **kwargs)
|
from lesson23_projects.house3n2.auto_gen.data.const import (
E_FAILED,
E_TURNED_KNOB,
MSG_TURN_KNOB,
)
def create_out(state):
def __on_entry(req):
req.context.c_sock.send(
"""You can see the house.
You can see the close knob.""".encode()
)
def __on_trigger(req):
msg = req.pull_trigger()
if msg == MSG_TURN_KNOB:
return E_TURNED_KNOB
else:
return E_FAILED
state.on_entry = __on_entry
state.on_trigger = __on_trigger
return state
|
#NAME: arm.py
#DATE: 14/06/2019
#AUTH: Ryan McCartney
#DESC: A python class for moving an entity in real-time via and http API
#COPY: Copyright 2019, All Rights Reserved, Ryan McCartney
import threading
import time
import json
import requests
import random
from requests import Session
from kinematics import Kinematic
#define threading wrapper
def threaded(fn):
def wrapper(*args, **kwargs):
thread = threading.Thread(target=fn, args=args, kwargs=kwargs)
thread.start()
return thread
return wrapper
class Arm:
debug = False
logFilePath = "logs/log.txt"
header = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36'}
#Config Variable Initialise
jointMaxRotation = []
jointMaxSpeed = []
jointMinSpeed = []
jointPosDefault = []
jointSpeedDefault = []
jointAccelDefault = []
def __init__(self,config, ipAddress=None):
self.joints = 6
self.logging = True
if ipAddress == None:
self.ipAddress = config["ipAddress"]
else:
self.ipAddress = ipAddress
self.port = config["port"]
self.baseURL = "http://"+str(self.ipAddress)+":"+str(self.port)+"/"
self.error = False
self.timeout = 2 #Seconds
self.pollingStatus = False
        #Values loaded from 'config.json'
        #Use per-instance lists so repeated instantiation does not grow the class-level lists
        self.jointMaxRotation = []
        self.jointMaxSpeed = []
        self.jointMinSpeed = []
        self.jointPosDefault = []
        self.jointSpeedDefault = []
        self.jointAccelDefault = []
        for joint in config["joints"]:
            self.jointMaxRotation.append(joint["maxRotation"])
            self.jointMaxSpeed.append(joint["maxSpeed"])
            self.jointMinSpeed.append(joint["minSpeed"])
            self.jointPosDefault.append(joint["defaultPosition"])
            self.jointSpeedDefault.append(joint["defaultSpeed"])
            self.jointAccelDefault.append(joint["defaultAccel"])
#Status Flags
self.jointPosition = [None]*self.joints
self.switchState = [0]*self.joints
self.calibrationState = [0]*self.joints
self.movementFlag = [0]*self.joints
try:
self.session = requests.session()
self.clearLogs()
self.connected = True
except:
self.log("ERROR: Cannot create a session.")
self.connected = False
#Open a solver for kinematics
self.kin = Kinematic()
#Start capturing status packets
self.getStatus()
#Logging Function
def log(self, entry):
currentDateTime = time.strftime("%d/%m/%Y %H:%M:%S")
logEntry = currentDateTime + ": " + entry
if self.logging == True:
#open a txt file to use for logging
logFile = open(self.logFilePath,"a+")
logFile.write(logEntry+"\n")
logFile.close()
print(logEntry)
#Send and Receive Messages with implemented logging
def sendCommand(self, command):
#Start Timing
start = time.time()
#combine with host address
message = self.baseURL + "send?command=" + command
message = message.encode('ascii')
if self.pollingStatus == False:
self.getStatus()
try:
if self.debug == True:
response = self.session.get(message,timeout=self.timeout)
status = response.content.decode("utf-8").split("\n")
self.log("INFO: Transmission response code is "+str(response.status_code))
end = time.time()
self.log("STATUS: Sending '"+str(command)+"' took "+str(round((end-start),2))+" seconds.")
self.log(status[0])
else:
#t1 = time.time()
self.session.get(message,timeout=self.timeout)
#t2 = time.time()
#print("in class ", t2 - t1)
self.connected = True
except:
self.log("ERROR: Could not access API.")
self.connected = False
@threaded
def getStatus(self):
while self.connected:
self.pollingStatus = True
try:
message = self.baseURL + "getLatest"
response = self.session.get(message,timeout=self.timeout)
status = str(response.text)
#Extract Joint Positions
if(status.find("STATUS:")!=-1):
if(status.find("MOVEMENT") != -1):
data = status.split(",")
self.movementFlag = list(map(int,data[1:]))
elif(status.find("CALIBRATION") != -1):
data = status.split(",")
self.calibrationState = list(map(int,data[1:]))
elif(status.find("POSITION") != -1):
data = status.split(",")
try:
self.jointPosition = list(map(float,data[1:]))
except:
pass
elif(status.find("SWITCH") != -1):
data = status.split(",")
self.switchState = list(map(int,data[1:]))
else:
self.log("FAILED TO PARSE: "+status)
elif(status !=""):
self.log(status)
except:
self.log("INFO: Did not receive status response from API.")
self.pollingStatus = False
def moveJointTo(self,joint,position):
if self.calibrationState[joint]:
if (position >= 0) and (position <= self.jointMaxRotation[joint]):
command = "p"+str(joint)+str(position)
self.sendCommand(command)
self.log("INFO: Joint "+str(joint)+" moved to "+str(position)+" degrees.")
else:
self.log("ERROR: Positon out of range.")
else:
self.log("ERROR: Joint "+str(joint)+" not calibrated.")
def moveJoint(self,motor,degrees):
#Check movement is within range allowed
if (int(self.jointPosition[motor])+degrees) > self.jointMaxRotation[motor]:
degrees = self.jointMaxRotation[motor] - int(self.jointPosition[motor])
if (int(self.jointPosition[motor])+degrees) < 0:
degrees = -self.jointMaxRotation[motor]
command = "m"+str(motor)+str(degrees)
self.sendCommand(command)
self.log("INFO: Command sent to adjust motor "+str(motor)+" "+str(degrees)+" degrees.")
def getPose(self):
pose = self.kin.forwardKinematics(self.jointPosition)
return pose
def getPositions(self):
return self.jointPosition
def getJointPosition(self,motor):
position = float(self.jointPosition[motor])
return position
def positionJoints(self,positions):
if self.armCalibrated():
if len(positions) == self.joints:
motor = 0
for position in positions:
self.moveJointTo(motor,position)
motor += 1
else:
self.log("ERROR: Invalid Joint Positions.")
else:
self.log("ERROR: Calibrate arm before continuing.")
def rest(self):
if self.armCalibrated():
self.log("INFO: Arm moving to a resting position.")
restPosition = [None]*self.joints
restPosition[0] = self.jointPosDefault[0]
restPosition[1] = 150
restPosition[2] = 175
restPosition[3] = self.jointPosDefault[3]
restPosition[4] = self.jointPosDefault[4]
restPosition[5] = self.jointPosDefault[5]
self.positionJoints(restPosition)
else:
self.log("ERROR: Calibrate arm before trying to rest.")
def standUp(self):
if self.armCalibrated():
self.log("INFO: Arm standing upright.")
self.positionJoints(self.jointPosDefault)
else:
self.log("ERROR: Calibrate arm before trying to stand.")
def setAccel(self,joint,accel):
command = "z"+str(joint)+str(int(accel))
self.sendCommand(command)
self.log("INFO: Joint "+str(joint)+" acceleration rate adjusted to "+str(int(accel))+" degrees per second squared.")
def setSpeed(self,joint,speed):
command = "s"+str(joint)+str(int(speed))
self.sendCommand(command)
self.log("INFO: Joint "+str(joint)+" speed adjusted to "+str(int(speed))+" degrees per second.")
def setMinSpeed(self,joint,minSpeed):
command = "d"+str(joint)+str(int(minSpeed))
self.sendCommand(command)
self.log("INFO: Joint "+str(joint)+" minimum speed adjusted to "+str(int(minSpeed))+" degrees per second.")
def calibrateArm(self):
command = "ca"
self.sendCommand(command)
self.log("INFO: Arm is Currently Calibrating.")
def calibrateJoint(self, joint):
command = "c"+joint
self.sendCommand(command)
self.log("INFO: Joint "+str(joint)+" is currently calibrating.")
def stop(self):
self.sendCommand("q")
self.log("INFO: Arm Emergency Stopped.")
def checkConnection(self):
self.sendCommand("test")
self.log("INFO: Testing the connection.")
def selectRandomPosition(self,motor):
randomPosition = random.randint(0,self.jointMaxRotation[motor])
return randomPosition
def waitToStationary(self):
time.sleep(0.2)
while(self.checkMovement()):
pass
def checkMovement(self):
time.sleep(0.2)
moving = False
for jointMoving in self.movementFlag:
if bool(jointMoving):
moving = jointMoving
return moving
def clearLogs(self):
url = self.baseURL + "clearLogs"
response = self.session.get(url,timeout=self.timeout)
if response.content.decode("utf-8"):
self.log(response.content.decode("utf-8"))
def resetArduino(self):
messages = ["disconnect","connect"]
for message in messages:
url = self.baseURL + message
response = self.session.get(url,timeout=self.timeout)
if response.content.decode("utf-8"):
self.log(response.content.decode("utf-8"))
time.sleep(1.5)
self.log("INFO: Arduino for Arm Reset.")
def resetEStop(self):
self.sendCommand("r")
time.sleep(1)
self.log("INFO: Emergency Stop Latch Reset.")
def stopDemos(self):
url = self.baseURL + "stopDemos"
response = self.session.get(url,timeout=self.timeout)
if response.content.decode("utf-8"):
self.log(response.content.decode("utf-8"))
time.sleep(1.5)
self.log("INFO: Raspberry Pi Demo Processes Terminated.")
def armCalibrated(self):
calibrated = True
for jointCalibrated in self.calibrationState:
calibrated &= int(jointCalibrated)
return calibrated
def setDefaults(self):
i = 0
for i in range(0,self.joints):
self.setSpeed(i,self.jointSpeedDefault[i])
self.setMinSpeed(i,(int(self.jointSpeedDefault[i])-10))
self.setAccel(i,self.jointAccelDefault[i])
self.log("INFO: Joint "+str(i)+" defaults set.")
|
# Load pickled data
import pickle
import numpy as np
import tensorflow as tf
tf.python.control_flow_ops = tf
with open('small_train_traffic.p', mode='rb') as f:
data = pickle.load(f)
X_train, y_train = data['features'], data['labels']
# Initial Setup for Keras
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten
# Build the Fully Connected Neural Network in Keras Here
model = Sequential()
model.add(Flatten(input_shape=(32, 32, 3)))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dense(5))
model.add(Activation('softmax'))
# preprocess data
X_normalized = np.array(X_train / 255.0 - 0.5 )
from sklearn.preprocessing import LabelBinarizer
label_binarizer = LabelBinarizer()
y_one_hot = label_binarizer.fit_transform(y_train)
model.compile('adam', 'categorical_crossentropy', ['accuracy'])
history = model.fit(X_normalized, y_one_hot, nb_epoch=3, validation_split=0.2) |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './examples/ScatterPlotSpeedTestTemplate.ui'
#
# Created: Fri Sep 21 15:39:09 2012
# by: pyside-uic 0.2.13 running on PySide 1.1.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(400, 300)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setObjectName("gridLayout")
self.sizeSpin = QtGui.QSpinBox(Form)
self.sizeSpin.setProperty("value", 10)
self.sizeSpin.setObjectName("sizeSpin")
self.gridLayout.addWidget(self.sizeSpin, 1, 1, 1, 1)
self.pixelModeCheck = QtGui.QCheckBox(Form)
self.pixelModeCheck.setObjectName("pixelModeCheck")
self.gridLayout.addWidget(self.pixelModeCheck, 1, 3, 1, 1)
self.label = QtGui.QLabel(Form)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 1, 0, 1, 1)
self.plot = PlotWidget(Form)
self.plot.setObjectName("plot")
self.gridLayout.addWidget(self.plot, 0, 0, 1, 4)
self.randCheck = QtGui.QCheckBox(Form)
self.randCheck.setObjectName("randCheck")
self.gridLayout.addWidget(self.randCheck, 1, 2, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.pixelModeCheck.setText(QtGui.QApplication.translate("Form", "pixel mode", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Form", "Size", None, QtGui.QApplication.UnicodeUTF8))
self.randCheck.setText(QtGui.QApplication.translate("Form", "Randomize", None, QtGui.QApplication.UnicodeUTF8))
from pyqtgraph import PlotWidget
|
# -*- coding: UTF-8 -*-
from flask import request, jsonify
from app.api_1_0 import api
from app.models import Compilation
__author__ = 'Ivan'
#PAGE_INDEX=0
PAGE_SIZE=10
@api.route('/compilations/<int:page_index>')
def page_compilations(page_index):
#json = request.get_json()
#page_index = json.get('page_index')
    paginate = Compilation.query.paginate(page_index, PAGE_SIZE)
items = paginate.items
total = paginate.total
return jsonify({'total':total,'page_size':PAGE_SIZE,'compilations':[c.to_Json() for c in items]})
|
""" fundamental_analysis/financial_modeling_prep/fmp_view.py tests """
import sys
import unittest
# Not testing these tests further. I do not have a fmp key
from contextlib import contextmanager
import vcr
from gamestonk_terminal.stocks.fundamental_analysis.financial_modeling_prep import (
fmp_view,
)
from tests.helpers import check_print
@contextmanager
def replace_stdin(target):
orig = sys.stdin
sys.stdin = target
yield
sys.stdin = orig
class TestFMPView(unittest.TestCase):
@check_print(assert_in="Ticker should be a NASDAQ")
@vcr.use_cassette(
"tests/cassettes/test_fa/test_fa_fmp/test_fmp_valinvest.yaml",
record_mode="new_episodes",
)
def test_fmp_valinvest_score(self):
fmp_view.valinvest_score([], "GME")
|
import sys
import importer
module
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the CLI argument helper interface."""
from __future__ import unicode_literals
import locale
import sys
import unittest
from plaso.lib import errors
from tests.cli import test_lib as cli_test_lib
from tests.cli.helpers import test_lib
class HelperManagerTest(unittest.TestCase):
"""Tests the parsers manager."""
# pylint: disable=protected-access
def testParseNumericOption(self):
"""Tests the _ParseNumericOption function."""
test_helper = test_lib.TestHelper()
expected_option_value = 123
options = cli_test_lib.TestOptions()
options.test = expected_option_value
option_value = test_helper._ParseNumericOption(options, 'test')
self.assertEqual(option_value, expected_option_value)
options = cli_test_lib.TestOptions()
option_value = test_helper._ParseNumericOption(options, 'test')
self.assertIsNone(option_value)
option_value = test_helper._ParseNumericOption(
options, 'test', default_value=expected_option_value)
self.assertEqual(option_value, expected_option_value)
expected_option_value = 123.456
options = cli_test_lib.TestOptions()
options.test = expected_option_value
option_value = test_helper._ParseNumericOption(options, 'test')
self.assertEqual(option_value, expected_option_value)
options = cli_test_lib.TestOptions()
options.test = b'abc'
with self.assertRaises(errors.BadConfigOption):
test_helper._ParseNumericOption(options, 'test')
def testParseStringOption(self):
"""Tests the _ParseStringOption function."""
encoding = sys.stdin.encoding
# Note that sys.stdin.encoding can be None.
if not encoding:
encoding = locale.getpreferredencoding()
test_helper = test_lib.TestHelper()
expected_option_value = 'Test Unicode string'
options = cli_test_lib.TestOptions()
options.test = expected_option_value
option_value = test_helper._ParseStringOption(options, 'test')
self.assertEqual(option_value, expected_option_value)
options = cli_test_lib.TestOptions()
option_value = test_helper._ParseStringOption(options, 'test')
self.assertIsNone(option_value)
option_value = test_helper._ParseStringOption(
options, 'test', default_value=expected_option_value)
self.assertEqual(option_value, expected_option_value)
options = cli_test_lib.TestOptions()
options.test = expected_option_value.encode(encoding)
option_value = test_helper._ParseStringOption(options, 'test')
self.assertEqual(option_value, expected_option_value)
if encoding and encoding == 'UTF-8':
options = cli_test_lib.TestOptions()
options.test = (
b'\xad\xfd\xab\x73\x99\xc7\xb4\x78\xd0\x8c\x8a\xee\x6d\x6a\xcb\x90')
with self.assertRaises(errors.BadConfigOption):
test_helper._ParseStringOption(options, 'test')
if __name__ == '__main__':
unittest.main()
|
import asyncio
import os
import re
import aiohttp
from bs4 import BeautifulSoup
import pandas as pd
import tqdm
BASE_URL = "https://link.springer.com/"
BASE_FOLDER = "Springer"
CONN_LIMIT = 100
TIMEOUT = 3600
def create_folders(books):
for topic in books["English Package Name"].unique():
os.makedirs(f"{BASE_FOLDER}/{topic}", exist_ok=True)
def get_valid_title(value):
value = re.sub(r"[\:\/\®]+", "-", value)
return value
async def download_book(session, book, href, ext):
async with session.get(f"{BASE_URL}/{href}") as response:
topic = book["English Package Name"]
title = get_valid_title(book["Book Title"])
filename = f'{title}, {book["Author"]}, {book["Edition"]}.{ext}'
filepath = f"{BASE_FOLDER}/{topic}/{filename}"
if not os.path.exists(filepath):
with open(filepath, "wb") as fh:
fh.write(await response.content.read())
async def fetch(session, book):
async with session.get(book["OpenURL"]) as response:
text = await response.text()
soup = BeautifulSoup(text, "html.parser")
a_pdf = soup.find("a", class_="test-bookpdf-link")
a_epub = soup.find("a", class_="test-bookepub-link")
if a_pdf:
href_pdf = a_pdf.get("href")
await download_book(session, book, href_pdf, "pdf")
if a_epub:
href_ebook = a_epub.get("href")
await download_book(session, book, href_ebook, "epub")
async def main():
tout = aiohttp.ClientTimeout(total=TIMEOUT)
conn = aiohttp.TCPConnector(limit=CONN_LIMIT)
async with aiohttp.ClientSession(connector=conn, timeout=tout) as session:
tasks = [fetch(session, book) for _, book in books.iterrows()]
for task in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks)):
await task
if __name__ == "__main__":
books = pd.read_csv("./books.csv")
create_folders(books)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
|
from PIL import Image
import numpy as np
import streamlit as st
from PIL import ImageFilter
from .converter import Converter
from .correlator import Correlator
class Filter:
def __init__(self):
self.image = None
self.output = None
def apply_negative_filter(self, image_path, R=True,G=True,B=True):
image = np.array(Image.open(image_path).convert('RGB'))
if R:
image[:,:,0] = 255 - image[:,:,0]
if G:
image[:,:,1] = 255 - image[:,:,1]
if B:
image[:,:,2] = 255 - image[:,:,2]
transf_image = Image.fromarray(image.astype('uint8'))
return transf_image
def apply_negative_filter_in_y(self, image_path):
#image = np.array(Image.open(image_path).convert('RGB'))
converter = Converter()
yiq_img, yiq_arr = converter.RGB_2_YIQ(image_path=image_path)
yiq = yiq_arr.copy()
yiq[:,:,0] = 255 - yiq[:,:,0]
rgb_img, rgb_arr = converter.YIQ_2_RGB(arr_img=yiq)
rgb = rgb_img.copy()
return rgb
def visualize_image(self, image, capt = 'Image'):
st.image(image, caption=capt, use_column_width=True)
def apply_sobel_filter(self, image_path, zero_padding=True, mode="vertical"):
c = Correlator()
if mode == "vertical":
sobel_filter = np.array([[-1,0,1],
[-2,0,2],
[-1,0,1]])
elif mode == "horizontal":
sobel_filter = np.array([[-1,0,1],
[-2,0,2],
[-1,0,1]]).T
else:
print("Choose either vertical or Horizontal")
return -1
return c.apply_correlation(image_path, sobel_filter, zero_padding=zero_padding)
def apply_box_filter(self, image_path, box_shape=(3,3), zero_padding=True):
c = Correlator()
divisor = box_shape[0] * box_shape[1]
return c.apply_correlation(image_path, np.ones((box_shape[0],box_shape[1]))/divisor, zero_padding)
def apply_median_filter(self, image_path, filter_shape=(3,3), zero_padding=True):
c = Correlator()
self.image = np.array(Image.open(image_path).convert('RGB'))
c.image = self.image
vertical_padding = filter_shape[0]//2
horizontal_padding = filter_shape[1]//2
if not horizontal_padding and not vertical_padding:
print("Could not execute padding due to filter shape. Try a Bi dimensional kernel.")
zero_padding = False
if zero_padding:
preprocessed_img = c.padding(horizontal_padding, vertical_padding)
output = np.zeros((self.image.shape[0], self.image.shape[1], 3))
else:
preprocessed_img = self.image
output = np.zeros((self.image.shape[0] - 2 * vertical_padding, self.image.shape[1] - 2 * horizontal_padding, 3))
for i in range(preprocessed_img.shape[0] - filter_shape[0]):
for j in range(preprocessed_img.shape[1] - filter_shape[1]):
for k in range(3):
output[i,j,k] = np.median(preprocessed_img[i: i + filter_shape[0], j: j + filter_shape[1], k])
return self.image, preprocessed_img, output |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Chatterbug(MakefilePackage):
"""A suite of communication-intensive proxy applications that mimic
commonly found communication patterns in HPC codes. These codes can be
used as synthetic codes for benchmarking, or for trace generation using
Score-P / OTF2.
"""
tags = ['proxy-app']
homepage = "https://chatterbug.readthedocs.io"
git = "https://github.com/LLNL/chatterbug.git"
version('develop', branch='master')
version('1.0', tag='v1.0')
variant('scorep', default=False, description='Build with Score-P tracing')
depends_on('mpi')
depends_on('scorep', when='+scorep')
@property
def build_targets(self):
targets = []
targets.append('MPICXX = {0}'.format(self.spec['mpi'].mpicxx))
return targets
def build(self, spec, prefix):
if "+scorep" in spec:
make('WITH_OTF2=YES')
else:
make()
def install(self, spec, prefix):
if "+scorep" in spec:
make('WITH_OTF2=YES', 'PREFIX=' + spec.prefix, 'install')
else:
make('PREFIX=' + spec.prefix, 'install')
|
import setuptools
with open("README.md", "r") as f:
longdesc = f.read()
setuptools.setup(
name = "ps-minifier",
version = "0.1.2",
author = "Willumz",
description = "A minifier for PowerShell scripts.",
long_description = longdesc,
long_description_content_type="text/markdown",
url = "https://github.com/Willumz/ps-minifier",
packages = setuptools.find_packages(),
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License"
],
entry_points = {
'console_scripts': ['psminifier=ps_minifier.psminifier:main']
}
) |
import itertools
import operator
def evaluate_distribution(spec, function_lookup):
''' Process the declarative specification and return a function
of the form:
def wrapper(rstate, **kwargs):
...
Regardless of the specification, the generated function expects a
positional argument which is a random number generator (like
np.random.RandomState), and a set of keyword arguments.
'''
if 'value' in spec:
assert len(spec) == 1
_wrapped_value = spec['value']
def wrapper(rstate, **kwargs):
return _wrapped_value
wrapper._exposed_kwargs = set()
return wrapper
elif 'function' in spec or 'generator' in spec:
assert set(spec.keys()).issubset({
'function', 'generator', 'parameters', 'declare'})
if 'function' in spec:
assert 'generator' not in spec
_wrapped_function = function_lookup(spec['function'])
def _wrapped_generator(rstate, **kwargs):
return _wrapped_function(**kwargs)
else:
_wrapped_generator = function_lookup(spec['generator'])
exposed_kwargs_map = dict()
param_callables = dict()
if 'parameters' in spec:
param_callables = {
param_name: evaluate_distribution(
param_spec, function_lookup)
for param_name, param_spec in spec['parameters'].items()
if 'expose' not in param_spec
}
exposed_kwargs_map = {
param_name: param_spec['expose']
for param_name, param_spec in spec['parameters'].items()
if 'expose' in param_spec
}
_exposed_kwargs = set(exposed_kwargs_map.values())
_exposed_kwargs.update(itertools.chain(*(
param_callable._exposed_kwargs
for param_callable in param_callables.values()
)))
declared_callables = dict()
if 'declare' in spec:
declared_callables = {
declared_name: evaluate_distribution(
declared_spec, function_lookup)
for declared_name, declared_spec
in spec['declare'].items()
}
_exposed_kwargs.update(itertools.chain(*(
declared_callable._exposed_kwargs
for declared_callable in declared_callables.values()
)))
_exposed_kwargs = {
kwarg
for kwarg in _exposed_kwargs
if kwarg not in declared_callables
}
def wrapper(rstate, **kwargs):
missing_kwargs = set(_exposed_kwargs) - set(kwargs)
if missing_kwargs:
str_missed = ', '.join('\'{}\''.format(kw) for kw in sorted(missing_kwargs))
raise TypeError('function missing required keyword-only arguments: {}'.format(str_missed))
inner_kwargs = {
inner_kw: kwargs[outer_kw]
for inner_kw, outer_kw in exposed_kwargs_map.items()
}
kwargs.update({
declared_name: declared_callable(rstate, **kwargs)
for declared_name, declared_callable
in sorted(declared_callables.items(), key=operator.itemgetter(0))
})
inner_kwargs.update({
param_name: param_callable(rstate, **kwargs)
for param_name, param_callable
in sorted(param_callables.items(), key=operator.itemgetter(0))
})
return _wrapped_generator(rstate, **inner_kwargs)
wrapper._exposed_kwargs = _exposed_kwargs
return wrapper
else:
raise ValueError(spec)
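if __name__ == '__main__':
    # Worked example (added for illustration): a spec mixing 'value', 'function' and an
    # exposed parameter. `function_lookup` is just a dict lookup here; the surrounding
    # project may resolve names differently. The rstate argument is unused by plain
    # 'function' specs, so None stands in for a real RNG.
    def _scaled(base, scale):
        return base * scale
    spec = {
        'function': 'scaled',
        'parameters': {
            'base': {'value': 2.0},
            'scale': {'expose': 'scale'},
        },
    }
    draw = evaluate_distribution(spec, {'scaled': _scaled}.__getitem__)
    print(draw(None, scale=3.0))  # -> 6.0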
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from numpy.random import randint
from typing import Union
class RandomCropper3D():
"""Randomly crop a sub-block out of a 3D tensor.
Args:
out_shape (tuple or int): desired output shape.
"""
def __init__(self, out_shape: Union[int, tuple]):
"""
Args:
out_shape (int | tuple): desired shape (after cropping), expanded to 3D if int.
"""
assert isinstance(out_shape, (int, tuple))
if isinstance(out_shape, int):
self.out_shape = (out_shape, out_shape, out_shape)
else:
assert len(out_shape) == 3
self.out_shape = out_shape
def __call__(self, x, y):
"""Apply the random cropping to a (x,y) pair."""
h, w, d = x.shape[0], x.shape[1], x.shape[2]
bh, bw, bd = self.out_shape
        # numpy's randint upper bound is exclusive; +1 allows the crop to start at the last
        # valid offset and keeps the call valid when the input already has the target size
        tx = randint(0, h - bh + 1)
        ty = randint(0, w - bw + 1)
        tz = randint(0, d - bd + 1)
x_cropped = x[tx:tx + bh, ty:ty + bw, tz:tz + bd]
y_cropped = y[tx:tx + bh, ty:ty + bw, tz:tz + bd]
return x_cropped, y_cropped
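if __name__ == "__main__":
    # Minimal sketch (added for illustration): crop a random 32x32x32 block out of a
    # pair of 64x64x64 volumes. numpy is only needed for the demo arrays.
    import numpy as np
    cropper = RandomCropper3D(32)
    x_demo = np.zeros((64, 64, 64))
    y_demo = np.zeros((64, 64, 64))
    x_crop, y_crop = cropper(x_demo, y_demo)
    print(x_crop.shape, y_crop.shape)  # (32, 32, 32) (32, 32, 32)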
|
from database import db_session
from models.notificacao import Notificacao
class NotificacaoDAO:
    '''
    NotificacaoDAO CLASS - IMPLEMENTS THE DATABASE ACCESS RELATED TO THE Notificacao
    CLASS FROM THE models.py MODULE, WHICH MAPS THE TNotificacao TABLE
    @author: Luciano Gomes Vieira dos Anjos -
    @date: 01/10/2020 -
    @version: 1.0.0
    '''
def __init__(self, db):
self.__db = db_session
def get_notificacoes(self):
        '''
        METHOD THAT RETURNS ALL REGISTERED NOTIFICATIONS
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 30/10/2020 -
        @version: 1.0.0
        '''
notificacoes = self.__db.query(Notificacao).order_by(Notificacao.data_notificacao.desc()).all()
self.__db.expunge_all()
self.__db.close()
return notificacoes
def get_notificacoes_data_validade(self):
        '''
        METHOD THAT RETURNS ALL REGISTERED EXPIRATION DATE
        NOTIFICATIONS
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 10/10/2020 -
        @version: 1.0.0
        '''
notificacoes = self.__db.query(Notificacao).filter(Notificacao.fk_id_tipo_notificacao == 1).all()
self.__db.expunge_all()
self.__db.close()
return notificacoes
def get_notificacao_data_validade(self, fk_id_estoque_produto):
        '''
        METHOD THAT RETURNS THE EXPIRATION DATE NOTIFICATION
        FOR A SPECIFIC PRODUCT
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 10/10/2020 -
        @version: 1.0.0
        '''
        notificacoes = self.__db.query(Notificacao).filter(Notificacao.fk_id_estoque_produto == fk_id_estoque_produto, Notificacao.fk_id_tipo_notificacao == 1).first()
self.__db.expunge_all()
self.__db.close()
return notificacoes
def get_notificacoes_quantidade(self):
        '''
        METHOD THAT RETURNS ALL QUANTITY NOTIFICATIONS
        REGISTERED IN THE SYSTEM
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 10/10/2020 -
        @version: 1.0.0
        '''
notificacoes = self.__db.query(Notificacao).filter(Notificacao.fk_id_tipo_notificacao == 2).all()
self.__db.expunge_all()
self.__db.close()
return notificacoes
def get_notificacoes_data_validade_kit(self):
        '''
        METHOD THAT RETURNS ALL REGISTERED KIT EXPIRATION DATE
        NOTIFICATIONS
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 30/10/2020 -
        @version: 1.0.0
        '''
notificacoes = self.__db.query(Notificacao).filter(Notificacao.fk_id_tipo_notificacao == 3).all()
self.__db.expunge_all()
self.__db.close()
return notificacoes
def get_info_notificacoes_data_validade(self, info_notificacao):
        '''
        METHOD THAT RETURNS THE INFORMATION OF REGISTERED
        EXPIRATION DATE NOTIFICATIONS, TAKING THE STRING
        info_notificacao AS A PARAMETER
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 10/10/2020 -
        @version: 1.0.0
        '''
infos_notificacoes = self.__db.query(Notificacao.info_notificacao).filter(Notificacao.fk_id_tipo_notificacao == 1, Notificacao.info_notificacao == info_notificacao).all()
self.__db.expunge_all()
self.__db.close()
infos_notificacoes_list = []
for info in infos_notificacoes:
infos_notificacoes_list.append(info[0])
return infos_notificacoes_list
def get_info_notificacoes_data_validade_kit(self, info_notificacao):
        '''
        METHOD THAT RETURNS THE INFORMATION OF REGISTERED KIT
        EXPIRATION DATE NOTIFICATIONS, TAKING THE STRING
        info_notificacao AS A PARAMETER
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 30/10/2020 -
        @version: 1.0.0
        '''
infos_notificacoes = self.__db.query(Notificacao.info_notificacao).filter(Notificacao.fk_id_tipo_notificacao == 3, Notificacao.info_notificacao == info_notificacao).all()
self.__db.expunge_all()
self.__db.close()
infos_notificacoes_list = []
for info in infos_notificacoes:
infos_notificacoes_list.append(info[0])
return infos_notificacoes_list
def get_info_notificacoes_quantidade(self, info_notificacao):
        '''
        METHOD THAT RETURNS THE INFORMATION OF REGISTERED PRODUCT
        QUANTITY NOTIFICATIONS, TAKING THE STRING
        info_notificacao AS A PARAMETER
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 10/10/2020 -
        @version: 1.0.0
        '''
infos_notificacoes = self.__db.query(Notificacao.info_notificacao).filter(Notificacao.fk_id_tipo_notificacao == 2, Notificacao.info_notificacao == info_notificacao).all()
self.__db.expunge_all()
self.__db.close()
infos_notificacoes_list = []
for info in infos_notificacoes:
infos_notificacoes_list.append(info[0])
return infos_notificacoes_list
def get_id_notificacao_email_id_estoque_produto(self, id_estoque):
        '''
        METHOD THAT RETURNS THE NOTIFICATION ID OF A GIVEN
        PRODUCT IN STOCK, TAKING ITS STOCK ID AS
        A PARAMETER
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 10/10/2020 -
        @version: 1.0.0
        '''
id_notificacao = self.__db.query(Notificacao.id_notificacao).filter(Notificacao.fk_id_estoque_produto == id_estoque, Notificacao.fk_id_tipo_notificacao == 1).first()
self.__db.expunge_all()
self.__db.close()
return id_notificacao
def registra_notificacao(self, notificacao):
        '''
        METHOD THAT PERSISTS THE NOTIFICATION INFORMATION TO THE DATABASE
        @author: Luciano Gomes Vieira dos Anjos -
        @date: 01/10/2020 -
        @version: 1.0.0
        '''
try:
self.__db.add(notificacao)
self.__db.commit()
        except Exception:
print("Erro ao registrar notificação")
self.__db.rollback()
finally:
self.__db.close()
        return 'Notificação registrada com sucesso'
|
# -*- coding: utf-8 -*-
# ProDy: A Python Package for Protein Dynamics Analysis
#
# Copyright (C) 2010-2012 Ahmet Bakan
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
"""This module defines a pointer class for handling subsets of normal modes."""
__author__ = 'Ahmet Bakan'
__copyright__ = 'Copyright (C) 2010-2012 Ahmet Bakan'
import numpy as np
import prody
__all__ = ['ModeSet']
class ModeSet(object):
"""A class for providing access to subset of mode data. Instances
are obtained by slicing an NMA model (:class:`ANM`, :class:`GNM`, or
    :class:`PCA`). ModeSets contain a reference to the model and a list
of mode indices. Methods common to NMA models are also defined for
mode sets."""
__slots__ = ['_model', '_indices']
def __init__(self, model, indices):
        if not isinstance(model, prody.dynamics.NMA):  # NMA base class lives in prody.dynamics
            raise TypeError('model must be an NMA, not {0:s}'
                            .format(str(type(model))))
self._model = model
self._indices = np.array(indices, int)
def __len__(self):
return len(self._indices)
def __iter__(self):
for i in self._indices:
yield self._model.getMode(i)
def __repr__(self):
return '<ModeSet: {0:d} modes from {1:s}>'.format(len(self),
str(self._model))
def __str__(self):
return '{0:d} modes from {1:s}'.format(len(self._indices),
str(self._model))
def is3d(self):
"""Return True if mode instance is from a 3-dimensional model."""
return self._model._is3d
def numAtoms(self):
"""Return number of atoms."""
return self._model._n_atoms
def numModes(self):
"""Return number of modes in the instance (not necessarily maximum
number of possible modes)."""
return len(self._indices)
def numDOF(self):
"""Return number of degrees of freedom."""
return self._model._dof
def getModes(self):
"""Return a list that contains the modes in the mode set."""
getMode = self._model.getMode
return [getMode(i) for i in self._indices]
def getTitle(self):
"""Return title of the mode set."""
return str(self)
def getModel(self):
"""Return the model that the modes belongs to."""
return self._model
def getIndices(self):
"""Return indices of modes in the mode set."""
return self._indices
def getEigenvalues(self):
"""Deprecated, use :meth:`getEigvals` instead."""
        prody.deprecate('getEigenvalues', 'getEigvals')
return self.getEigvals()
def getEigvals(self):
"""Return eigenvalues."""
return self._model._eigvals[self._indices]
def getEigenvectors(self):
"""Deprecated, use :meth:`getEigvecs` instead."""
        prody.deprecate('getEigenvectors', 'getEigvecs')
return self.getEigvecs()
def getVariances(self):
"""Return variances (~inverse eigenvalues)."""
return self._model._vars[self._indices]
def getArray(self):
"""Return a copy of eigenvectors array."""
return self._model._array[:, self._indices]
getEigvecs = getArray
def _getArray(self):
"""Return a copy of eigenvectors array."""
return self._model._array[:, self._indices]
def getCovariance(self):
"""Deprecated, use :func:`~.calcCovariance` instead."""
prody.deprecate('getCovariance', 'calcCovariance')
return prody.calcCovariance(self)
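# A minimal usage sketch, assuming ProDy's standard ANM workflow; the PDB
# identifier '1ubi' and the mode count are illustrative values only.
if __name__ == '__main__':
    from prody import parsePDB, ANM
    calphas = parsePDB('1ubi', subset='ca')   # fetch/parse ubiquitin C-alpha atoms
    anm = ANM('ubiquitin')
    anm.buildHessian(calphas)
    anm.calcModes()
    subset = anm[:3]                          # slicing an NMA model yields a ModeSet
    print(subset.numModes(), subset.getEigvals())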
|
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from kmeans_100D import *
# learning rate = 0.1, K = 10, epochs = 300
logging = runKmeans(0.1,10,300)
|
import os
import numpy as np
import pandas as pd
import urllib.request
from bs4 import BeautifulSoup
import xml.etree.ElementTree as ETREE
import datetime as dt
pd.set_option('display.max_columns', 500)
def add_pitcher_ids(data):
"""
"""
last_registries = [
fname for fname in sorted(os.listdir(ref_dest))[-50:]
]
registry = pd.concat(
objs=[
pd.read_parquet(ref_dest + fname) for fname in last_registries
],
axis=0
)
for col in ['first_name', 'last_name']:
registry.loc[:, col] = registry[col].astype(str)
registry.loc[:, col] = \
registry[col].apply(lambda x: x.lower().strip())
registry.to_csv('/Users/peteraltamura/Desktop/registry_all.csv')
registry.reset_index(drop=True, inplace=True)
registry.drop_duplicates(
subset=['first_name', 'last_name', 'team'],
inplace=True
)
data[['starterFirstName', 'starterLastName']].to_csv(
'/Users/peteraltamura/Desktop/data.csv')
registry.to_csv('/Users/peteraltamura/Desktop/registry.csv')
data = pd.merge(
data,
registry,
how='left',
left_on=['starterFirstName', 'starterLastName', 'team'],
right_on=['first_name', 'last_name', 'team'],
validate='1:1'
)
return data
def extract_probables(data):
"""
Extracts probable home and away pitchers from atv_preview.xml
"""
resp = data.getroot()
# Home
try:
pitcher_prev_name = resp[0][0][0][2][1][1][0].text
except IndexError as IE:
pitcher_prev_name = np.NaN
try:
pitcher_prev_stat = resp[0][0][0][2][1][1][1].text
except IndexError as IE:
pitcher_prev_stat = np.NaN
try:
pitcher_prev_side = resp[0][0][0][2][1][1][2].text
except IndexError as IE:
pitcher_prev_side = np.NaN
df_home = pd.DataFrame({'probableStarterName': [pitcher_prev_name],
'probableStarterStat': [pitcher_prev_stat],
'probableStarterSide': [str(pitcher_prev_side)]})
# Away
try:
pitcher_prev_name = resp[0][0][0][2][2][1][0].text
except IndexError as IE:
pitcher_prev_name = np.NaN
try:
pitcher_prev_stat = resp[0][0][0][2][2][1][1].text
except IndexError as IE:
pitcher_prev_stat = np.NaN
try:
pitcher_prev_side = resp[0][0][0][2][2][1][2].text
except IndexError as IE:
pitcher_prev_side = np.NaN
df_away = pd.DataFrame({'probableStarterName': [pitcher_prev_name],
'probableStarterStat': [pitcher_prev_stat],
'probableStarterSide': [str(pitcher_prev_side)]})
if any(df_away['probableStarterSide'].apply(lambda x: "current" in x.lower())):
raise Exception("Probable Starters have been replaced with gameplay data")
if any(df_home['probableStarterSide'].apply(lambda x: "current" in x.lower())):
raise Exception("Probable Starters have been replaced with gameplay data")
df = pd.concat(objs=[df_home, df_away], axis=0)
return df
def scrape_game_previews(date):
"""
"""
date_url = "year_{}/month_{}/day_{}".format(
str(date.year).zfill(4),
str(date.month).zfill(2),
str(date.day).zfill(2)
)
full_url = base_url + date_url
print(full_url)
# Generate list of gid links
test_resp = urllib.request.urlopen(full_url)
    req = BeautifulSoup(test_resp, 'html.parser')
game_links = [x for x in req.find_all('a') if
str(x.get('href'))[7:10] == 'gid']
print(game_links)
# Previews
probable_starters = []
for gid in game_links:
print(" {}".format(str(gid)))
try:
game_id = str(gid.get('href'))[7:]
rbs = full_url + "/" + str(gid.get('href'))[7:] + 'atv_preview.xml'
resp = urllib.request.urlopen(rbs)
resp = ETREE.parse(resp)
df = extract_probables(resp)
df['gameId'] = game_id
probable_starters.append(df)
except ValueError as VE:
pitcher_prev = pd.DataFrame()
pass
try:
probable_starters = pd.concat(
objs=probable_starters,
axis=0
)
print(probable_starters.head())
except ValueError as VE:
probable_starters = pd.DataFrame()
pass
# Filter to Games with two probable starters
probable_starters = probable_starters.loc[
probable_starters['probableStarterName'].notnull(), :]
psvc = probable_starters['gameId'].value_counts()
psvc = pd.DataFrame(psvc).reset_index(inplace=False)
psvc.columns = ['gameId', 'freq']
psvc = psvc.loc[psvc['freq'] == 2, :]
games = list(set(psvc['gameId']))
probable_starters = probable_starters.loc[probable_starters['gameId'].isin(games), :]
# Add Format Pitcher Name
# First Name - assign out
probable_starters['starterFirstName'] =\
probable_starters['probableStarterName'].apply(
lambda s: s.split(" ")[0]
)
# Format
probable_starters.loc[:, 'starterFirstName'] = \
probable_starters['starterFirstName'].apply(
lambda x: x.lower()
)
# Last Name - assign out
probable_starters['starterLastName'] = \
probable_starters['probableStarterName'].apply(
lambda s: s.split(" ")[1]
)
# Format
probable_starters.loc[:, 'starterLastName'] = \
probable_starters['starterLastName'].apply(
lambda x: x.lower()
)
# Strip both
for x in ['starterFirstName', 'starterLastName']:
probable_starters.loc[:, x] = probable_starters[x].str.strip()
# Add Home Team / Away Team
probable_starters.loc[:, 'probableStarterSide'] = \
probable_starters['probableStarterSide'].apply(
lambda x: x.strip().lower()
)
probable_starters['homeTeam'] = probable_starters['gameId'].apply(
lambda x: x.split("_")[5]
)
probable_starters['awayTeam'] = probable_starters['gameId'].apply(
lambda x: x.split("_")[4]
)
probable_starters.reset_index(drop=True, inplace=True)
probable_starters.loc[
probable_starters['probableStarterSide'] == 'home',
'team'] = probable_starters['homeTeam'].str[:3]
probable_starters.loc[
probable_starters['probableStarterSide'] == 'away',
'team'] = probable_starters['awayTeam'].str[:3]
# Add Pitcher ID From team register
probable_starters = add_pitcher_ids(probable_starters)
probable_starters.rename(
columns={
'id': 'startingPitcherId',
'team': 'startingPitcherTeam',
'dob': 'startingPitcherDob',
'throws': 'startingPitcherThrows',
'weight': 'startingPitcherWeight'
},
inplace=True
)
# Make Wide
results = []
for gid in list(set(probable_starters['gameId'])):
curr = probable_starters.loc[probable_starters['gameId'] == gid, :]
assert curr.shape[0] == 2
curr_home = curr.loc[curr['probableStarterSide'] == 'home', :]
curr_home = curr_home.loc[:, [
'gameId', 'startingPitcherId', 'startingPitcherTeam', 'startingPitcherDob',
'startingPitcherThrows', 'startingPitcherWeight'
]]
curr_away = curr.loc[curr['probableStarterSide'] == 'away', :]
curr_away = curr_away.loc[:, [
'gameId', 'startingPitcherId', 'startingPitcherTeam', 'startingPitcherDob',
'startingPitcherThrows', 'startingPitcherWeight'
]]
curr = pd.merge(
curr_home, curr_away, how='left',
on=['gameId'], validate='1:1', suffixes=['_home', '_away']
)
print(curr.shape)
results.append(curr)
probable_starters = pd.concat(results, axis=0)
# Write out
outpath = base_dest + "{}/".format(date_url.replace("/", ""))
if not os.path.exists(outpath):
os.makedirs(outpath)
probable_starters.to_csv(
outpath + "probableStarters.csv",
index=False
)
probable_starters.to_parquet(
outpath + 'probableStarters.parquet'
)
def main():
    # Run Log
    # these paths are read as module-level names by the helper functions above,
    # so declare them global before assigning
    global base_url, base_dest, ref_dest, registry_hist
date = dt.datetime(year=2019, month=7, day=13)
# Teams
base_url = "http://gd2.mlb.com/components/game/mlb/"
base_dest = "/Volumes/Transcend/00_gameday/"
ref_dest = "/Volumes/Transcend/99_reference/"
# Misc
registry_hist = 10
# Iterate over today and tomorrow
dates = [date]
for dd in dates:
scrape_game_previews(dd)
if __name__ == "__main__":
main()
|
import time
import numpy as np
def parse_input(file):
nums = [int(n) for n in file.readline().split(',')]
file.readline()
boards = []
board = []
for line in file.readlines():
if line == '\n':
boards.append(board)
board = []
else:
# each number has boolean value corresponding to if it has been marked
row = [ [int(n), False] for n in line.split() ]
board.append(row)
return np.array(nums), np.array(boards)
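# Note: for the standard Advent of Code input each board is 5x5, so `boards`
# ends up with shape (N, 5, 5, 2), where position [row, col] holds
# [number, marked_flag]. (The 5x5 size is an assumption about the input,
# not something parse_input enforces.)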
def mark_board(board, num):
for row in board:
for col in row:
if col[0] == num:
col[1] = True
def check_for_bingo(board):
bingo = False
# check rows for bingo
for row in board:
if not bingo:
bingo = all(value[1] for value in row)
# check columns for bingo
for col in range(len(board)):
if not bingo:
bingo = all(value[1] for value in board[:, col])
return bingo
def calculate_score(board, num):
unmarked_sum = 0
for row in board:
for col in row:
if col[1] == False: # only sum the numbers that are not marked
unmarked_sum += col[0]
return unmarked_sum * num
def solution_1():
with open('input.txt') as file:
nums, boards = parse_input(file)
for num in nums:
for board in boards:
mark_board(board, num)
bingo = check_for_bingo(board)
if bingo:
return calculate_score(board, num)
def solution_2():
with open('input.txt') as file:
nums, boards = parse_input(file)
board_count = boards.shape[0]
bingo_count = 0
for num in nums:
for board in boards:
# only check for bingo if board doesn't have one already
bingo = check_for_bingo(board)
if not bingo:
mark_board(board, num)
bingo = check_for_bingo(board)
if bingo:
bingo_count += 1
if bingo_count == board_count: # last board to get a bingo
return calculate_score(board, num)
def main():
print("\n* Advent of Code 2021: Day 3 *\n")
tic = time.perf_counter()
s1 = solution_1()
toc = time.perf_counter()
print(f"Solution for problem 1: {s1}, acquired in: {toc-tic:0.4f} seconds")
tic = time.perf_counter()
s2 = solution_2()
toc = time.perf_counter()
print(f"Solution for problem 2: {s2}, acquired in: {toc-tic:0.4f} seconds")
if __name__ == '__main__':
    main()
|