code (string, lengths 20 to 1.05M) | apis (sequence) | extract_api (string, lengths 75 to 5.24M) |
---|---|---|
from topo.custom.topo import Topo
from . import testutil as testutil
import math
from core.engine import Engine
class MyTopo( Topo ):
"simple bottleneck test 2"
def __init__( self, ctx ):
propagation_delay = float(ctx.config.get("topo.propagation_delay", 0.5))
processing_delay = float(ctx.config.get("topo.processing_delay", 0))
# Initialize
Topo.__init__( self )
s1 = self.addSwitch( 's1', x=3, y=2, processing_delay=processing_delay )
s2 = self.addSwitch( 's2', x=3, y=2, processing_delay=processing_delay )
s3 = self.addSwitch( 's3', x=3, y=2, processing_delay=processing_delay )
h1 = self.addHost( 'h1', x=4, y=1)
h2 = self.addHost( 'h2',x=4, y=3)
h3 = self.addHost( 'h3',x=4, y=3)
self.addLink( h1, s1, capacity=1000, propagation_delay=propagation_delay )
self.addLink( h2, s2, capacity=1000, propagation_delay=propagation_delay )
self.addLink( h3, s3, capacity=1000, propagation_delay=propagation_delay )
self.addLink( s1, s2, capacity=10, propagation_delay=propagation_delay )
self.addLink( s2, s3, capacity=10, propagation_delay=propagation_delay )
# add traffic
self.addTraffic(
dict(fg_class='Single', fg_label="f0", fg_start=0, fg_demand=100, fg_duration=10,
fg_fixed_path=['h1', 's1', 's2', 'h2'], fg_color='g'),
dict(fg_class='Single', fg_label="f1", fg_start=5, fg_demand=100, fg_duration=10,
fg_fixed_path=['h1', 's1', 's2', 's3', 'h3'], fg_color='r'),
dict(fg_class='Single', fg_label="f2", fg_start=10, fg_demand=100, fg_duration=10,
fg_fixed_path=['h1', 's1', 's2', 's3', 'h3'], fg_color='purple'),
)
# call on_done if simulation is finished
ctx.on_test_finished = self.on_done
def on_done(self, ctx):
testutil.print_summary(ctx)
#print(testutil.get_flow_timings(ctx))
errors = []
errors += testutil.verify_flow_timings(ctx, FLOW_TIMINGS)
return errors
def get_topo(ctx):
return MyTopo(ctx)
topos = { 'myTopo': ( lambda: MyTopo() ) }
FLOW_TIMINGS = """{
"s1->h1":{
},
"s1->s2":{
"f0":[
0.5,
18.5
],
"f1":[
5.5,
28.5
],
"f2":[
10.5,
31.25
]
},
"s2->s1":{
},
"s2->h2":{
"f0":[
1.0,
20.0
]
},
"s2->s3":{
"f1":[
6.0,
29.0
],
"f2":[
11.0,
31.416666666666668
]
},
"s3->s2":{
},
"s3->h3":{
"f1":[
6.5,
29.5
],
"f2":[
11.5,
31.583333333333336
]
},
"h1->s1":{
"f0":[
0,
17.0
],
"f1":[
5,
27.5
],
"f2":[
10,
30.75
]
},
"h2->s2":{
},
"h3->s3":{
}
}""" | [
"topo.custom.topo.Topo.__init__"
] | [((387, 406), 'topo.custom.topo.Topo.__init__', 'Topo.__init__', (['self'], {}), '(self)\n', (400, 406), False, 'from topo.custom.topo import Topo\n')] |
import boto3
from botocore.exceptions import ClientError
import os
def send_email(email_details):
sender = os.environ['BASE_EMAIL']
recipient = email_details.get('recipient')
subject = email_details.get('subject')
body_text = (
email_details.get('body'))
body_html = """<html>
<head></head>
<body>
<p>""" + email_details.get('body') + """</p>
</body>
</html>
"""
charset = "UTF-8"
client = boto3.client('ses', region_name=os.environ['REGION'])
try:
# Provide the contents of the email.
client.send_email(
Destination={
'ToAddresses': [
recipient,
],
},
Message={
'Body': {
'Html': {
'Charset': charset,
'Data': body_html,
},
'Text': {
'Charset': charset,
'Data': body_text,
},
},
'Subject': {
'Charset': charset,
'Data': subject,
},
},
Source=sender,
)
# Display an error if something goes wrong.
except ClientError as e:
raise e
def send_already_exist_email(email):
email_details = {
'recipient': email,
'subject' : "Important Notice : Someone is using your email to signup on our system",
'body' : "Your email has been used for registration , this is considered as a notice\r\n"
}
try :
send_email(email_details)
except Exception as e:
raise e
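# --- Hedged usage sketch (added illustration, not part of the original module) ---
# A minimal example of how send_email could be invoked, assuming the BASE_EMAIL
# and REGION environment variables are set and the addresses are verified in SES.
# All concrete values below are hypothetical placeholders.
#
# os.environ.setdefault('BASE_EMAIL', 'noreply@example.com')
# os.environ.setdefault('REGION', 'us-east-1')
# send_email({
#     'recipient': 'user@example.com',
#     'subject': 'Welcome',
#     'body': 'Thanks for signing up.',
# })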
| [
"boto3.client"
] | [((464, 517), 'boto3.client', 'boto3.client', (['"""ses"""'], {'region_name': "os.environ['REGION']"}), "('ses', region_name=os.environ['REGION'])\n", (476, 517), False, 'import boto3\n')] |
#!/usr/bin/env python3
import logging
import socket
logger = logging.getLogger(__name__)
class TCP:
bind_ip = '0.0.0.0'
bind_port = 9091
conn_buffer_size = 4096
def __init__(self):
logger.setLevel(logging.DEBUG)
self.conn = None
def send(self, msg):
if self.conn:
print("Sending " + msg)
self.conn.sendall((msg + '\n').encode())
else:
print("Not connected. Can not send.")
def listen(self, handler):
print(
"Listening for TCP/IP connections on port ", self.bind_port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((self.bind_ip, self.bind_port))
s.listen(1)
exit = False
while True:
try:
self.conn, addr = s.accept()
print('Connection address: ' + addr[0])
smsg = ""
while True:
buffer = self.conn.recv(self.conn_buffer_size)
smsg += str(buffer, "utf-8")
# print("Received " + str(buffer, "utf-8"))
if not buffer:
break
cmds = smsg.split('\n')
if smsg.endswith('\n'):
smsg = ''
else:
smsg = cmds[-1]
cmds = cmds[:-1]
for cmd in cmds:
if cmd:
handler(cmd)
except KeyboardInterrupt:
print("User exit.")
exit = True
break
if exit:
break
if self.conn:
self.conn.close()
self.conn = None
s.close()
print("Connection closed.")
| [
"logging.getLogger",
"socket.socket"
] | [((63, 90), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (80, 90), False, 'import logging\n'), ((599, 648), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (612, 648), False, 'import socket\n')] |
"""
This module contains facilities for building a call graph for an Ada program.
The 'StaticCallGraphVisitor' class runs a call graph analysis on a libadalang
AdaNode via a visitor pattern (inherit from 'AdaVisitor') and makes available
its result call graph in the 'nodes' and 'edges' member variables.
"""
__copyright__ = "Copyright (c) 2020, Galois, Inc."
import logging
from typing import Callable, Dict, NewType, Optional, Set
import urllib.parse
import re
import libadalang as lal
from ada_visitor import AdaVisitor
from node_naming import GraphNode, NodeKey, get_node_key
class StaticCallGraphVisitor(AdaVisitor):
"""
Computes the static call graph within some AST node. Once `visit()` has
completed, you can read the call graph in the `edges` instance variable.
"""
def __init__(
self,
context: lal.AnalysisContext,
caller_being_defined: Optional[GraphNode],
nodes: Dict[NodeKey, GraphNode],
edges: Dict[NodeKey, Set[NodeKey]]
) -> None:
"""
Initialize the visitor. Because it is not very practical to locally
update the parameters when doing recursive calls, we suggest instead to
instantiate a new local visitor, run it, and then gather from its final
state whatever data you need. Avoids code duplication, at the price of
creating a bunch of short-lived instances.
"""
self.context: lal.AnalysisContext = context
self.caller_being_defined: Optional[GraphNode] = caller_being_defined
"""
Name of the caller currently being defined, that will be deemed the
caller of whatever call expression we encounter. This can either be a
function/procedure, or a file if we're at the top level.
"""
# INVARIANT
# All nodes appearing in edges, either via their key, or in the set of
# destinations, should be in the nodes set.
# There may be nodes that participate in no edges.
# We store nodes by key so that we can retrieve node instances by their
# key and avoid creating duplicates.
self.nodes: Dict[NodeKey, GraphNode] = nodes
"""All nodes in the graph, unsorted."""
if caller_being_defined:
# Register the caller as a node
caller_key = get_node_key(caller_being_defined)
if caller_key is not None and caller_key not in nodes:
nodes[caller_key] = caller_being_defined
self.edges: Dict[NodeKey, Set[NodeKey]] = edges
"""
Edges of the graph, keyed by their origin, valued by the set of
destinations for that origin.
"""
def get_graph_node_for_name(self, node: lal.Name) -> Optional[GraphNode]:
"""Returns the graph node for a name, creating it if none exists yet."""
key = get_node_key(node)
if key is None:
return None
if key not in self.nodes:
self.nodes[key] = node
return self.nodes[key]
def record_call(self, callee: lal.Name) -> None:
"""Records a witnessed static function/procedure call to callee."""
if self.caller_being_defined is not None:
caller_key = get_node_key(self.caller_being_defined)
if caller_key is None:
return
callee_node = self.get_graph_node_for_name(callee)
if callee_node is None:
return
callee_key = get_node_key(callee_node)
if callee_key is None:
return
if caller_key not in self.edges:
self.edges[caller_key] = set()
self.edges[caller_key].add(callee_key)
def locally_visit(
self,
caller_being_defined: Optional[GraphNode],
callback: Callable[[AdaVisitor], None]
) -> None:
"""
Do something with a visitor locally overriding the values of certain
variables.
"""
local_visitor = StaticCallGraphVisitor(
context=self.context,
caller_being_defined=caller_being_defined,
nodes=self.nodes,
edges=self.edges
)
callback(local_visitor)
# pylint: disable=invalid-name, missing-function-docstring
def visit_CallExpr(self, node: lal.CallExpr):
self.record_call(node.f_name)
# pylint: enable=invalid-name, missing-function-docstring
# pylint: disable=invalid-name, missing-function-docstring
def visit_CallStmt(self, node: lal.CallStmt):
self.record_call(node.f_call)
# pylint: enable=invalid-name, missing-function-docstring
# pylint: disable=invalid-name, missing-function-docstring
def visit_PackageBody(self, node: lal.PackageBody) -> None:
def callback(visitor):
visitor.generic_visit(node.f_decls)
visitor.generic_visit(node.f_stmts)
self.locally_visit(
caller_being_defined=self.caller_being_defined,
callback=callback
)
# pylint: enable=invalid-name, missing-function-docstring
# pylint: disable=invalid-name, missing-function-docstring
def visit_SubpBody(self, node: lal.SubpBody) -> None:
spec = node.f_subp_spec
name = spec.f_subp_name
def callback(visitor):
# assumption: the spec does not contain calls, skipping it
visitor.visit(node.f_decls)
visitor.visit(node.f_stmts)
self.locally_visit(
caller_being_defined=name,
callback=callback
)
# pylint: enable=invalid-name, missing-function-docstring
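# --- Hedged usage sketch (added illustration, not part of the original module) ---
# A rough outline of how the visitor above might be applied to a single Ada
# unit; the file name and the surrounding driver code are assumptions.
#
# context = lal.AnalysisContext()
# unit = context.get_from_file("main.adb")  # hypothetical source file
# visitor = StaticCallGraphVisitor(
#     context=context,
#     caller_being_defined=None,
#     nodes={},
#     edges={},
# )
# visitor.visit(unit.root)
# for caller_key, callee_keys in visitor.edges.items():
#     print(caller_key, "->", callee_keys)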
| [
"node_naming.get_node_key"
] | [((2851, 2869), 'node_naming.get_node_key', 'get_node_key', (['node'], {}), '(node)\n', (2863, 2869), False, 'from node_naming import GraphNode, NodeKey, get_node_key\n'), ((2327, 2361), 'node_naming.get_node_key', 'get_node_key', (['caller_being_defined'], {}), '(caller_being_defined)\n', (2339, 2361), False, 'from node_naming import GraphNode, NodeKey, get_node_key\n'), ((3223, 3262), 'node_naming.get_node_key', 'get_node_key', (['self.caller_being_defined'], {}), '(self.caller_being_defined)\n', (3235, 3262), False, 'from node_naming import GraphNode, NodeKey, get_node_key\n'), ((3468, 3493), 'node_naming.get_node_key', 'get_node_key', (['callee_node'], {}), '(callee_node)\n', (3480, 3493), False, 'from node_naming import GraphNode, NodeKey, get_node_key\n')] |
from peewee import Model, CharField, DateTimeField, ForeignKeyField
import os
from playhouse.db_url import connect
db = connect(os.environ.get('DATABASE_URL', 'sqlite:///my_database.db'))
class User(Model):
name = CharField(max_length=255, unique=True)
password = CharField(max_length=255)
class Meta:
database = db
class Task(Model):
name = CharField(max_length=255)
performed = DateTimeField(null=True)
performed_by = ForeignKeyField(model=User, null=True)
class Meta:
database = db | [
"peewee.DateTimeField",
"peewee.CharField",
"peewee.ForeignKeyField",
"os.environ.get"
] | [((135, 193), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""', '"""sqlite:///my_database.db"""'], {}), "('DATABASE_URL', 'sqlite:///my_database.db')\n", (149, 193), False, 'import os\n'), ((231, 269), 'peewee.CharField', 'CharField', ([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (240, 269), False, 'from peewee import Model, CharField, DateTimeField, ForeignKeyField\n'), ((286, 311), 'peewee.CharField', 'CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (295, 311), False, 'from peewee import Model, CharField, DateTimeField, ForeignKeyField\n'), ((390, 415), 'peewee.CharField', 'CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (399, 415), False, 'from peewee import Model, CharField, DateTimeField, ForeignKeyField\n'), ((433, 457), 'peewee.DateTimeField', 'DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (446, 457), False, 'from peewee import Model, CharField, DateTimeField, ForeignKeyField\n'), ((478, 516), 'peewee.ForeignKeyField', 'ForeignKeyField', ([], {'model': 'User', 'null': '(True)'}), '(model=User, null=True)\n', (493, 516), False, 'from peewee import Model, CharField, DateTimeField, ForeignKeyField\n')] |
from __future__ import unicode_literals
from django.test import TestCase
from django.db import IntegrityError
import datetime, decimal
from .models import *
class SimpleLotteryTestCase(TestCase):
def setUp(self):
lt = SimpleLottery(name = "<NAME>", number_of_numbers = 3, max_val = 5, rollover = decimal.Decimal('0.00'), min_matches=1)
lt.save()
self.draw = Draw(lotterytype = lt, drawdate = datetime.datetime(2016,2,5,10,00), prize = decimal.Decimal('100.00'))
self.draw.save()
def testEnterNumbers(self):
'''Test the entry of numbers for the winning combination'''
# check that numbers are correctly converted to a string
self.draw.winning_combo = 1,2,3
self.draw.save()
d = Draw.objects.get(lotterytype = self.draw.lotterytype, drawdate = self.draw.drawdate)
self.assertEqual(str(d.winning_combo), '1,2,3')
# check that numbers are sorted
self.draw.winning_combo = 5,4,2
self.draw.save()
d = Draw.objects.get(lotterytype = self.draw.lotterytype, drawdate = self.draw.drawdate)
self.assertEqual(d.winning_combo, [2,4,5])
# check that too few numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1,2
self.draw.save()
# check that too many numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1,2,3,4
self.draw.save()
# check that duplicate numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1, 2, 1
self.draw.save()
# check that numbers outside the range are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 4,5,6
self.draw.save()
# check that non numeric values are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 'cat', 'dog', 'pig'
self.draw.save()
def testNoNumberEntry(self):
''' check that you cant save an entry without the numbers'''
p = Punter(name = '<NAME>', email='<EMAIL>')
p.save()
e = Entry(punter=p, draw=self.draw)
with self.assertRaises(exceptions.ValidationError):
e.save()
def testMakeEntry(self):
''' check that valid entries are accepted and invalid rejected'''
p = Punter(name = '<NAME>', email='<EMAIL>')
p.save()
# check creating an entry
e = Entry(punter=p, draw=self.draw)
e.entry = 1,2,3
self.assertEqual(e.punter, p)
self.assertEqual(e.draw, self.draw)
# check that numbers are correctly converted to a string
e.entry = 1,2,3
e.save()
f = Entry.objects.get(punter=p, draw=self.draw)
self.assertEqual(str(f.entry), '1,2,3')
# check that numbers are sorted
e.entry = 5,4,2
e.save()
f = Entry.objects.get(punter=p, draw=self.draw)
self.assertEqual(f.entry, [2,4,5])
# check that too few numbers are rejected
with self.assertRaises(ValueError):
e.entry = 1,2
e.save()
# check that too many numbers are rejected
with self.assertRaises(ValueError):
e.entry = 1,2,3,4
e.save()
# check that duplicate numbers are rejected
with self.assertRaises(ValueError):
e.entry = 1, 2, 1
e.save()
# check that numbers outside the range are rejected
with self.assertRaises(ValueError):
e.entry = 4,5,6
e.save()
# check that non numeric values are rejected
with self.assertRaises(ValueError):
e.entry = 'cat', 'dog', 'pig'
e.save()
def testDuplicateEntry(self):
''' check that same punter cannot create more than one entry'''
p = Punter(name = '<NAME>', email='<EMAIL>')
p.save()
e = Entry(punter=p, draw=self.draw)
e.entry = 3,4,5
e.save()
with self.assertRaises(exceptions.ValidationError):
e2 = Entry(punter=p, draw=self.draw)
e2.entry = 3,4,5
e2.save()
class SimpleLotteryResultTestCase(TestCase):
def setUp(self):
lt = SimpleLottery(name = "Test Lottery", number_of_numbers = 3, max_val = 10, rollover = decimal.Decimal('0.00'), min_matches=1)
lt.save()
self.draw = Draw(lotterytype = lt, drawdate = datetime.datetime(2016,2,5,10,00), prize = decimal.Decimal('100.00'))
self.draw.save()
p1 = Punter(name = '<NAME>', email='<EMAIL>')
p1.save()
self.e1 = Entry(punter=p1, draw=self.draw)
self.e1.entry='1,2,3'
self.e1.save()
p2 = Punter(name = '<NAME>', email='<EMAIL>')
p2.save()
self.e2 = Entry(punter=p2, draw=self.draw)
self.e2.entry='2,3,4'
self.e2.save()
p3 = Punter(name = '<NAME>', email='<EMAIL>')
p3.save()
self.e3 = Entry(punter=p3, draw=self.draw)
self.e3.entry='1,3,4'
self.e3.save()
def testDraw(self):
'''test the draw, and allocation of prizes'''
# test that the correct winning entries are found
self.draw.makeDraw(2,3,5)
winning_entries = self.draw.entry_set.filter(win__isnull=False) # use the orm to read the winning entries
self.assertEqual(len(winning_entries), 2)
self.assertIn(self.e1, winning_entries)
self.assertIn(self.e2, winning_entries)
# test that the prize allocated is correct
self.assertEqual(winning_entries[0].win.prize, self.draw.prize/2)
self.assertEqual(winning_entries[1].win.prize, self.draw.prize/2)
def testNoWin(self):
'''test that if the conditions for a win are not met, no winning entries are selected, and the prize is rolled over'''
self.draw.makeDraw(6,7,8)
winning_entries = self.draw.entry_set.filter(win__isnull=False) # use the orm to read the winning entries
# test that there are no winners
self.assertEqual(len(winning_entries), 0)
# test that the prize money has rolled over
self.assertEqual(self.draw.prize, self.draw.lotterytype.rollover)
def testRolloverAllocated(self):
'''test that when there is a rollover it is correctly applied and then reset'''
# set the rollover
self.draw.lotterytype.rollover = decimal.Decimal(1000)
self.draw.lotterytype.save()
self.assertEqual(self.draw.lotterytype.rollover, decimal.Decimal(1000.00))
# do the draw
self.draw.makeDraw(1,2,3)
winning_entries = self.draw.entry_set.filter(win__isnull=False) # use the orm to read the winning entries
# test that there is one winning entry
self.assertEqual(len(winning_entries), 1)
# test that the rollover has been added to the prize money allocated
self.assertEqual(winning_entries[0].win.prize, self.draw.prize+decimal.Decimal(1000.00))
# test that the rollover has been reset
self.assertEqual(self.draw.lotterytype.rollover, decimal.Decimal(0.00))
class MoreComplexLotteryTestCase(TestCase):
def setUp(self):
lt = MoreComplexLottery(name = "Test Lottery", number_of_numbers = 3, max_val = 5, rollover = decimal.Decimal('0.00'), min_matches=1, spotprize_nummatches=3, spotprize_value=decimal.Decimal('10.00'))
lt.save()
self.draw = Draw(lotterytype = lt, drawdate = datetime.datetime(2016,2,5,10,00), prize = decimal.Decimal('100.00'))
self.draw.save()
def testEnterNumbers(self):
'''Test the entry of numbers for the winning combination'''
# check that numbers are correctly converted to a string
self.draw.winning_combo = 1,2,3
self.assertEqual(self.draw.winning_combo, '1,2,3')
# check that numbers are sorted
self.draw.winning_combo = 5,4,2
self.assertEqual(self.draw.winning_combo, [2,4,5])
# check that too few numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1,2
# check that too many numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1,2,3,4
# check that duplicate numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1, 2, 1
# check that numbers outside the range are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 4,5,6
# check that non numeric values are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 'cat', 'dog', 'pig'
def testNoNumberEntry(self):
''' check that you cant save an entry without the numbers'''
p = Punter(name = '<NAME>', email='<EMAIL>')
p.save()
e = Entry(punter=p, draw=self.draw)
with self.assertRaises(exceptions.ValidationError):
e.save()
def testMakeEntry(self):
''' check that valid entries are accepted and invalid rejected'''
p = Punter(name = '<NAME>', email='<EMAIL>')
p.save()
# check creating an entry
e = Entry(punter=p, draw=self.draw)
e.entry = 1,2,3
e.save()
self.assertEqual(e.punter, p)
self.assertEqual(e.draw, self.draw)
# check that numbers are correctly converted to a string
f = Entry.objects.get(punter=p, draw=self.draw)
self.assertEqual(str(f.entry), '1,2,3')
# check that numbers are sorted
e.entry = 5,4,2
e.save()
f = Entry.objects.get(punter=p, draw=self.draw)
self.assertEqual(f.entry, [2,4,5])
# check that too few numbers are rejected
with self.assertRaises(ValueError):
e.entry = 1,2
e.save()
# check that too many numbers are rejected
with self.assertRaises(ValueError):
e.entry = 1,2,3,4
e.save()
# check that duplicate numbers are rejected
with self.assertRaises(ValueError):
e.entry = 1, 2, 1
e.save()
# check that numbers outside the range are rejected
with self.assertRaises(ValueError):
e.entry = 4,5,6
e.save()
# check that non numeric values are rejected
with self.assertRaises(ValueError):
e.entry = 'cat', 'dog', 'pig'
e.save()
def testDuplicateEntry(self):
''' check that same punter cannot create more than one entry'''
p = Punter(name = '<NAME>', email='<EMAIL>')
p.save()
e = Entry(punter=p, draw=self.draw)
e.entry = 3,4,5
e.save()
with self.assertRaises(exceptions.ValidationError):
e2 = Entry(punter=p, draw=self.draw)
e2.entry = 3,4,5
e2.save()
def testEnterNumbers(self):
'''Test the entry of numbers for the winning combination'''
# check that numbers are correctly converted to a string
self.draw.winning_combo = 1,2,3
self.draw.save()
d = Draw.objects.get(drawdate = self.draw.drawdate, lotterytype = self.draw.lotterytype)
self.assertEqual(str(d.winning_combo), '1,2,3')
# check that numbers are sorted
self.draw.winning_combo = 5,4,2
self.draw.save()
d = Draw.objects.get(drawdate = self.draw.drawdate, lotterytype = self.draw.lotterytype)
self.assertEqual(d.winning_combo, [2,4,5])
# check that too few numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1,2
self.draw.save()
# check that too many numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1,2,3,4
self.draw.save()
# check that duplicate numbers are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 1, 2, 1
self.draw.save()
# check that numbers outside the range are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 4,5,6
self.draw.save()
# check that non numeric values are rejected
with self.assertRaises(ValueError):
self.draw.winning_combo = 'cat', 'dog', 'pig'
self.draw.save()
class MoreComplexLotteryResultTestCase(TestCase):
def setUp(self):
lt = MoreComplexLottery(name = "Test Lottery", number_of_numbers = 3, max_val = 10, rollover = decimal.Decimal('0.00'), min_matches=1, spotprize_nummatches=1, spotprize_value=decimal.Decimal('10.00'))
lt.save()
self.draw = Draw(lotterytype = lt, drawdate = datetime.datetime(2016,2,5,10,00), prize = decimal.Decimal('100.00'))
self.draw.save()
p1 = Punter(name = '<NAME>', email='<EMAIL>')
p1.save()
self.e1 = Entry(punter=p1, draw=self.draw)
self.e1.entry='1,2,3'
self.e1.save()
p2 = Punter(name = '<NAME>', email='<EMAIL>')
p2.save()
self.e2 = Entry(punter=p2, draw=self.draw)
self.e2.entry='2,3,4'
self.e2.save()
p3 = Punter(name = '<NAME>', email='<EMAIL>')
p3.save()
self.e3 = Entry(punter=p3, draw=self.draw)
self.e3.entry='1,3,4'
self.e3.save()
def testDraw(self):
'''test the draw, and allocation of prizes'''
# test that the correct winning entries are found
self.draw.makeDraw(2,3,5)
# now we have 2 winners of the main prize, who get 50 each, and one winner of the spot prize who gets 10
winning_entries = self.draw.entry_set.filter(win__isnull=False) # use the orm to read the winning entries
self.assertEqual(len(winning_entries), 3)
self.assertIn(self.e1, winning_entries)
self.assertEqual(self.e1.win.wintype, Win.MAIN)
self.assertEqual(self.e1.win.prize, decimal.Decimal('50.00'))
self.assertIn(self.e2, winning_entries)
self.assertEqual(self.e2.win.wintype, Win.MAIN)
self.assertEqual(self.e2.win.prize, decimal.Decimal('50.00'))
self.assertIn(self.e3, winning_entries)
self.assertEqual(self.e3.win.wintype, Win.SPOTPRIZE)
self.assertEqual(self.e3.win.prize, decimal.Decimal('10.00'))
def testNoWin(self):
'''test that if the conditions for a win are not met, no winning entries are selected, and the prize is rolled over'''
self.draw.makeDraw(6,7,8)
winning_entries = self.draw.entry_set.filter(win__isnull=False) # use the orm to read the winning entries
# test that there are no winners
self.assertEqual(len(winning_entries), 0)
# test that the prize money has rolled over
self.assertEqual(self.draw.prize, self.draw.lotterytype.rollover)
def testRolloverAllocated(self):
'''test that when there is a rollover it is correctly applied and then reset'''
# set the rollover
self.draw.lotterytype.rollover = decimal.Decimal(1000)
self.draw.lotterytype.save()
self.assertEqual(self.draw.lotterytype.rollover, decimal.Decimal(1000.00))
# do the draw
self.draw.makeDraw(1,2,3)
winning_entries = self.draw.entry_set.filter(win__isnull=False, win__wintype=Win.MAIN) # use the orm to read the winning entries for the main prize
# test that there is one winning entry for the main prize
self.assertEqual(len(winning_entries), 1)
# test that the rollover has been added to the prize money allocated
self.assertEqual(winning_entries[0].win.prize, self.draw.prize+decimal.Decimal(1000.00))
# test that the rollover has been reset
self.assertEqual(self.draw.lotterytype.rollover, decimal.Decimal(0.00))
| [
"datetime.datetime",
"decimal.Decimal"
] | [((6485, 6506), 'decimal.Decimal', 'decimal.Decimal', (['(1000)'], {}), '(1000)\n', (6500, 6506), False, 'import datetime, decimal\n'), ((15128, 15149), 'decimal.Decimal', 'decimal.Decimal', (['(1000)'], {}), '(1000)\n', (15143, 15149), False, 'import datetime, decimal\n'), ((6601, 6624), 'decimal.Decimal', 'decimal.Decimal', (['(1000.0)'], {}), '(1000.0)\n', (6616, 6624), False, 'import datetime, decimal\n'), ((7173, 7193), 'decimal.Decimal', 'decimal.Decimal', (['(0.0)'], {}), '(0.0)\n', (7188, 7193), False, 'import datetime, decimal\n'), ((14037, 14061), 'decimal.Decimal', 'decimal.Decimal', (['"""50.00"""'], {}), "('50.00')\n", (14052, 14061), False, 'import datetime, decimal\n'), ((14211, 14235), 'decimal.Decimal', 'decimal.Decimal', (['"""50.00"""'], {}), "('50.00')\n", (14226, 14235), False, 'import datetime, decimal\n'), ((14390, 14414), 'decimal.Decimal', 'decimal.Decimal', (['"""10.00"""'], {}), "('10.00')\n", (14405, 14414), False, 'import datetime, decimal\n'), ((15244, 15267), 'decimal.Decimal', 'decimal.Decimal', (['(1000.0)'], {}), '(1000.0)\n', (15259, 15267), False, 'import datetime, decimal\n'), ((15877, 15897), 'decimal.Decimal', 'decimal.Decimal', (['(0.0)'], {}), '(0.0)\n', (15892, 15897), False, 'import datetime, decimal\n'), ((310, 333), 'decimal.Decimal', 'decimal.Decimal', (['"""0.00"""'], {}), "('0.00')\n", (325, 333), False, 'import datetime, decimal\n'), ((423, 459), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(2)', '(5)', '(10)', '(0)'], {}), '(2016, 2, 5, 10, 0)\n', (440, 459), False, 'import datetime, decimal\n'), ((466, 491), 'decimal.Decimal', 'decimal.Decimal', (['"""100.00"""'], {}), "('100.00')\n", (481, 491), False, 'import datetime, decimal\n'), ((4407, 4430), 'decimal.Decimal', 'decimal.Decimal', (['"""0.00"""'], {}), "('0.00')\n", (4422, 4430), False, 'import datetime, decimal\n'), ((4520, 4556), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(2)', '(5)', '(10)', '(0)'], {}), '(2016, 2, 5, 10, 0)\n', (4537, 4556), False, 'import datetime, decimal\n'), ((4563, 4588), 'decimal.Decimal', 'decimal.Decimal', (['"""100.00"""'], {}), "('100.00')\n", (4578, 4588), False, 'import datetime, decimal\n'), ((7042, 7065), 'decimal.Decimal', 'decimal.Decimal', (['(1000.0)'], {}), '(1000.0)\n', (7057, 7065), False, 'import datetime, decimal\n'), ((7365, 7388), 'decimal.Decimal', 'decimal.Decimal', (['"""0.00"""'], {}), "('0.00')\n", (7380, 7388), False, 'import datetime, decimal\n'), ((7445, 7469), 'decimal.Decimal', 'decimal.Decimal', (['"""10.00"""'], {}), "('10.00')\n", (7460, 7469), False, 'import datetime, decimal\n'), ((7543, 7579), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(2)', '(5)', '(10)', '(0)'], {}), '(2016, 2, 5, 10, 0)\n', (7560, 7579), False, 'import datetime, decimal\n'), ((7586, 7611), 'decimal.Decimal', 'decimal.Decimal', (['"""100.00"""'], {}), "('100.00')\n", (7601, 7611), False, 'import datetime, decimal\n'), ((12639, 12662), 'decimal.Decimal', 'decimal.Decimal', (['"""0.00"""'], {}), "('0.00')\n", (12654, 12662), False, 'import datetime, decimal\n'), ((12719, 12743), 'decimal.Decimal', 'decimal.Decimal', (['"""10.00"""'], {}), "('10.00')\n", (12734, 12743), False, 'import datetime, decimal\n'), ((12818, 12854), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(2)', '(5)', '(10)', '(0)'], {}), '(2016, 2, 5, 10, 0)\n', (12835, 12854), False, 'import datetime, decimal\n'), ((12861, 12886), 'decimal.Decimal', 'decimal.Decimal', (['"""100.00"""'], {}), "('100.00')\n", (12876, 12886), False, 'import 
datetime, decimal\n'), ((15746, 15769), 'decimal.Decimal', 'decimal.Decimal', (['(1000.0)'], {}), '(1000.0)\n', (15761, 15769), False, 'import datetime, decimal\n')] |
from typing import Dict
from agentos.utils import DUMMY_DEV_REGISTRY
from agentos.component import Component
from agentos.repo import Repo
def get_component(name: str) -> Component:
registry = Registry()
return registry.get_component(name)
class Registry:
"""
This encapsulates interactions with an external registry that contains
information about publicly-available Components.
"""
def __init__(self, registry=None):
self.registry = registry if registry else DUMMY_DEV_REGISTRY
self.latest_refs = self.registry["latest_refs"]
def get_component(self, name: str) -> Component:
instantiated = {}
identifier = Component.Identifier(name, self.latest_refs)
return self._get_component(identifier, instantiated)
def _get_component(
self, identifier: Component.Identifier, instantiated: Dict
) -> Component:
if identifier.full in instantiated:
return instantiated[identifier.full]
component_spec = self.registry["components"][identifier.full]
repo_name = component_spec["repo"]
repo_spec = self.registry["repos"][repo_name]
repo = Repo.from_spec(repo_name, repo_spec)
component = Component.get_from_repo(
repo=repo,
identifier=identifier,
class_name=component_spec["class_name"],
file_path=component_spec["file_path"],
)
instantiated[identifier.full] = component
for attr_name, dep_name in component_spec["dependencies"].items():
dep_id = Component.Identifier(dep_name, self.latest_refs)
dep_component = self._get_component(dep_id, instantiated)
component.add_dependency(dep_component, attribute_name=attr_name)
return component
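# --- Hedged sketch of the registry layout implied by the lookups above ---
# (added illustration; the keys below are inferred from this file, and the
# identifier format "agent==master" is a hypothetical example, not taken from
# DUMMY_DEV_REGISTRY)
#
# example_registry = {
#     "latest_refs": {"...": "..."},
#     "repos": {
#         "example_repo": {"...": "..."},  # passed to Repo.from_spec()
#     },
#     "components": {
#         "agent==master": {
#             "repo": "example_repo",
#             "class_name": "Agent",
#             "file_path": "agent.py",
#             "dependencies": {"policy": "policy==master"},
#         },
#     },
# }
# component = Registry(example_registry).get_component("agent")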
| [
"agentos.component.Component.Identifier",
"agentos.repo.Repo.from_spec",
"agentos.component.Component.get_from_repo"
] | [((678, 722), 'agentos.component.Component.Identifier', 'Component.Identifier', (['name', 'self.latest_refs'], {}), '(name, self.latest_refs)\n', (698, 722), False, 'from agentos.component import Component\n'), ((1171, 1207), 'agentos.repo.Repo.from_spec', 'Repo.from_spec', (['repo_name', 'repo_spec'], {}), '(repo_name, repo_spec)\n', (1185, 1207), False, 'from agentos.repo import Repo\n'), ((1228, 1370), 'agentos.component.Component.get_from_repo', 'Component.get_from_repo', ([], {'repo': 'repo', 'identifier': 'identifier', 'class_name': "component_spec['class_name']", 'file_path': "component_spec['file_path']"}), "(repo=repo, identifier=identifier, class_name=\n component_spec['class_name'], file_path=component_spec['file_path'])\n", (1251, 1370), False, 'from agentos.component import Component\n'), ((1571, 1619), 'agentos.component.Component.Identifier', 'Component.Identifier', (['dep_name', 'self.latest_refs'], {}), '(dep_name, self.latest_refs)\n', (1591, 1619), False, 'from agentos.component import Component\n')] |
#! /usr/bin/env python
###############################################################################
# MCHE201_ControlBox.py
#
# Code to control the MCHE201 competition
# 1. Listens for switch to be pressed
# 2. When pressed, closes all 8 relays and runs motor for 30sec
#
#
# Created: 11/03/15
# - <NAME>
# - <EMAIL>
# - http://www.ucs.louisiana.edu/~jev9637
#
# Modified:
# * 03/31/16 - <NAME> - <EMAIL>
# - updated for Python 3
# - Replaced PiFace with Ocean Control USB relays
# * 04/05/16 - <NAME> - <EMAIL>
# - added second read of button, pushing after initial will cancel
# - Adding logging
# * 11/01/16 - <NAME> - <EMAIL>
# - Added motor control with Adafruit Raspberry Pi Hat
#
###############################################################################
# import from __future__ for Python 2 people
from __future__ import division, print_function, unicode_literals
import logging
import numpy as np
import serial
import time
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
logging.basicConfig(level=logging.DEBUG,
format='[%(levelname)s] (%(threadName)-10s) %(message)s',
)
# Configuration Parameters
ON_RASPPI_TRACK1 = False
ON_RASPPI_TRACK2 = True
DC_MOTOR_PIN = 1
hardware_start_switch = 4 # Define the digital input position of the hardware switch
class oceanControls(object):
""" Class to wrap the ASCII protocol for controlling the Ocean Controls
Relay module"""
def __init__(self, port, baudrate = 9600, address = 00):
self.ser = serial.Serial(port, baudrate,
bytesize=8, parity='N',
stopbits=1, timeout=0.1)
self.address = address
def turnRelayOn(self, relay_number):
""" Method to turn on an individual relay
Input arguments:
relay_number = The relay number to control
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/15/16
"""
if relay_number in [1, 2, 3, 4, 5, 6, 7, 8]:
self.ser.write('@{:02d} ON {}\r'.format(self.address, relay_number).encode('utf-8'))
else:
raise ValueError('Please enter a relay number between 1 and 8.')
def turnRelayOff(self, relay_number):
""" Method to turn off an individual relay
Input arguments:
relay_number = The relay number to control
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/15/16
"""
if relay_number in [1, 2, 3, 4, 5, 6, 7, 8]:
self.ser.write('@{:02d} OFF {}\r'.format(self.address, relay_number).encode('utf-8'))
else:
raise ValueError('Please enter a relay number between 1 and 8.')
def timedRelayOn(self, relay_number, time_on):
""" Method to turn on an individual relay for a set time
Input arguments:
relay_number = The relay number to control
time_on = the time the relay should remain on (s)
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/15/16
"""
if relay_number in [1, 2, 3, 4, 5, 6, 7, 8]:
# Convert the time input (s) to the number of tenths of a second the relay should be on
time_tenths = int(time_on * 10)
if time_tenths < 1 or time_tenths > 255:
raise ValueError('The time must be between 0.1s and 25.5s')
if not np.isclose((time_on / 0.1) % 1, 0):
raise ValueError('The resolution of this command is only 0.1s.\n\
Please enter a value that is a multiple of 0.1s.')
self.ser.write('@{:02d} TR {} {:03d}\r'.format(self.address, relay_number, time_tenths).encode('utf-8'))
else:
raise ValueError('Please enter a relay number between 1 and 8.')
def turnAllOn(self):
""" Method to turn on all relays
Input arguments:
nothing
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/15/16
"""
self.ser.write('@{:02d} ON {}\r'.format(self.address, 0).encode('utf-8'))
def turnAllOff(self):
""" Method to turn off all relays
Input arguments:
nothing
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/15/16
"""
self.ser.write('@{:02d} OFF {}\r'.format(self.address, 0).encode('utf-8'))
def isDigitalInputOn(self, digital_input_number):
""" Method that checks the status of an individual digital input
Input Arguments:
digital_input_number = The input number to check
Returns:
Boolean indicating if input is High/On (True) or Low/Off (False)
Created: <NAME> - <EMAIL> - 03/16/16
"""
if digital_input_number in [1, 2, 3, 4]:
self.ser.flushInput()
# May need to change to below in versions of PySerial >= 3.0
# self.ser.reset_input_buffer()
self.ser.write('@{:02d} IS {:02d}\r'.format(self.address, digital_input_number).encode('utf-8'))
# TODO: Be more elegant about this
status_string = self.ser.readlines()[-1]
status = int(status_string.split()[-1])
if status:
return True
else:
return False
else:
raise ValueError('Please enter a digital input number between 1 and 4.')
def isRelayOn(self, relay_number):
""" Method that checks the status of an individual relay
Input Arguments:
relay_number = The relay number to control
Returns:
Boolean indicating if relay is on (True) or off (False)
Created: <NAME> - <EMAIL> - 03/15/16
"""
if relay_number in [1, 2, 3, 4, 5, 6, 7, 8]:
# self.ser.flushInput()
# May need to change to below in versions of PySerial >= 3.0
# self.ser.reset_input_buffer()
self.ser.write('@{:02d} RS {:02d}\r'.format(self.address, relay_number).encode('utf-8'))
# TODO: Be more elegant about this
status_string = self.ser.readlines()[-1]
status = int(status_string.split()[-1])
if status:
return True
else:
return False
else:
raise ValueError('Please enter a relay number between 1 and 8.')
def printRelayStatus(self, relay_number):
""" Method to print the status of an individual relay
Input Arguments:
relay_number = The relay number to control
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/15/16
"""
if relay_number in [1, 2, 3, 4, 5, 6, 7, 8]:
if controller.isRelayOn(relay_number):
print('Relay {} is on.'.format(relay_number))
else:
print('Relay {} is off.'.format(relay_number))
else:
raise ValueError('Please enter a relay number between 1 and 8.')
def printDigitalInputStatus(self, digital_input_number):
""" Method to print the status of an individual digital input
Input Arguments:
digital_input_number = The digital input number to check
Returns:
nothing
Created: <NAME> - <EMAIL> - 03/16/16
"""
if digital_input_number in [1, 2, 3, 4]:
if controller.isDigitalInputOn(digital_input_number):
print('Input {} is High/On.'.format(digital_input_number))
else:
print('Input {} is Low/Off.'.format(digital_input_number))
else:
raise ValueError('Please enter a digital input number between 1 and 4.')
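# --- Hedged illustration (added; not part of the original script) ---
# The format strings above imply the ASCII frames written to the serial port.
# For a controller at address 0 they come out roughly as:
#   turnRelayOn(3)        -> "@00 ON 3\r"
#   turnRelayOff(3)       -> "@00 OFF 3\r"
#   timedRelayOn(3, 2.5)  -> "@00 TR 3 025\r"
#   turnAllOn()           -> "@00 ON 0\r"
#   isDigitalInputOn(4)   -> "@00 IS 04\r"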
# recommended for auto-disabling motors on shutdown!
def turnOffAllMotors():
motorHat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
motorHat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
motorHat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
motorHat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
if __name__ == "__main__":
if ON_RASPPI_TRACK1:
# Define an instance of the oceanControls class for use on Rasp Pi
controller = oceanControls('/dev/ttyUSB0')
elif ON_RASPPI_TRACK2: # Identical to Track 1 for now
# Define an instance of the oceanControls class for use on Rasp Pi
controller = oceanControls('/dev/ttyUSB0')
else:
# Define an instance of the oceanControls class on Dr. Vaughan's MacBook
controller = oceanControls('/dev/tty.usbserial-AL01H195')
# Create the motor controller instance and a DC motor object
motorHat = Adafruit_MotorHAT(addr=0x60)
spinMotor = motorHat.getMotor(DC_MOTOR_PIN)
# Now the relationship between the Ocean Controller outputs and the track
# Define the values for red then increment around the track CW
# Red - Blue - Black - Yellow
# Should allow easier changing in the future
red_relay = 1
red_LED = 5
blue_relay = red_relay + 1
blue_LED = red_LED + 1
black_relay = blue_relay + 1
black_LED = blue_LED + 1
yellow_relay = black_relay + 1
yellow_LED = black_LED + 1
try:
while True:
if controller.isDigitalInputOn(hardware_start_switch):
logging.debug('Starting round...')
# Close all the relays
controller.turnAllOn()
# Start the motor
spinMotor.run(Adafruit_MotorHAT.FORWARD)
spinMotor.setSpeed(255)
# Get the current time
start_time = time.time()
# Pause for 1s to keep from triggering stop
time.sleep(1)
# Keep the relays closed for 30 seconds
while (time.time() - start_time < 30):
time.sleep(0.1)
# Check to see if the switch is pressed to cancel
if controller.isDigitalInputOn(hardware_start_switch):
logging.debug('Switched pressed to cancel round.')
controller.turnAllOff()
spinMotor.run(Adafruit_MotorHAT.RELEASE)
break
# Open all the relays
controller.turnAllOff()
# Stop the motor
spinMotor.run(Adafruit_MotorHAT.RELEASE)
logging.debug('Finished 30 second round. Ready for next.')
# sleep 0.1s between checks of the start switch
time.sleep(0.1)
except(KeyboardInterrupt, SystemExit):
logging.debug('Exiting.')
turnOffAllMotors()
controller.turnAllOff()
controller.ser.close() | [
"logging.basicConfig",
"numpy.isclose",
"logging.debug",
"time.sleep",
"Adafruit_MotorHAT.Adafruit_MotorHAT",
"serial.Serial",
"time.time"
] | [((1059, 1162), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""[%(levelname)s] (%(threadName)-10s) %(message)s"""'}), "(level=logging.DEBUG, format=\n '[%(levelname)s] (%(threadName)-10s) %(message)s')\n", (1078, 1162), False, 'import logging\n'), ((9209, 9235), 'Adafruit_MotorHAT.Adafruit_MotorHAT', 'Adafruit_MotorHAT', ([], {'addr': '(96)'}), '(addr=96)\n', (9226, 9235), False, 'from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor\n'), ((1590, 1668), 'serial.Serial', 'serial.Serial', (['port', 'baudrate'], {'bytesize': '(8)', 'parity': '"""N"""', 'stopbits': '(1)', 'timeout': '(0.1)'}), "(port, baudrate, bytesize=8, parity='N', stopbits=1, timeout=0.1)\n", (1603, 1668), False, 'import serial\n'), ((11269, 11284), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (11279, 11284), False, 'import time\n'), ((11337, 11362), 'logging.debug', 'logging.debug', (['"""Exiting."""'], {}), "('Exiting.')\n", (11350, 11362), False, 'import logging\n'), ((3667, 3699), 'numpy.isclose', 'np.isclose', (['(time_on / 0.1 % 1)', '(0)'], {}), '(time_on / 0.1 % 1, 0)\n', (3677, 3699), True, 'import numpy as np\n'), ((9875, 9909), 'logging.debug', 'logging.debug', (['"""Starting round..."""'], {}), "('Starting round...')\n", (9888, 9909), False, 'import logging\n'), ((10254, 10265), 'time.time', 'time.time', ([], {}), '()\n', (10263, 10265), False, 'import time\n'), ((10360, 10373), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (10370, 10373), False, 'import time\n'), ((11121, 11179), 'logging.debug', 'logging.debug', (['"""Finished 30 second round. Ready for next."""'], {}), "('Finished 30 second round. Ready for next.')\n", (11134, 11179), False, 'import logging\n'), ((10522, 10537), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (10532, 10537), False, 'import time\n'), ((10470, 10481), 'time.time', 'time.time', ([], {}), '()\n', (10479, 10481), False, 'import time\n'), ((10708, 10758), 'logging.debug', 'logging.debug', (['"""Switched pressed to cancel round."""'], {}), "('Switched pressed to cancel round.')\n", (10721, 10758), False, 'import logging\n')] |
"""This module contains some basic utilities to locate Ren'Py distributables and to find the macOS
version of a VN.
"""
import os
import warnings
from typing import Optional, Callable
from functools import wraps
def deprecated(message: str = ""):
"""Mark a function as deprecated.
This is used as a decorator to mark some functions as deprecated without needing to import
warnings repeatedly. The function that uses the decorator will be called but will display
a deprecation warning with a supplied message.
Arguments:
message (str): The message or reason for deprecation. Defaults to a generic statement
generated by the function's name.
Returns:
warnable (Callable): The function with a warning wrapper.
"""
def warnable(call: Callable):
@wraps(call)
def do_call(*args, **kwargs):
warnings.warn(message if message else call.__name__ + " is deprecated.",
category=DeprecationWarning)
call(*args, **kwargs)
return do_call
return warnable
@deprecated(message="Please use isfile or is from the os.path module.")
def file_exists(directory: str = os.getcwd(), item: str = '') -> Optional[str]:
"""Determine whether a file exists in a certain directory.
**Note**: This function is being deprecated in favor of the utilities provided in the `os`
module.
Args:
directory (str): The directory to search in.
item (str): The item to search for in the directory.
Returns:
fname (str): File name if it's found or `None` if it doesn't find anything
"""
cwd = os.listdir(directory)
for listed_item in cwd:
if item in listed_item:
return listed_item
return None
@deprecated("This function is no longer supported.")
def verify_built_files(directory: str = os.getcwd()) -> Optional[str]:
"""Determine if the Ren'Py distributions have been built already by looking for the `-dists`
directory.
This function will check if the directory exists in itself.
Args:
directory (str): The directory to search.
Returns:
isdir (bool): Whether the directory exists or not.
"""
return file_exists(directory=directory, item="-dists")
@deprecated("This function is longer supported.")
def find_mac_build(directory: str) -> bool:
"""Determine whether the macOS builds have been created.
Args:
directory (str): The directory to search in.
Returns:
isfile (bool): Whether the macOS ZIP file exists.
"""
return file_exists(directory=directory, item="-mac")
| [
"warnings.warn",
"os.listdir",
"functools.wraps",
"os.getcwd"
] | [((1201, 1212), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1210, 1212), False, 'import os\n'), ((1657, 1678), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (1667, 1678), False, 'import os\n'), ((1881, 1892), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1890, 1892), False, 'import os\n'), ((827, 838), 'functools.wraps', 'wraps', (['call'], {}), '(call)\n', (832, 838), False, 'from functools import wraps\n'), ((889, 994), 'warnings.warn', 'warnings.warn', (["(message if message else call.__name__ + ' is deprecated.')"], {'category': 'DeprecationWarning'}), "(message if message else call.__name__ + ' is deprecated.',\n category=DeprecationWarning)\n", (902, 994), False, 'import warnings\n')] |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, fields
class AccountMoveCashBasis(models.Model):
_inherit = 'account.move'
tax_cash_basis_rec_id = fields.Many2one(
'account.partial.reconcile',
string='Tax Cash Basis Entry of',
help="Technical field used to keep track of the tax cash basis reconciliation."
"This is needed when cancelling the source: it will post the inverse journal entry to cancel that part too.")
class AccountMoveLine(models.Model):
_inherit = "account.move.line"
@api.model
def create(self, vals):
taxes = False
if vals.get('tax_line_id'):
taxes = [{'use_cash_basis': self.env['account.tax'].browse(vals['tax_line_id']).use_cash_basis}]
if vals.get('tax_ids'):
taxes = self.env['account.move.line'].resolve_2many_commands('tax_ids', vals['tax_ids'])
if taxes and any([tax['use_cash_basis'] for tax in taxes]) and not vals.get('tax_exigible'):
vals['tax_exigible'] = False
return super(AccountMoveLine, self).create(vals)
| [
"odoo.fields.Many2one"
] | [((240, 520), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.partial.reconcile"""'], {'string': '"""Tax Cash Basis Entry of"""', 'help': '"""Technical field used to keep track of the tax cash basis reconciliation.This is needed when cancelling the source: it will post the inverse journal entry to cancel that part too."""'}), "('account.partial.reconcile', string=\n 'Tax Cash Basis Entry of', help=\n 'Technical field used to keep track of the tax cash basis reconciliation.This is needed when cancelling the source: it will post the inverse journal entry to cancel that part too.'\n )\n", (255, 520), False, 'from odoo import api, models, fields\n')] |
#!/usr/bin/env python3
"""Finetune a toxsmi predictor on a new task."""
import argparse
import json
import logging
import os
import sys
from time import time
import numpy as np
import torch
from paccmann_predictor.utils.hyperparams import OPTIMIZER_FACTORY
from paccmann_predictor.utils.utils import get_device
from pytoda.datasets import AnnotatedDataset, SMILESDataset
from pytoda.smiles.smiles_language import SMILESLanguage
from sklearn.metrics import (
auc, average_precision_score, precision_recall_curve, roc_curve
)
from toxsmi.models import MODEL_FACTORY
from toxsmi.utils import disable_rdkit_logging
from toxsmi.utils.transferlearning import update_mca_model
# setup logging
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument(
'train_scores_filepath',
type=str,
help='Path to the training toxicity scores (.csv)'
)
parser.add_argument(
'test_scores_filepath',
type=str,
help='Path to the test toxicity scores (.csv)'
)
parser.add_argument(
'smi_filepath', type=str, help='Path to the SMILES data (.smi)'
)
parser.add_argument('model_path', type=str, help='Directory of trained model.')
parser.add_argument(
'params_filepath',
type=str,
help='Path to the .json with params for transfer learning'
)
parser.add_argument('training_name', type=str, help='Name for the training.')
def main(
train_scores_filepath, test_scores_filepath, smi_filepath, model_path,
params_filepath, training_name
):
logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger(f'{training_name}')
logger.setLevel(logging.INFO)
disable_rdkit_logging()
device = get_device()
# Restore pretrained model
logger.info('Start model restoring.')
try:
with open(os.path.join(model_path, 'model_params.json'), 'r') as fp:
params = json.load(fp)
smiles_language = SMILESLanguage.load(
os.path.join(model_path, 'smiles_language.pkl')
)
model = MODEL_FACTORY[params.get('model_fn', 'mca')](params).to(device)
# Try weight restoring
try:
weight_path = os.path.join(
model_path, 'weights',
params.get('weights_name', 'best_ROC-AUC_mca.pt')
)
model.load(weight_path)
except Exception:
try:
wp = os.listdir(os.path.join(model_path, 'weights'))[0]
logger.info(
f"Weights {weight_path} not found. Try restore {wp}"
)
model.load(os.path.join(model_path, 'weights', wp))
except Exception:
raise Exception('Error in weight loading.')
except Exception:
raise Exception(
'Error in model restoring. model_path should point to the model root '
'folder that contains a model_params.json, a smiles_language.pkl and a '
'weights folder.'
)
logger.info('Model restored. Now starting to craft it for the task')
# Process parameter file:
model_params = {}
with open(params_filepath) as fp:
model_params.update(json.load(fp))
model = update_mca_model(model, model_params)
logger.info('Model set up.')
for idx, (name, param) in enumerate(model.named_parameters()):
logger.info(
(idx, name, param.shape, f'Gradients: {param.requires_grad}')
)
ft_model_path = os.path.join(model_path, 'finetuned')
os.makedirs(os.path.join(ft_model_path, 'weights'), exist_ok=True)
os.makedirs(os.path.join(ft_model_path, 'results'), exist_ok=True)
logger.info('Now start data preprocessing...')
# Assemble datasets
smiles_dataset = SMILESDataset(
smi_filepath,
smiles_language=smiles_language,
padding_length=params.get('smiles_padding_length', None),
padding=params.get('padd_smiles', True),
add_start_and_stop=params.get('add_start_stop_token', True),
augment=params.get('augment_smiles', False),
canonical=params.get('canonical', False),
kekulize=params.get('kekulize', False),
all_bonds_explicit=params.get('all_bonds_explicit', False),
all_hs_explicit=params.get('all_hs_explicit', False),
randomize=params.get('randomize', False),
remove_bonddir=params.get('remove_bonddir', False),
remove_chirality=params.get('remove_chirality', False),
selfies=params.get('selfies', False),
sanitize=params.get('sanitize', True)
)
train_dataset = AnnotatedDataset(
annotations_filepath=train_scores_filepath,
dataset=smiles_dataset,
device=get_device()
)
train_loader = torch.utils.data.DataLoader(
dataset=train_dataset,
batch_size=params['batch_size'],
shuffle=True,
drop_last=True,
num_workers=params.get('num_workers', 0)
)
# Generally, if sanitize is True molecules are de-kekulized. Augmentation
# preserves the "kekulization", so if it is used, test data should be
# sanitized or canonicalized.
smiles_test_dataset = SMILESDataset(
smi_filepath,
smiles_language=smiles_language,
padding_length=params.get('smiles_padding_length', None),
padding=params.get('padd_smiles', True),
add_start_and_stop=params.get('add_start_stop_token', True),
augment=params.get('augment_test_smiles', False),
canonical=params.get('test_canonical', False),
kekulize=params.get('test_kekulize', False),
all_bonds_explicit=params.get('test_all_bonds_explicit', False),
all_hs_explicit=params.get('test_all_hs_explicit', False),
randomize=False,
remove_bonddir=params.get('test_remove_bonddir', False),
remove_chirality=params.get('test_remove_chirality', False),
selfies=params.get('selfies', False),
sanitize=params.get('test_sanitize', False)
)
# Dump eventually modified SMILES Language
smiles_language.save(os.path.join(ft_model_path, 'smiles_language.pkl'))
test_dataset = AnnotatedDataset(
annotations_filepath=test_scores_filepath,
dataset=smiles_test_dataset,
device=get_device()
)
test_loader = torch.utils.data.DataLoader(
dataset=test_dataset,
batch_size=params['batch_size'],
shuffle=False,
drop_last=False,
num_workers=params.get('num_workers', 0)
)
save_top_model = os.path.join(ft_model_path, 'weights/{}_{}_{}.pt')
num_t_params = sum(
p.numel() for p in model.parameters() if p.requires_grad
)
num_params = sum(p.numel() for p in model.parameters())
params.update({'params': num_params, 'trainable_params': num_t_params})
logger.info(
f'Number of parameters: {num_params} (trainable {num_t_params}).'
)
# Define optimizer only for those layers which require gradients
optimizer = (
OPTIMIZER_FACTORY[params.get('optimizer', 'adam')](
filter(lambda p: p.requires_grad, model.parameters()),
lr=params.get('lr', 0.00001)
)
)
# Dump params.json file with updated parameters.
with open(os.path.join(ft_model_path, 'model_params.json'), 'w') as fp:
json.dump(params, fp)
# Start training
logger.info('Training about to start...\n')
t = time()
min_loss, max_roc_auc = 1000000, 0
max_precision_recall_score = 0
for epoch in range(params['epochs']):
model.train()
logger.info(params_filepath.split('/')[-1])
logger.info(f"== Epoch [{epoch}/{params['epochs']}] ==")
train_loss = 0
for ind, (smiles, y) in enumerate(train_loader):
smiles = torch.squeeze(smiles.to(device))
y_hat, pred_dict = model(smiles)
loss = model.loss(y_hat, y.to(device))
optimizer.zero_grad()
loss.backward()
optimizer.step()
train_loss += loss.item()
logger.info(
'\t **** TRAINING **** '
f"Epoch [{epoch + 1}/{params['epochs']}], "
f'loss: {train_loss / len(train_loader):.5f}. '
f'This took {time() - t:.1f} secs.'
)
t = time()
# Measure validation performance
model.eval()
with torch.no_grad():
test_loss = 0
predictions = []
labels = []
for ind, (smiles, y) in enumerate(test_loader):
smiles = torch.squeeze(smiles.to(device))
y_hat, pred_dict = model(smiles)
predictions.append(y_hat)
# Copy y tensor since loss function applies downstream modification
labels.append(y.clone())
loss = model.loss(y_hat, y.to(device))
test_loss += loss.item()
predictions = torch.cat(predictions, dim=0).flatten().cpu().numpy()
labels = torch.cat(labels, dim=0).flatten().cpu().numpy()
# Remove NaNs from labels to compute scores
predictions = predictions[~np.isnan(labels)]
labels = labels[~np.isnan(labels)]
test_loss_a = test_loss / len(test_loader)
fpr, tpr, _ = roc_curve(labels, predictions)
test_roc_auc_a = auc(fpr, tpr)
# calculations for visualization plot
precision, recall, _ = precision_recall_curve(labels, predictions)
# score for precision vs accuracy
test_precision_recall_score = average_precision_score(
labels, predictions
)
logger.info(
f"\t **** TEST **** Epoch [{epoch + 1}/{params['epochs']}], "
f'loss: {test_loss_a:.5f}, , roc_auc: {test_roc_auc_a:.5f}, '
f'avg precision-recall score: {test_precision_recall_score:.5f}'
)
info = {
'test_auc': test_roc_auc_a,
'train_loss': train_loss / len(train_loader),
'test_loss': test_loss_a,
'best_test_auc': max_roc_auc,
'test_precision_recall_score': test_precision_recall_score,
'best_precision_recall_score': max_precision_recall_score,
}
def save(path, metric, typ, val=None):
model.save(path.format(typ, metric, params.get('model_fn', 'mca')))
if typ == 'best':
logger.info(
f'\t New best performance in {metric}'
f' with value : {val:.7f} in epoch: {epoch+1}'
)
if test_roc_auc_a > max_roc_auc:
max_roc_auc = test_roc_auc_a
info.update({'best_test_auc': max_roc_auc})
save(save_top_model, 'ROC-AUC', 'best', max_roc_auc)
np.save(
os.path.join(ft_model_path, 'results', 'best_predictions.npy'),
predictions
)
with open(
os.path.join(ft_model_path, 'results', 'metrics.json'), 'w'
) as f:
json.dump(info, f)
if test_precision_recall_score > max_precision_recall_score:
max_precision_recall_score = test_precision_recall_score
info.update(
{'best_precision_recall_score': max_precision_recall_score}
)
save(
save_top_model, 'precision-recall score', 'best',
max_precision_recall_score
)
if test_loss_a < min_loss:
min_loss = test_loss_a
save(save_top_model, 'loss', 'best', min_loss)
ep_loss = epoch
if (epoch + 1) % params.get('save_model', 100) == 0:
save(save_top_model, 'epoch', str(epoch))
logger.info(
'Overall best performances are: \n \t'
f'Loss = {min_loss:.4f} in epoch {ep_loss} '
)
save(save_top_model, 'training', 'done')
logger.info('Done with training, models saved, shutting down.')
if __name__ == '__main__':
args = parser.parse_args()
main(
args.train_scores_filepath, args.test_scores_filepath,
args.smi_filepath, args.model_path, args.params_filepath,
args.training_name
)
| [
"logging.basicConfig",
"logging.getLogger",
"argparse.ArgumentParser",
"sklearn.metrics.average_precision_score",
"sklearn.metrics.auc",
"json.dump",
"os.path.join",
"sklearn.metrics.precision_recall_curve",
"toxsmi.utils.transferlearning.update_mca_model",
"torch.cat",
"sklearn.metrics.roc_curve",
"numpy.isnan",
"toxsmi.utils.disable_rdkit_logging",
"json.load",
"torch.no_grad",
"time.time",
"paccmann_predictor.utils.utils.get_device"
] | [((692, 751), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'level': 'logging.DEBUG'}), '(stream=sys.stdout, level=logging.DEBUG)\n', (711, 751), False, 'import logging\n'), ((761, 786), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (784, 786), False, 'import argparse\n'), ((1526, 1587), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(message)s"""'}), "(level=logging.INFO, format='%(message)s')\n", (1545, 1587), False, 'import logging\n'), ((1601, 1638), 'logging.getLogger', 'logging.getLogger', (['f"""{training_name}"""'], {}), "(f'{training_name}')\n", (1618, 1638), False, 'import logging\n'), ((1677, 1700), 'toxsmi.utils.disable_rdkit_logging', 'disable_rdkit_logging', ([], {}), '()\n', (1698, 1700), False, 'from toxsmi.utils import disable_rdkit_logging\n'), ((1714, 1726), 'paccmann_predictor.utils.utils.get_device', 'get_device', ([], {}), '()\n', (1724, 1726), False, 'from paccmann_predictor.utils.utils import get_device\n'), ((3227, 3264), 'toxsmi.utils.transferlearning.update_mca_model', 'update_mca_model', (['model', 'model_params'], {}), '(model, model_params)\n', (3243, 3264), False, 'from toxsmi.utils.transferlearning import update_mca_model\n'), ((3492, 3529), 'os.path.join', 'os.path.join', (['model_path', '"""finetuned"""'], {}), "(model_path, 'finetuned')\n", (3504, 3529), False, 'import os\n'), ((6536, 6586), 'os.path.join', 'os.path.join', (['ft_model_path', '"""weights/{}_{}_{}.pt"""'], {}), "(ft_model_path, 'weights/{}_{}_{}.pt')\n", (6548, 6586), False, 'import os\n'), ((7425, 7431), 'time.time', 'time', ([], {}), '()\n', (7429, 7431), False, 'from time import time\n'), ((3546, 3584), 'os.path.join', 'os.path.join', (['ft_model_path', '"""weights"""'], {}), "(ft_model_path, 'weights')\n", (3558, 3584), False, 'import os\n'), ((3617, 3655), 'os.path.join', 'os.path.join', (['ft_model_path', '"""results"""'], {}), "(ft_model_path, 'results')\n", (3629, 3655), False, 'import os\n'), ((6080, 6130), 'os.path.join', 'os.path.join', (['ft_model_path', '"""smiles_language.pkl"""'], {}), "(ft_model_path, 'smiles_language.pkl')\n", (6092, 6130), False, 'import os\n'), ((7325, 7346), 'json.dump', 'json.dump', (['params', 'fp'], {}), '(params, fp)\n', (7334, 7346), False, 'import json\n'), ((8298, 8304), 'time.time', 'time', ([], {}), '()\n', (8302, 8304), False, 'from time import time\n'), ((9274, 9304), 'sklearn.metrics.roc_curve', 'roc_curve', (['labels', 'predictions'], {}), '(labels, predictions)\n', (9283, 9304), False, 'from sklearn.metrics import auc, average_precision_score, precision_recall_curve, roc_curve\n'), ((9330, 9343), 'sklearn.metrics.auc', 'auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (9333, 9343), False, 'from sklearn.metrics import auc, average_precision_score, precision_recall_curve, roc_curve\n'), ((9422, 9465), 'sklearn.metrics.precision_recall_curve', 'precision_recall_curve', (['labels', 'predictions'], {}), '(labels, predictions)\n', (9444, 9465), False, 'from sklearn.metrics import auc, average_precision_score, precision_recall_curve, roc_curve\n'), ((9546, 9590), 'sklearn.metrics.average_precision_score', 'average_precision_score', (['labels', 'predictions'], {}), '(labels, predictions)\n', (9569, 9590), False, 'from sklearn.metrics import auc, average_precision_score, precision_recall_curve, roc_curve\n'), ((1908, 1921), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (1917, 1921), False, 'import json\n'), ((1981, 2028), 'os.path.join', 
'os.path.join', (['model_path', '"""smiles_language.pkl"""'], {}), "(model_path, 'smiles_language.pkl')\n", (1993, 2028), False, 'import os\n'), ((3199, 3212), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (3208, 3212), False, 'import json\n'), ((4723, 4735), 'paccmann_predictor.utils.utils.get_device', 'get_device', ([], {}), '()\n', (4733, 4735), False, 'from paccmann_predictor.utils.utils import get_device\n'), ((6273, 6285), 'paccmann_predictor.utils.utils.get_device', 'get_device', ([], {}), '()\n', (6283, 6285), False, 'from paccmann_predictor.utils.utils import get_device\n'), ((7255, 7303), 'os.path.join', 'os.path.join', (['ft_model_path', '"""model_params.json"""'], {}), "(ft_model_path, 'model_params.json')\n", (7267, 7303), False, 'import os\n'), ((8381, 8396), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8394, 8396), False, 'import torch\n'), ((1828, 1873), 'os.path.join', 'os.path.join', (['model_path', '"""model_params.json"""'], {}), "(model_path, 'model_params.json')\n", (1840, 1873), False, 'import os\n'), ((9140, 9156), 'numpy.isnan', 'np.isnan', (['labels'], {}), '(labels)\n', (9148, 9156), True, 'import numpy as np\n'), ((9183, 9199), 'numpy.isnan', 'np.isnan', (['labels'], {}), '(labels)\n', (9191, 9199), True, 'import numpy as np\n'), ((10830, 10892), 'os.path.join', 'os.path.join', (['ft_model_path', '"""results"""', '"""best_predictions.npy"""'], {}), "(ft_model_path, 'results', 'best_predictions.npy')\n", (10842, 10892), False, 'import os\n'), ((11071, 11089), 'json.dump', 'json.dump', (['info', 'f'], {}), '(info, f)\n', (11080, 11089), False, 'import json\n'), ((10975, 11029), 'os.path.join', 'os.path.join', (['ft_model_path', '"""results"""', '"""metrics.json"""'], {}), "(ft_model_path, 'results', 'metrics.json')\n", (10987, 11029), False, 'import os\n'), ((2620, 2659), 'os.path.join', 'os.path.join', (['model_path', '"""weights"""', 'wp'], {}), "(model_path, 'weights', wp)\n", (2632, 2659), False, 'import os\n'), ((8253, 8259), 'time.time', 'time', ([], {}), '()\n', (8257, 8259), False, 'from time import time\n'), ((2433, 2468), 'os.path.join', 'os.path.join', (['model_path', '"""weights"""'], {}), "(model_path, 'weights')\n", (2445, 2468), False, 'import os\n'), ((8932, 8961), 'torch.cat', 'torch.cat', (['predictions'], {'dim': '(0)'}), '(predictions, dim=0)\n', (8941, 8961), False, 'import torch\n'), ((9003, 9027), 'torch.cat', 'torch.cat', (['labels'], {'dim': '(0)'}), '(labels, dim=0)\n', (9012, 9027), False, 'import torch\n')] |
"""
2D Graphers called in html2D.py
"""
from typing import List, Tuple
from plotly import graph_objects as go
from plotly import express as px
from plotly.subplots import make_subplots
from scipy import signal as sg
from plotly.offline import plot
from ipywidgets import widgets
from datetime import datetime
import pandas as pd
import csv
import os
import numpy as np
from private.logs import logDecorator
from private.helpers import kwargsHelper
MODULE_NAME = "graphers2D"
genericLog = logDecorator.genericLog(MODULE_NAME)
@genericLog
def plot_POI(df: "data frame"):
"""
    This graph, like plot_timeline below it, does not use the xAxis or indices
    params; it takes only the data frame and keeps this form so that graph
    creation stays standardized across graphers.
"""
# Pull Specific Variables
fs = df['fs'].tolist()[0]
head = df['Heading'].tolist()
p = df['Depth'].tolist()
roll = df['Roll'].tolist()
pitch = df['Pitch'].tolist()
# Calculate time
numData = len(p)
t = [x/fs for x in range(numData)]
t_hr = [x/3600 for x in t]
# Scaling Factor to reduce amount of data
scale = 10
# Reduce Data
sP = sg.decimate(p,scale).copy()
sRoll = sg.decimate(roll,scale).copy()
sPitch = sg.decimate(pitch,scale).copy()
sHead = sg.decimate(head,scale).copy()
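    # Note: sg.decimate applies an anti-aliasing low-pass filter before downsampling,
    # so it is preferable to naive slicing such as p[::scale].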
# Calculate Reduced time
numData = len(sP)
sT = [x/(fs/scale) for x in range(numData)]
sT_hr = [x/3600 for x in sT]
tView = 10 # seconds
N = int(fs*tView/2) # Number of points
# Create a list of Points of Interest
xPOI = [1, 1.5, 2.2] #hour
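    # Points of interest are currently hard-coded as elapsed times (in hours) into the record;
    # each is converted to a sample index below via 3600 * hours * fs.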
# Empty list for y coordinates of points of interest to be populated
yPOI = []
#Create zoomed in x and y data sets
zoomT = [[] for x in range(len(xPOI))]
zoomP = [[] for x in range(len(xPOI))]
zoomR = [[] for x in range(len(xPOI))]
# Create a set of Data for each of the points of interest
for k in range(len(xPOI)):
ind = 3600*xPOI[k]*fs # Get starting Index
# Add y of Points of Interest
yPOI.append(p[int(ind)])
# Zoomed Data
xdata = p
# Create a range of indexes across tView centered on the point selected
# Must take into account end conditions
if ind < int(tView/2)*fs:
indRange = np.arange(0, int(tView)*fs, 1)
elif ind > len(xdata) - int(tView/2)*fs - 1:
indRange = np.arange(len(xdata)-1-(tView)*fs, len(xdata)-1, 1)
else:
indRange = np.arange(int(ind - tView/2*fs), int(ind + tView/2*fs), 1)
# Loop through indRange and add data to respective points of interest
for i in indRange:
            # Select even indexes to reduce animation time
if i%2 == 0:
zoomT[k].append(t_hr[i])
zoomP[k].append(p[i])
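                # Angles (in degrees) used as the theta values of this POI's Scatterpolar trace;
                # together with the fixed radii [1, 0.25, 0.25, 0.25, 0.25, 1] they appear to sketch
                # the instantaneous roll orientation on the polar subplot.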
rollL = [roll[i], roll[i], (roll[i]+90), roll[i]+180, roll[i], roll[i]+180]
zoomR[k].append(rollL)
# fig = go.FigureWidget(make_subplots(
fig = go.Figure(make_subplots(
rows = 2, cols=2,
# Define what plot goes where and the type of plot
specs = [[{"rowspan":2},{"type":"polar"}],
[None, {}]]
)
)
# Add traces to Figure
trace = go.Scattergl(x=sT_hr, y=sP, mode='lines', name = "Depth")
fig.add_trace(trace, row = 1, col = 1)
fig.add_trace(go.Scattergl(x = xPOI, y = yPOI, mode = 'markers',
marker = dict(color = 'green', symbol = 'square', size = 10),
name = "Points of Interest"), row = 1, col = 1)
# Loop through points of interest and create traces for each for the subplots
for k in range(len(xPOI)):
nameK = "Depth of POI " + str(k)
# Polar Plot
fig.add_trace(go.Scatterpolar(r = [1, 0.25, 0.25, 0.25, 0.25, 1], theta = zoomR[k][0], mode = "lines", visible = False), row = 1, col = 2)
# Zoomed Depth Plot
fig.add_trace(go.Scattergl(x = zoomT[k], y = zoomP[k], mode = "lines", visible = False, name = nameK), row = 2, col = 2)
# Third Trace is for animation purposes
fig.add_trace(go.Scattergl(x= [], y = [], mode = 'markers', marker = dict(color="red", size = 10), visible = False), row = 2, col = 2)
'''
Update the layout of subplots
Have to go axis by axis
'''
# Update x-axis
fig.update_xaxes(title_text = "Time (hr)", row = 1, col = 1)
fig.update_xaxes(title_text = "Time (hr)", row = 1, col = 2)
# Update y-axis
fig.update_yaxes(title = "Depth (m)", autorange = "reversed", row = 1, col = 1)
fig.update_yaxes(title = "Depth (m)", autorange = "reversed", row = 2, col = 2)
# Create Button list based on POI
# Add the initial play button for animation
buttonList = [dict(label="Play",
method="animate",
args=[None]
)]
# Create a "visible" list for button creation based on number of POI
visibleList = [False]*(2 + 3*len(xPOI))
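    # Two always-visible base traces (full depth line and POI markers) plus three traces per POI
    # (polar plot, zoomed depth line, animation marker) added in the loop above.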
# The first two traces will always be visible
visibleList[0] = True
visibleList[1] = True
# Add a None button
buttonList.append(dict(label = 'None',
method = 'update',
args = [{'visible': visibleList},
{'title': 'No Selected Point',
'showlegend':True}]
))
# Loop through POI and add buttons
for k in range(len(xPOI)):
labelK = "POI " + str(k)
titleK = "POI " + str(k) + " Selected"
# Copy visible list
visibleListK = visibleList.copy()
# Create a list of the indexes of the traces associated with xPOI[k]
inds = [2+(3*k), 3+(3*k), 4+(3*k)]
# Flip visibilities to True
for i in inds:
visibleListK[i] = True
# Add button
buttonList.append(dict(label = labelK,
method = 'update',
args = [{'visible': visibleListK},
{'title': titleK,
'showlegend':True}]
))
# Update Entire Fig Layout
fig.update_layout(
title="Select Point of Interest",
# Add in buttons to select which point of interest to display
updatemenus=[dict(
type="buttons",
buttons = buttonList
)]
)
return [fig]
@genericLog
def plot_timeline(df: "data frame"):
"""
    Note that this graph does not use the xAxis or indices params;
    they are omitted purely to keep graph creation standardized.
"""
# Pull Specific Variables
fs = df['fs'].tolist()[0]
head = df['Heading'].tolist()
p = df['Depth'].tolist()
roll = df['Roll'].tolist()
pitch = df['Pitch'].tolist()
# Calculate time
numData = len(p)
t = [x/fs for x in range(numData)]
t_hr = [x/3600 for x in t]
'''
Added code to reduce the lag of the Figure
'''
# Scaling Factor to reduce amount of data
# A factor of 10 will reduce the data for example from 50 Hz to 5 Hz
scale = 10
# Reduce Data
sP = sg.decimate(p,scale).copy()
sRoll = sg.decimate(roll,scale).copy()
sPitch = sg.decimate(pitch,scale).copy()
sHead = sg.decimate(head,scale).copy()
# Calculate time - Reduced
numData = len(sP)
sT = [x/(fs/scale) for x in range(numData)]
sT_hr = [x/3600 for x in sT]
# Make Widget Figure
fig = go.Figure(
make_subplots(
# Deifne dimensions of subplot
rows = 2, cols=1,
# Define what plot goes where and the type of plot
specs = [[{}],
[{}]],
shared_xaxes = True
)
)
# Create traces for the data and add to figure
fig.add_trace(go.Scattergl(x = sT_hr, y = sP, mode = "lines", name = "Depth"), row = 1, col = 1)
fig.add_trace(go.Scattergl(x = sT_hr, y = sHead, mode = "lines", name = "Head"), row = 2, col = 1)
fig.add_trace(go.Scattergl(x = sT_hr, y = sPitch, mode = "lines", name = "Pitch"), row = 2, col = 1)
fig.add_trace(go.Scattergl(x = sT_hr, y = sRoll, mode = "lines", name = "Roll" ), row = 2, col = 1)
# Update x-axis
fig.update_xaxes(title = "Time (hr)", rangeslider = dict(visible = True), row = 2, col = 1)
# Update y-axis
fig.update_yaxes(title = "Depth (m)", autorange = "reversed", row = 1, col = 1)
return [fig]
# ! skipping this because it did not work
@genericLog
def all_graphs_sep(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
plots = make_subplots(rows=3, cols=2,
subplot_titles=("Heading", "Pitch",
"X Acceleration", "Roll", "Y Acceleration", "Z Acceleration"))
headData = df.Heading[startIndex:endIndex]
plots.add_trace(
go.Scatter(x=xAxis, y=headData, name = "heading"),
row=1, col=1
)
pitchData = df.Pitch[startIndex:endIndex]
plots.add_trace(
go.Scatter(x=xAxis, y=pitchData, name = "pitch"),
row=1, col=2
)
xAccelerationData = df["Accel_X"][startIndex:endIndex]
plots.add_trace(
go.Scatter(x=xAxis, y=xAccelerationData, name = "x-acceleration"),
row=2, col=1
)
rollData = df.Roll[startIndex:endIndex]
plots.add_trace(
go.Scatter(x=xAxis, y=rollData, name = "roll"),
row=2, col=2
)
yAccelerationData = df["Accel_Y"][startIndex:endIndex]
plots.add_trace(
go.Scatter(x=xAxis, y=yAccelerationData, name = "y-acceleration"),
row=3, col=1
)
zAccelerationData = df["Accel_Z"][startIndex:endIndex]
plots.add_trace(
go.Scatter(x=xAxis, y=zAccelerationData, name = "z-acceleration"),
row=3, col=2
)
plots.update_layout(height = 1500, width=1500, title_text="2D Plots")
return [plots]
@genericLog
def head(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
headData = df.Heading[startIndex:endIndex]
graph = px.line(x=xAxis, y=headData, labels={'x':'Time', 'y':'Heading'})
return [graph]
@genericLog
def accelerationX(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
xAccelerationData = df["Accel_X"][startIndex:endIndex]
graph = px.line(x=xAxis, y=xAccelerationData, labels={'x':'Time', 'y':'Acceleration in the x direction'})
return [graph]
@genericLog
def accelerationY(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
yAccelerationData = df["Accel_Y"][startIndex:endIndex]
graph = px.line(x=xAxis, y=yAccelerationData, labels={'x':'Time', 'y':'Acceleration in the y direction'})
return [graph]
@genericLog
def accelerationZ(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
zAccelerationData = df["Accel_Z"][startIndex:endIndex]
graph = px.line(x=xAxis, y=zAccelerationData, labels={'x':'Time', 'y':'Acceleration in the z direction'})
return [graph]
@genericLog
def pitch(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
rollData = df.Roll[startIndex:endIndex]
graph1 = px.line(x=xAxis, y=rollData, labels={'x':'Time', 'y':'Roll'})
pitchData = df.Pitch[startIndex:endIndex]
graph2 = px.line(x=xAxis, y=pitchData, labels={'x':'Time', 'y':'Pitch'})
graph3 = go.Figure()
graph3.add_trace(go.Scatter(x=xAxis, y=pitchData,
mode='lines',
name='pitch'))
graph3.add_trace(go.Scatter(x=xAxis, y=rollData,
mode='lines',
name='roll'))
return [graph1, graph2, graph3]
"""
Three added Jerk plots
    Input df has to be from "...._calculations.csv"
"""
@genericLog
def jerkX(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
xJerkData = df["Jerk_X"][startIndex:endIndex]
graph = px.line(x=xAxis, y=xJerkData, labels={'x':'Time', 'y':'Jerk in the x direction'})
return [graph]
@genericLog
def jerkY(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
yJerkData = df["Jerk_Y"][startIndex:endIndex]
graph = px.line(x=xAxis, y=yJerkData, labels={'x':'Time', 'y':'Jerk in the y direction'})
return [graph]
@genericLog
def jerkZ(df: "data frame", xAxis: List[int], indices: Tuple[int]):
startIndex, endIndex = indices
zJerkData = df["Jerk_Z"][startIndex:endIndex]
graph = px.line(x=xAxis, y=zJerkData, labels={'x':'Time', 'y':'Jerk in the z direction'})
return [graph]
DATA_AXIS_INDICES_KWARG = kwargsHelper.getGrapherDataAxisIndicesKwarg()
PRECALC_AXIS_INDICES_KWARG = kwargsHelper.getGrapherPreCalcAxisIndicesKwarg()
DATA_FILE_KWARG = kwargsHelper.getGrapherDataFileKwarg()
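# Registry of graphers keyed by the kwarg signature they expect; each entry pairs a grapher
# function with the HTML file name(s) for its output figure(s), presumably consumed by html2D.py
# (see the module docstring above).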
GRAPHERS = {
DATA_AXIS_INDICES_KWARG: [(head, 'heading.html'),
(pitch, 'roll.html', 'pitch.html', 'pitchroll.html'), (accelerationX, 'xAccel.html'),
(accelerationY, 'yAccel.html'), (accelerationZ, 'zAccel.html')],
PRECALC_AXIS_INDICES_KWARG: [(jerkX,'xJerk.html'),
(jerkY,'yJerk.html'),(jerkZ,'zJerk.html')],
DATA_FILE_KWARG: [(plot_POI, 'plot_POI.html'), (plot_timeline, 'plot_timeline.html')]
} | [
"plotly.subplots.make_subplots",
"private.helpers.kwargsHelper.getGrapherDataFileKwarg",
"plotly.graph_objects.Scattergl",
"private.helpers.kwargsHelper.getGrapherPreCalcAxisIndicesKwarg",
"private.helpers.kwargsHelper.getGrapherDataAxisIndicesKwarg",
"private.logs.logDecorator.genericLog",
"plotly.express.line",
"plotly.graph_objects.Figure",
"plotly.graph_objects.Scatter",
"scipy.signal.decimate",
"plotly.graph_objects.Scatterpolar"
] | [((492, 528), 'private.logs.logDecorator.genericLog', 'logDecorator.genericLog', (['MODULE_NAME'], {}), '(MODULE_NAME)\n', (515, 528), False, 'from private.logs import logDecorator\n'), ((12853, 12898), 'private.helpers.kwargsHelper.getGrapherDataAxisIndicesKwarg', 'kwargsHelper.getGrapherDataAxisIndicesKwarg', ([], {}), '()\n', (12896, 12898), False, 'from private.helpers import kwargsHelper\n'), ((12928, 12976), 'private.helpers.kwargsHelper.getGrapherPreCalcAxisIndicesKwarg', 'kwargsHelper.getGrapherPreCalcAxisIndicesKwarg', ([], {}), '()\n', (12974, 12976), False, 'from private.helpers import kwargsHelper\n'), ((12995, 13033), 'private.helpers.kwargsHelper.getGrapherDataFileKwarg', 'kwargsHelper.getGrapherDataFileKwarg', ([], {}), '()\n', (13031, 13033), False, 'from private.helpers import kwargsHelper\n'), ((3216, 3271), 'plotly.graph_objects.Scattergl', 'go.Scattergl', ([], {'x': 'sT_hr', 'y': 'sP', 'mode': '"""lines"""', 'name': '"""Depth"""'}), "(x=sT_hr, y=sP, mode='lines', name='Depth')\n", (3228, 3271), True, 'from plotly import graph_objects as go\n'), ((8724, 8856), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(3)', 'cols': '(2)', 'subplot_titles': "('Heading', 'Pitch', 'X Acceleration', 'Roll', 'Y Acceleration',\n 'Z Acceleration')"}), "(rows=3, cols=2, subplot_titles=('Heading', 'Pitch',\n 'X Acceleration', 'Roll', 'Y Acceleration', 'Z Acceleration'))\n", (8737, 8856), False, 'from plotly.subplots import make_subplots\n'), ((10185, 10251), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'headData', 'labels': "{'x': 'Time', 'y': 'Heading'}"}), "(x=xAxis, y=headData, labels={'x': 'Time', 'y': 'Heading'})\n", (10192, 10251), True, 'from plotly import express as px\n'), ((10466, 10569), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'xAccelerationData', 'labels': "{'x': 'Time', 'y': 'Acceleration in the x direction'}"}), "(x=xAxis, y=xAccelerationData, labels={'x': 'Time', 'y':\n 'Acceleration in the x direction'})\n", (10473, 10569), True, 'from plotly import express as px\n'), ((10780, 10883), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'yAccelerationData', 'labels': "{'x': 'Time', 'y': 'Acceleration in the y direction'}"}), "(x=xAxis, y=yAccelerationData, labels={'x': 'Time', 'y':\n 'Acceleration in the y direction'})\n", (10787, 10883), True, 'from plotly import express as px\n'), ((11094, 11197), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'zAccelerationData', 'labels': "{'x': 'Time', 'y': 'Acceleration in the z direction'}"}), "(x=xAxis, y=zAccelerationData, labels={'x': 'Time', 'y':\n 'Acceleration in the z direction'})\n", (11101, 11197), True, 'from plotly import express as px\n'), ((11386, 11449), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'rollData', 'labels': "{'x': 'Time', 'y': 'Roll'}"}), "(x=xAxis, y=rollData, labels={'x': 'Time', 'y': 'Roll'})\n", (11393, 11449), True, 'from plotly import express as px\n'), ((11508, 11573), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'pitchData', 'labels': "{'x': 'Time', 'y': 'Pitch'}"}), "(x=xAxis, y=pitchData, labels={'x': 'Time', 'y': 'Pitch'})\n", (11515, 11573), True, 'from plotly import express as px\n'), ((11586, 11597), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (11595, 11597), True, 'from plotly import graph_objects as go\n'), ((12161, 12248), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'xJerkData', 'labels': "{'x': 'Time', 'y': 'Jerk in the x direction'}"}), "(x=xAxis, 
y=xJerkData, labels={'x': 'Time', 'y':\n 'Jerk in the x direction'})\n", (12168, 12248), True, 'from plotly import express as px\n'), ((12442, 12529), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'yJerkData', 'labels': "{'x': 'Time', 'y': 'Jerk in the y direction'}"}), "(x=xAxis, y=yJerkData, labels={'x': 'Time', 'y':\n 'Jerk in the y direction'})\n", (12449, 12529), True, 'from plotly import express as px\n'), ((12723, 12810), 'plotly.express.line', 'px.line', ([], {'x': 'xAxis', 'y': 'zJerkData', 'labels': "{'x': 'Time', 'y': 'Jerk in the z direction'}"}), "(x=xAxis, y=zJerkData, labels={'x': 'Time', 'y':\n 'Jerk in the z direction'})\n", (12730, 12810), True, 'from plotly import express as px\n'), ((2977, 3068), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(2)', 'specs': "[[{'rowspan': 2}, {'type': 'polar'}], [None, {}]]"}), "(rows=2, cols=2, specs=[[{'rowspan': 2}, {'type': 'polar'}], [\n None, {}]])\n", (2990, 3068), False, 'from plotly.subplots import make_subplots\n'), ((7555, 7623), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(1)', 'specs': '[[{}], [{}]]', 'shared_xaxes': '(True)'}), '(rows=2, cols=1, specs=[[{}], [{}]], shared_xaxes=True)\n', (7568, 7623), False, 'from plotly.subplots import make_subplots\n'), ((7910, 7965), 'plotly.graph_objects.Scattergl', 'go.Scattergl', ([], {'x': 'sT_hr', 'y': 'sP', 'mode': '"""lines"""', 'name': '"""Depth"""'}), "(x=sT_hr, y=sP, mode='lines', name='Depth')\n", (7922, 7965), True, 'from plotly import graph_objects as go\n'), ((8011, 8068), 'plotly.graph_objects.Scattergl', 'go.Scattergl', ([], {'x': 'sT_hr', 'y': 'sHead', 'mode': '"""lines"""', 'name': '"""Head"""'}), "(x=sT_hr, y=sHead, mode='lines', name='Head')\n", (8023, 8068), True, 'from plotly import graph_objects as go\n'), ((8114, 8173), 'plotly.graph_objects.Scattergl', 'go.Scattergl', ([], {'x': 'sT_hr', 'y': 'sPitch', 'mode': '"""lines"""', 'name': '"""Pitch"""'}), "(x=sT_hr, y=sPitch, mode='lines', name='Pitch')\n", (8126, 8173), True, 'from plotly import graph_objects as go\n'), ((8219, 8276), 'plotly.graph_objects.Scattergl', 'go.Scattergl', ([], {'x': 'sT_hr', 'y': 'sRoll', 'mode': '"""lines"""', 'name': '"""Roll"""'}), "(x=sT_hr, y=sRoll, mode='lines', name='Roll')\n", (8231, 8276), True, 'from plotly import graph_objects as go\n'), ((8986, 9033), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'headData', 'name': '"""heading"""'}), "(x=xAxis, y=headData, name='heading')\n", (8996, 9033), True, 'from plotly import graph_objects as go\n'), ((9140, 9186), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'pitchData', 'name': '"""pitch"""'}), "(x=xAxis, y=pitchData, name='pitch')\n", (9150, 9186), True, 'from plotly import graph_objects as go\n'), ((9306, 9369), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'xAccelerationData', 'name': '"""x-acceleration"""'}), "(x=xAxis, y=xAccelerationData, name='x-acceleration')\n", (9316, 9369), True, 'from plotly import graph_objects as go\n'), ((9474, 9518), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'rollData', 'name': '"""roll"""'}), "(x=xAxis, y=rollData, name='roll')\n", (9484, 9518), True, 'from plotly import graph_objects as go\n'), ((9638, 9701), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'yAccelerationData', 'name': '"""y-acceleration"""'}), "(x=xAxis, y=yAccelerationData, name='y-acceleration')\n", (9648, 9701), True, 'from 
plotly import graph_objects as go\n'), ((9821, 9884), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'zAccelerationData', 'name': '"""z-acceleration"""'}), "(x=xAxis, y=zAccelerationData, name='z-acceleration')\n", (9831, 9884), True, 'from plotly import graph_objects as go\n'), ((11619, 11679), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'pitchData', 'mode': '"""lines"""', 'name': '"""pitch"""'}), "(x=xAxis, y=pitchData, mode='lines', name='pitch')\n", (11629, 11679), True, 'from plotly import graph_objects as go\n'), ((11750, 11808), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'xAxis', 'y': 'rollData', 'mode': '"""lines"""', 'name': '"""roll"""'}), "(x=xAxis, y=rollData, mode='lines', name='roll')\n", (11760, 11808), True, 'from plotly import graph_objects as go\n'), ((1125, 1146), 'scipy.signal.decimate', 'sg.decimate', (['p', 'scale'], {}), '(p, scale)\n', (1136, 1146), True, 'from scipy import signal as sg\n'), ((1165, 1189), 'scipy.signal.decimate', 'sg.decimate', (['roll', 'scale'], {}), '(roll, scale)\n', (1176, 1189), True, 'from scipy import signal as sg\n'), ((1209, 1234), 'scipy.signal.decimate', 'sg.decimate', (['pitch', 'scale'], {}), '(pitch, scale)\n', (1220, 1234), True, 'from scipy import signal as sg\n'), ((1253, 1277), 'scipy.signal.decimate', 'sg.decimate', (['head', 'scale'], {}), '(head, scale)\n', (1264, 1277), True, 'from scipy import signal as sg\n'), ((3750, 3852), 'plotly.graph_objects.Scatterpolar', 'go.Scatterpolar', ([], {'r': '[1, 0.25, 0.25, 0.25, 0.25, 1]', 'theta': 'zoomR[k][0]', 'mode': '"""lines"""', 'visible': '(False)'}), "(r=[1, 0.25, 0.25, 0.25, 0.25, 1], theta=zoomR[k][0], mode=\n 'lines', visible=False)\n", (3765, 3852), True, 'from plotly import graph_objects as go\n'), ((3925, 4002), 'plotly.graph_objects.Scattergl', 'go.Scattergl', ([], {'x': 'zoomT[k]', 'y': 'zoomP[k]', 'mode': '"""lines"""', 'visible': '(False)', 'name': 'nameK'}), "(x=zoomT[k], y=zoomP[k], mode='lines', visible=False, name=nameK)\n", (3937, 4002), True, 'from plotly import graph_objects as go\n'), ((7202, 7223), 'scipy.signal.decimate', 'sg.decimate', (['p', 'scale'], {}), '(p, scale)\n', (7213, 7223), True, 'from scipy import signal as sg\n'), ((7242, 7266), 'scipy.signal.decimate', 'sg.decimate', (['roll', 'scale'], {}), '(roll, scale)\n', (7253, 7266), True, 'from scipy import signal as sg\n'), ((7286, 7311), 'scipy.signal.decimate', 'sg.decimate', (['pitch', 'scale'], {}), '(pitch, scale)\n', (7297, 7311), True, 'from scipy import signal as sg\n'), ((7330, 7354), 'scipy.signal.decimate', 'sg.decimate', (['head', 'scale'], {}), '(head, scale)\n', (7341, 7354), True, 'from scipy import signal as sg\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# TRV Controller © Autolog 2020 - 2022
# noinspection PyUnresolvedReferences, PyPep8Naming
try:
# noinspection PyUnresolvedReferences
import indigo
except ImportError:
pass
import collections
import datetime
import logging
import platform
import queue
import operator
import sys
import threading
import traceback
import xml.etree.ElementTree as eTree
from constants import *
from trvHandler import ThreadTrvHandler
from delayHandler import ThreadDelayHandler
from zwave_interpreter.zwave_interpreter import *
from zwave_interpreter.zwave_command_class_wake_up import *
from zwave_interpreter.zwave_command_class_switch_multilevel import *
ZW_THERMOSTAT_SETPOINT_SET = 0x01
ZW_THERMOSTAT_MODE_SET = 0x01
# noinspection PyPep8Naming
def convertListToHexStr(byteList):
return ' '.join(["%02X" % byte for byte in byteList])
# noinspection PyPep8Naming
def secondsFromHHMM(hhmm):
    # Convert an 'HH:MM' string to seconds (int)
hh = int(hhmm[0:2])
mm = int(hhmm[3:5])
seconds = (hh * 3600) + (mm * 60)
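    # e.g. '06:30' -> (6 * 3600) + (30 * 60) = 23400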
return seconds
# noinspection PyPep8Naming
def calculateSecondsUntilSchedulesRestated():
# Calculate number of seconds until five minutes after next midnight
tomorrow = datetime.datetime.now() + datetime.timedelta(1)
midnight = datetime.datetime(year=tomorrow.year, month=tomorrow.month, day=tomorrow.day, hour=0, minute=0, second=0)
secondsToMidnight = int((midnight - datetime.datetime.now()).seconds) # Seconds to midnight
# secondsSinceMidnight = (24 * 60 * 60) - secondsToMidnight
secondsInFiveMinutes = (5 * 60) # 5 minutes in seconds
secondsUntilSchedulesRestated = secondsToMidnight + secondsInFiveMinutes # Calculate number of seconds until 5 minutes after next midnight
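    # e.g. if called at 18:00, this returns 21600 + 300 = 21900 seconds, i.e. 00:05 the next day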
# secondsUntilSchedulesRestated = 60 # TESTING = 1 Minute
return secondsUntilSchedulesRestated
# noinspection PyPep8Naming
class Plugin(indigo.PluginBase):
def __init__(self, pluginId, pluginDisplayName, pluginVersion, pluginPrefs):
indigo.PluginBase.__init__(self, pluginId, pluginDisplayName, pluginVersion, pluginPrefs)
# Initialise dictionary to store plugin Globals
self.globals = dict()
self.globals['zwave'] = dict()
self.globals['zwave']['addressToDevice'] = dict()
        self.globals['zwave']['WatchList'] = set()  # TRVs, Valves and Remotes associated with a TRV Controller will get added to this SET on TRV Controller device start
self.globals['zwave']['node_to_device_name'] = dict()
# # Initialise Indigo plugin info
# self.globals[PLUGIN_INFO] = {}
# self.globals[PLUGIN_INFO][PLUGIN_ID] = plugin_id
# self.globals[PLUGIN_INFO][PLUGIN_DISPLAY_NAME] = plugin_display_name
# self.globals[PLUGIN_INFO][PLUGIN_VERSION] = plugin_version
# self.globals[PLUGIN_INFO][PATH] = indigo.server.getInstallFolderPath()
# self.globals[PLUGIN_INFO][API_VERSION] = indigo.server.apiVersion
# self.globals[PLUGIN_INFO][ADDRESS] = indigo.server.address
# Initialise Indigo plugin info
self.globals['pluginInfo'] = dict()
self.globals['pluginInfo']['pluginId'] = pluginId
self.globals['pluginInfo']['pluginDisplayName'] = pluginDisplayName
self.globals['pluginInfo']['pluginVersion'] = pluginVersion
self.globals['pluginInfo']['path'] = indigo.server.getInstallFolderPath() # e.g. '/Library/Application Support/Perceptive Automation/Indigo 7.2'
self.globals['pluginInfo']['apiVersion'] = indigo.server.apiVersion
self.globals['pluginInfo']['address'] = indigo.server.address
# Initialise dictionary for debug in plugin Globals
self.globals['debug'] = dict()
self.globals['debug']['general'] = logging.INFO # For general debugging of the main thread
self.globals['debug']['trvHandler'] = logging.INFO # For debugging TRV handler thread
self.globals['debug']['delayHandler'] = logging.INFO # For debugging Delay handler thread
self.globals['debug']['polling'] = logging.INFO # For polling debugging
self.globals['debug']['previousGeneral'] = logging.INFO # For general debugging of the main thread
self.globals['debug']['previousTrvHandler'] = logging.INFO # For debugging TRV handler thread
self.globals['debug']['previousDelayHandler'] = logging.INFO # For debugging Delay handler thread
self.globals['debug']['previousPolling'] = logging.INFO # For polling debugging
# Setup Logging - Logging info:
# self.indigo_log_handler - writes log messages to Indigo Event Log
# self.plugin_file_handler - writes log messages to the plugin log
log_format = logging.Formatter("%(asctime)s.%(msecs)03d\t%(levelname)-12s\t%(name)s.%(funcName)-25s %(msg)s", datefmt="%Y-%m-%d %H:%M:%S")
self.plugin_file_handler.setFormatter(log_format)
self.plugin_file_handler.setLevel(K_LOG_LEVEL_INFO) # Logging Level for plugin log file
self.indigo_log_handler.setLevel(K_LOG_LEVEL_INFO) # Logging level for Indigo Event Log
self.logger = logging.getLogger("Plugin.TRV")
# Now logging is set-up, output Initialising Message
startup_message_ui = "\n" # Start with a line break
startup_message_ui += f"{' Initialising TRV Controller Plugin Plugin ':={'^'}130}\n"
startup_message_ui += f"{'Plugin Name:':<31} {self.globals['pluginInfo']['pluginDisplayName']}\n"
startup_message_ui += f"{'Plugin Version:':<31} {self.globals['pluginInfo']['pluginVersion']}\n"
startup_message_ui += f"{'Plugin ID:':<31} {self.globals['pluginInfo']['pluginId']}\n"
startup_message_ui += f"{'Indigo Version:':<31} {indigo.server.version}\n"
startup_message_ui += f"{'Indigo License:':<31} {indigo.server.licenseStatus}\n"
startup_message_ui += f"{'Indigo API Version:':<31} {indigo.server.apiVersion}\n"
machine = platform.machine()
startup_message_ui += f"{'Architecture:':<31} {machine}\n"
sys_version = sys.version.replace("\n", "")
startup_message_ui += f"{'Python Version:':<31} {sys_version}\n"
startup_message_ui += f"{'Mac OS Version:':<31} {platform.mac_ver()[0]}\n"
startup_message_ui += f"{'':={'^'}130}\n"
self.logger.info(startup_message_ui)
# Initialise dictionary to store configuration info
self.globals['config'] = dict()
# Initialise dictionary to store internal details about TRV Controller devices
self.globals['trvc'] = dict()
# Initialise dictionary to store internal details about heating (Boiler) devices and variables
self.globals['heaterDevices'] = dict()
self.globals['heaterVariables'] = dict()
# Initialise dictionary to store message queues
self.globals['queues'] = dict()
self.globals['queues']['trvHandler'] = dict()
self.globals['queues']['delay'] = dict()
self.globals['queues']['initialised'] = False
# Initialise dictionary to store heating schedules
self.globals['schedules'] = dict()
# Initialise count of device updates detected - used for debugging purposes
self.globals['deviceUpdatedSequenceCount'] = 0
# Initialise dictionary to store timers
self.globals['timers'] = dict()
self.globals['timers']['heaters'] = dict()
self.globals['timers']['heatingSchedules'] = dict()
self.globals['timers']['command'] = dict()
self.globals['timers']['SpiritPolling'] = dict()
self.globals['timers']['SpiritValveCommands'] = dict()
self.globals['timers']['advanceCancel'] = dict()
self.globals['timers']['boost'] = dict()
self.globals['timers']['raceCondition'] = dict()
self.globals['timers']['zwaveWakeupCheck'] = dict()
# Initialise dictionary to store threads
self.globals['threads'] = dict()
self.globals['threads']['polling'] = dict() # There is only one 'polling' thread for all TRV devices
self.globals['threads']['trvHandler'] = dict() # There is only one 'trvHandler' thread for all TRV devices
self.globals['threads']['delayHandler'] = dict() # There is only one 'delayHandler' thread for all TRV devices
self.globals['threads']['runConcurrentActive'] = False
self.globals['lock'] = threading.Lock()
self.globals['devicesToTrvControllerTable'] = dict()
# Initialise dictionary for constants
self.globals['constant'] = dict()
self.globals['constant']['defaultDatetime'] = datetime.datetime.strptime('2000-01-01', '%Y-%m-%d')
# Setup dictionary of supported TRV models
xmlFile = f'{self.globals["pluginInfo"]["path"]}/Plugins/TRV.indigoPlugin/Contents/Resources/supportedThermostatModels.xml'
tree = eTree.parse(xmlFile)
root = tree.getroot()
self.globals['supportedTrvModels'] = dict()
for model in root.findall('model'):
trv_model_name = model.get('name')
self.globals['supportedTrvModels'][trv_model_name] = dict()
            self.globals['supportedTrvModels'][trv_model_name]['supportsWakeup'] = (model.find('supportsWakeup').text == 'true')
            self.globals['supportedTrvModels'][trv_model_name]['supportsTemperatureReporting'] = (model.find('supportsTemperatureReporting').text == 'true')
            self.globals['supportedTrvModels'][trv_model_name]['supportsHvacOnOff'] = (model.find('supportsHvacOnOff').text == 'true')
            self.globals['supportedTrvModels'][trv_model_name]['supportsValveControl'] = (model.find('supportsValveControl').text == 'true')
            self.globals['supportedTrvModels'][trv_model_name]['supportsManualSetpoint'] = (model.find('supportsManualSetpoint').text == 'true')
self.globals['supportedTrvModels'][trv_model_name]['setpointHeatMinimum'] = float(model.find('setpointHeatMinimum').text)
self.globals['supportedTrvModels'][trv_model_name]['setpointHeatMaximum'] = float(model.find('setpointHeatMaximum').text)
# self.logger.error(f'XML [SUPPORTED TRV MODEL] =\n{self.globals["supportedTrvModels"][trv_model_name]}')
# Setup dictionary of fully supported Heat Source Controller Devices
xmlFile = f'{self.globals["pluginInfo"]["path"]}/Plugins/TRV.indigoPlugin/Contents/Resources/supportedHeatSourceControllers.xml'
tree = eTree.parse(xmlFile)
root = tree.getroot()
self.globals['supportedHeatSourceControllers'] = dict()
for model in root.findall('model'):
heat_source_controller_model_name = model.get('name')
self.globals['supportedHeatSourceControllers'][heat_source_controller_model_name] = ''
# self.logger.error(f'XML [SUPPORTED HEAT SOURCE CONTROLLER] =\n{heat_source_controller_model_name}')
# Set Plugin Config Values
self.closedPrefsConfigUi(pluginPrefs, False)
# TODO: Remove below as actioned in startup method - 18-March-2022
#self.zwi = ZwaveInterpreter(self.logger, indigo.devices) # Instantiate and initialise Z-Wave Interpreter Class
# Instantiate and initialise Z-Wave Interpreter Class
# self.zwi = ZwaveInterpreter(self.exception_handler, self.logger, indigo.devices) # noqa [Defined outside __init__] Instantiate and initialise Z-Wave Interpreter Object
def __del__(self):
indigo.PluginBase.__del__(self)
def exception_handler(self, exception_error_message, log_failing_statement):
filename, line_number, method, statement = traceback.extract_tb(sys.exc_info()[2])[-1]
module = filename.split('/')
log_message = f"'{exception_error_message}' in module '{module[-1]}', method '{method}'"
if log_failing_statement:
log_message = log_message + f"\n Failing statement [line {line_number}]: '{statement}'"
else:
log_message = log_message + f" at line {line_number}"
self.logger.error(log_message)
def actionControlThermostat(self, action, dev):
self.logger.debug(f' Thermostat \'{dev.name}\', Action received: \'{action.description}\'')
self.logger.debug(f'... Action details:\n{action}\n')
trvCtlrDevId = dev.id
# ##### SET HVAC MODE ######
if action.thermostatAction == indigo.kThermostatAction.SetHvacMode:
hvacMode = action.actionMode
if hvacMode == HVAC_COOL or hvacMode == HVAC_AUTO: # Don't allow HVAC Mode of Cool or Auto
self.logger.error(f'TRV Controller \'{dev.name}\' does not support action \'{action.description}\' - request ignored')
else:
# dev.updateStateOnServer('hvacOperationMode', action.actionMode)
queuedCommand = CMD_UPDATE_TRV_CONTROLLER_STATES
updateList = dict()
updateList[UPDATE_CONTROLLER_HVAC_OPERATION_MODE] = hvacMode
updateList[UPDATE_CONTROLLER_MODE] = CONTROLLER_MODE_UI
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
# ###### DECREASE HEAT SETPOINT ######
elif action.thermostatAction == indigo.kThermostatAction.DecreaseHeatSetpoint:
newSetpoint = dev.heatSetpoint - action.actionValue
if newSetpoint < float(self.globals['trvc'][dev.id]['setpointHeatMinimum']):
if dev.heatSetpoint > float(self.globals['trvc'][dev.id]['setpointHeatMinimum']):
newSetpoint = float(self.globals['trvc'][dev.id]['setpointHeatMinimum'])
else:
self.logger.info(f'TRV Controller \'{dev.name}\' Minimum Heat Setpoint is \'{self.globals["trvc"][dev.id]["setpointHeatMinimum"]}\' - Decrease Heat Setpoint request ignored')
return
# keyValueList = [
# {'key': 'controllerMode', 'value': CONTROLLER_MODE_UI},
# {'key': 'controllerModeUi', 'value': CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_UI]},
# {'key': 'setpointHeat', 'value': newSetpoint}
# ]
# dev.updateStatesOnServer(keyValueList)
queuedCommand = CMD_UPDATE_TRV_CONTROLLER_STATES
updateList = dict()
updateList[UPDATE_CONTROLLER_HEAT_SETPOINT] = newSetpoint
updateList[UPDATE_CONTROLLER_MODE] = CONTROLLER_MODE_UI
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
# ###### INCREASE HEAT SETPOINT ######
elif action.thermostatAction == indigo.kThermostatAction.IncreaseHeatSetpoint:
newSetpoint = dev.heatSetpoint + action.actionValue
if newSetpoint > float(self.globals['trvc'][dev.id]['setpointHeatMaximum']):
if dev.heatSetpoint < float(self.globals['trvc'][dev.id]['setpointHeatMaximum']):
newSetpoint = float(self.globals['trvc'][dev.id]['setpointHeatMaximum'])
else:
self.logger.info(f'TRV Controller \'{dev.name}\' Maximum Heat Setpoint is \'{self.globals["trvc"][dev.id]["setpointHeatMaximum"]}\' - Increase Heat Setpoint request ignored')
return
# keyValueList = [
# {'key': 'controllerMode', 'value': CONTROLLER_MODE_UI},
# {'key': 'controllerModeUi', 'value': CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_UI]},
# {'key': 'setpointHeat', 'value': newSetpoint}
# ]
# dev.updateStatesOnServer(keyValueList)
queuedCommand = CMD_UPDATE_TRV_CONTROLLER_STATES
updateList = dict()
updateList[UPDATE_CONTROLLER_HEAT_SETPOINT] = newSetpoint
updateList[UPDATE_CONTROLLER_MODE] = CONTROLLER_MODE_UI
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
# ###### SET HEAT SETPOINT ######
elif action.thermostatAction == indigo.kThermostatAction.SetHeatSetpoint:
newSetpoint = action.actionValue
# keyValueList = [
# {'key': 'controllerMode', 'value': CONTROLLER_MODE_UI},
# {'key': 'controllerModeUi', 'value': CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_UI]},
# {'key': 'setpointHeat', 'value': newSetpoint}
# ]
# dev.updateStatesOnServer(keyValueList)
queuedCommand = CMD_UPDATE_TRV_CONTROLLER_STATES
updateList = dict()
updateList[UPDATE_CONTROLLER_HEAT_SETPOINT] = newSetpoint
updateList[UPDATE_CONTROLLER_MODE] = CONTROLLER_MODE_UI
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
# ###### REQUEST STATUS ALL ETC ######
elif action.thermostatAction in [indigo.kThermostatAction.RequestStatusAll, indigo.kThermostatAction.RequestMode,
indigo.kThermostatAction.RequestEquipmentState, indigo.kThermostatAction.RequestTemperatures, indigo.kThermostatAction.RequestHumidities,
indigo.kThermostatAction.RequestDeadbands, indigo.kThermostatAction.RequestSetpoints]:
if self.globals['trvc'][action.deviceId]['trvDevId'] != 0:
indigo.device.statusRequest(self.globals['trvc'][action.deviceId]['trvDevId'])
if self.globals['trvc'][action.deviceId]['remoteDevId'] != 0:
indigo.device.statusRequest(self.globals['trvc'][action.deviceId]['remoteDevId'])
else:
self.logger.error(f'Unknown Action for TRV Controller \'{dev.name}\': Action \'{action.description}\' Ignored')
# noinspection PyUnusedLocal
def closedDeviceConfigUi(self, valuesDict, userCancelled, typeId, trvCtlrDevId):
# valuesDict, typeId, trvCtlrDevId arguments are not used
try:
self.logger.debug(f'\'closedDeviceConfigUi\' called with userCancelled = {str(userCancelled)}')
if userCancelled:
return
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def closedPrefsConfigUi(self, valuesDict, userCancelled):
try:
            self.logger.threaddebug(f'\'closedPrefsConfigUi\' called with userCancelled = {str(userCancelled)}')
if userCancelled:
return
self.globals['config']['disableHeatSourceDeviceListFilter'] = valuesDict.get('disableHeatSourceDeviceListFilter', False)
# Delay Queue Options
self.globals['config']['delayQueueSeconds'] = int(valuesDict.get("delayQueueSeconds", 0))
# CSV File Handling (for e.g. Matplotlib plugin)
self.globals['config']['csvStandardEnabled'] = valuesDict.get("csvStandardEnabled", False)
self.globals['config']['csvPostgresqlEnabled'] = valuesDict.get("csvPostgresqlEnabled", False)
self.globals['config']['postgresqlUser'] = valuesDict.get("postgresqlUser", '')
self.globals['config']['postgresqlPassword'] = valuesDict.get("postgresqlPassword", '')
self.globals['config']['datagraphCliPath'] = valuesDict.get("datagraphCliPath", '')
self.globals['config']['datagraphGraphTemplatesPath'] = valuesDict.get("datagraphGraphTemplatesPath", '')
self.globals['config']['datagraphImagesPath'] = valuesDict.get("datagraphImagesPath", '')
self.globals['config']['csvPath'] = valuesDict.get("csvPath", '')
self.globals['config']['csvPrefix'] = valuesDict.get("csvPrefix", 'TRV_Plugin')
# Create TRV Variable folder name (if required)
self.globals['config']['trvVariableFolderName'] = valuesDict.get("trvVariableFolderName", 'TRV')
if self.globals['config']['trvVariableFolderName'] == '':
self.globals['config']['trvVariableFolderId'] = 0 # Not required
else:
if self.globals['config']['trvVariableFolderName'] not in indigo.variables.folders:
indigo.variables.folder.create(self.globals['config']['trvVariableFolderName'])
self.globals['config']['trvVariableFolderId'] = indigo.variables.folders.getId(self.globals['config']['trvVariableFolderName'])
# Check monitoring / debug / filtered IP address options
# Get required Event Log and Plugin Log logging levels
plugin_log_level = int(valuesDict.get("pluginLogLevel", K_LOG_LEVEL_INFO))
event_log_level = int(valuesDict.get("eventLogLevel", K_LOG_LEVEL_INFO))
# Ensure following logging level messages are output
self.indigo_log_handler.setLevel(K_LOG_LEVEL_INFO)
self.plugin_file_handler.setLevel(K_LOG_LEVEL_INFO)
# Output required logging levels and TP Message Monitoring requirement to logs
self.logger.info(f"Logging to Indigo Event Log at the '{K_LOG_LEVEL_TRANSLATION[event_log_level]}' level")
self.logger.info(f"Logging to Plugin Event Log at the '{K_LOG_LEVEL_TRANSLATION[plugin_log_level]}' level")
# Now set required logging levels
self.indigo_log_handler.setLevel(event_log_level)
self.plugin_file_handler.setLevel(plugin_log_level)
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def deviceStartComm(self, trvcDev):
try:
trvCtlrDevId = trvcDev.id
# Following code makes sure that the polling Sequence is maintained across Device start
# pollingSequence = 0
# if trvCtlrDevId in self.globals['trvc']:
# if 'pollingSequence' in self.globals['trvc'][trvCtlrDevId]:
# pollingSequence = self.globals['trvc'][trvCtlrDevId]['pollingSequence']
self.globals['trvc'][trvCtlrDevId] = dict()
# self.globals['trvc'][trvCtlrDevId]['pollingSequence'] = pollingSequence
self.globals['trvc'][trvCtlrDevId]['deviceStarted'] = False
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'] = dict()
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trvController'] = dict()
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trv'] = dict()
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['remote'] = dict()
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['radiator'] = dict()
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trvController']['updateSecondsSinceMidnight'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trvController']['updatesInLastSecond'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trvController']['updatesInLastSecondMaximum'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trv']['updateSecondsSinceMidnight'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trv']['updatesInLastSecond'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['trv']['updatesInLastSecondMaximum'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['remote']['updateSecondsSinceMidnight'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['remote']['updatesInLastSecond'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['remote']['updatesInLastSecondMaximum'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['radiator']['updateSecondsSinceMidnight'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['radiator']['updatesInLastSecond'] = 0
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector']['radiator']['updatesInLastSecondMaximum'] = 0
if (trvcDev.pluginProps.get('version', '0.0')) != self.globals['pluginInfo']['pluginVersion']:
pluginProps = trvcDev.pluginProps
pluginProps["version"] = self.globals['pluginInfo']['pluginVersion']
trvcDev.replacePluginPropsOnServer(pluginProps)
return
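                # Replacing the plugin props restarts the device, so return now;
                # deviceStartComm will be re-invoked with the updated version property.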
currentTime = indigo.server.getTime()
trvcDev.stateListOrDisplayStateIdChanged() # Ensure latest devices.xml is being used
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulComm'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommTrv'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRemote'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['eventReceivedCountRemote'] = 0
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRadiator'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['eventReceivedCountRadiator'] = 0
self.globals['trvc'][trvCtlrDevId]['hideTempBroadcast'] = bool(trvcDev.pluginProps.get('hideTempBroadcast', False)) # Hide Temperature Broadcast in Event Log Flag
self.globals['trvc'][trvCtlrDevId]['trvDevId'] = int(trvcDev.pluginProps.get('trvDevId', 0)) # ID of TRV device
# self.globals['trvc'][trvCtlrDevId]['trvDeltaMax'] = float(trvcDev.pluginProps.get('trvDeltaMax', 0.0))
self.globals['trvc'][trvCtlrDevId]['valveDevId'] = 0
self.globals['trvc'][trvCtlrDevId]['valvePercentageOpen'] = 0
self.globals['trvc'][trvCtlrDevId]['csvCreationMethod'] = 0
self.globals['trvc'][trvCtlrDevId]['csvStandardMode'] = 1
self.globals['trvc'][trvCtlrDevId]['updateCsvFile'] = False
self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles'] = False
self.globals['trvc'][trvCtlrDevId]['updateAllCsvFilesViaPostgreSQL'] = False
self.globals['trvc'][trvCtlrDevId]['updateDatagraphCsvFileViaPostgreSQL'] = False
self.globals['trvc'][trvCtlrDevId]['csvCreationMethod'] = int(trvcDev.pluginProps.get('csvCreationMethod', 0))
if self.globals['config']['csvStandardEnabled']:
if self.globals['trvc'][trvCtlrDevId]['csvCreationMethod'] == 1:
self.globals['trvc'][trvCtlrDevId]['updateCsvFile'] = True
if self.globals['trvc'][trvCtlrDevId]['csvStandardMode'] == 2:
self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles'] = True
if self.globals['config']['csvPostgresqlEnabled']:
if self.globals['trvc'][trvCtlrDevId]['csvCreationMethod'] == 2 or self.globals['trvc'][trvCtlrDevId]['csvCreationMethod'] == 3:
if self.globals['trvc'][trvCtlrDevId]['csvCreationMethod'] == 2:
self.globals['trvc'][trvCtlrDevId]['updateAllCsvFilesViaPostgreSQL'] = True
else:
self.globals['trvc'][trvCtlrDevId]['updateDatagraphCsvFileViaPostgreSQL'] = True
self.globals['trvc'][trvCtlrDevId]['postgresqlUser'] = self.globals['config']['postgresqlUser']
self.globals['trvc'][trvCtlrDevId]['postgresqlPassword'] = self.globals['config']['postgresqlPassword']
self.globals['trvc'][trvCtlrDevId]['csvShortName'] = trvcDev.pluginProps.get('csvShortName', '')
self.globals['trvc'][trvCtlrDevId]['csvRetentionPeriodHours'] = int(trvcDev.pluginProps.get('csvRetentionPeriodHours', 24))
self.globals['trvc'][trvCtlrDevId]['pollingScheduleActive'] = float(int(trvcDev.pluginProps.get('pollingScheduleActive', 5)) * 60.0)
self.globals['trvc'][trvCtlrDevId]['pollingScheduleInactive'] = float(int(trvcDev.pluginProps.get('pollingScheduleInactive', 20)) * 60.0)
self.globals['trvc'][trvCtlrDevId]['pollingSchedulesNotEnabled'] = float(int(trvcDev.pluginProps.get('pollingSchedulesNotEnabled', 30)) * 60.0)
self.globals['trvc'][trvCtlrDevId]['pollingBoostEnabled'] = float(int(trvcDev.pluginProps.get('pollingBoostEnabled', 5)) * 60.0)
self.globals['trvc'][trvCtlrDevId]['pollingSeconds'] = 0.0
self.globals['trvc'][trvCtlrDevId]['advancedOption'] = ADVANCED_OPTION_NONE
self.globals['trvc'][trvCtlrDevId]['enableTrvOnOff'] = False
if self.globals['trvc'][trvCtlrDevId]['trvDevId'] != 0:
if trvcDev.address != indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address:
pluginProps = trvcDev.pluginProps
pluginProps["address"] = indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address
trvcDev.replacePluginPropsOnServer(pluginProps)
return
self.globals['trvc'][trvCtlrDevId]['supportsHvacOnOff'] = bool(trvcDev.pluginProps.get('supportsHvacOnOff', False))
if self.globals['trvc'][trvCtlrDevId]['supportsHvacOnOff']:
self.globals['trvc'][trvCtlrDevId]['enableTrvOnOff'] = bool(trvcDev.pluginProps.get('enableTrvOnOff', False))
self.globals['trvc'][trvCtlrDevId]['trvSupportsManualSetpoint'] = bool(trvcDev.pluginProps.get('supportsManualSetpoint', False))
self.globals['trvc'][trvCtlrDevId]['trvSupportsTemperatureReporting'] = bool(trvcDev.pluginProps.get('supportsTemperatureReporting', False))
self.logger.debug(
f'TRV SUPPORTS TEMPERATURE REPORTING: \'{indigo.devices[self.globals["trvc"][trvCtlrDevId]["trvDevId"]].name}\' = {self.globals["trvc"][trvCtlrDevId]["trvSupportsTemperatureReporting"]} ')
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address)] = dict()
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address)]['devId'] = self.globals['trvc'][trvCtlrDevId]['trvDevId']
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address)]['type'] = TRV
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address)]['trvcId'] = trvCtlrDevId
self.globals['zwave']['WatchList'].add(int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address))
for dev in indigo.devices:
if dev.address == trvcDev.address and dev.id != self.globals['trvc'][trvCtlrDevId]['trvDevId']:
if dev.model == 'Thermostat (Spirit)':
advancedOption = int(trvcDev.pluginProps.get('advancedOption', ADVANCED_OPTION_NOT_SET))
if advancedOption == ADVANCED_OPTION_NOT_SET:
valveAssistance = bool(trvcDev.pluginProps.get('valveAssistance', True))
if valveAssistance:
advancedOption = ADVANCED_OPTION_VALVE_ASSISTANCE
else:
advancedOption = ADVANCED_OPTION_NONE
self.globals['trvc'][trvCtlrDevId]['advancedOption'] = advancedOption
if advancedOption == ADVANCED_OPTION_FIRMWARE_WORKAROUND or advancedOption == ADVANCED_OPTION_VALVE_ASSISTANCE:
self.globals['trvc'][trvCtlrDevId]['valveDevId'] = dev.id
self.globals['trvc'][trvCtlrDevId]['valvePercentageOpen'] = int(dev.states['brightnessLevel'])
# advancedOptionUi = ''
if (self.globals['trvc'][trvCtlrDevId]['advancedOption'] == ADVANCED_OPTION_FIRMWARE_WORKAROUND
or self.globals['trvc'][trvCtlrDevId]['advancedOption'] == ADVANCED_OPTION_VALVE_ASSISTANCE):
advancedOptionUi = ADVANCED_OPTION_UI[self.globals['trvc'][trvCtlrDevId]['advancedOption']]
self.logger.debug(
f'Found Valve device for \'{trvcDev.name}\': \'{dev.name}\' - Valve percentage open = {self.globals["trvc"][trvCtlrDevId]["valvePercentageOpen"]}% [{advancedOptionUi}]')
else:
# TODO: Work out how to handle this error situation (unexpected device type found at this address)
return
self.globals['schedules'][trvCtlrDevId] = dict()
self.globals['schedules'][trvCtlrDevId]['default'] = dict() # setup from device configuration
self.globals['schedules'][trvCtlrDevId]['running'] = dict() # based on 'default' and potentially modified by change schedule actions
self.globals['schedules'][trvCtlrDevId]['dynamic'] = dict() # based on 'running' and potentially modified in response to Boost / Advance / Extend actions
self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] = 0 # Assume no radiator temperature monitoring
self.globals['trvc'][trvCtlrDevId]['radiatorMonitoringEnabled'] = bool(trvcDev.pluginProps.get('radiatorMonitoringEnabled', False))
if self.globals['trvc'][trvCtlrDevId]['radiatorMonitoringEnabled']:
self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] = int(trvcDev.pluginProps.get('radiatorDevId', 0)) # ID of Radiator Temperature Sensor device
self.globals['trvc'][trvCtlrDevId]['remoteDevId'] = 0 # Assume no remote thermostat control
self.globals['trvc'][trvCtlrDevId]['remoteThermostatControlEnabled'] = bool(trvcDev.pluginProps.get('remoteThermostatControlEnabled', False))
if self.globals['trvc'][trvCtlrDevId]['remoteThermostatControlEnabled']:
self.globals['trvc'][trvCtlrDevId]['remoteDevId'] = int(trvcDev.pluginProps.get('remoteDevId', 0)) # ID of Remote Thermostat device
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
if indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].protocol == indigo.kProtocol.ZWave:
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].address)] = dict()
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].address)]['devId'] = self.globals['trvc'][trvCtlrDevId][
'remoteDevId']
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].address)]['type'] = REMOTE
self.globals['zwave']['addressToDevice'][int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].address)]['trvcId'] = trvCtlrDevId
self.globals['zwave']['WatchList'].add(int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].address))
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0 and self.globals['trvc'][trvCtlrDevId]['trvSupportsTemperatureReporting']:
if trvcDev.pluginProps.get('NumTemperatureInputs', 0) != 2:
pluginProps = trvcDev.pluginProps
pluginProps["NumTemperatureInputs"] = 2
trvcDev.replacePluginPropsOnServer(pluginProps)
return
else:
if trvcDev.pluginProps.get('NumTemperatureInputs', 0) != 1:
pluginProps = trvcDev.pluginProps
pluginProps["NumTemperatureInputs"] = 1
trvcDev.replacePluginPropsOnServer(pluginProps)
return
self.globals['trvc'][trvCtlrDevId]['trvSupportsHvacOperationMode'] = bool(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].supportsHvacOperationMode)
self.logger.debug(
f'TRV \'{indigo.devices[self.globals["trvc"][trvCtlrDevId]["trvDevId"]].name}\' supports HVAC Operation Mode = {self.globals["trvc"][trvCtlrDevId]["trvSupportsHvacOperationMode"]}')
self.globals['trvc'][trvCtlrDevId]['heatingId'] = int(trvcDev.pluginProps.get('heatingId', 0)) # ID of Heat Source Controller device
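# Register the heat source device (if specified) and determine how it is controlled: via an hvac operation mode or a simple on/off relay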
if self.globals['trvc'][trvCtlrDevId]['heatingId'] != 0 and self.globals['trvc'][trvCtlrDevId]['heatingId'] not in self.globals['heaterDevices'].keys():
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']] = dict()
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']][
'thermostatsCallingForHeat'] = set() # A set of TRVs calling for heat from this heat source [None at the moment]
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']]['heaterControlType'] = HEAT_SOURCE_NOT_FOUND # Default to No Heating Source
dev = indigo.devices[self.globals['trvc'][trvCtlrDevId]['heatingId']]
if 'hvacOperationMode' in dev.states:
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']]['heaterControlType'] = HEAT_SOURCE_CONTROL_HVAC # hvac
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']]['onState'] = HEAT_SOURCE_INITIALISE
elif 'onOffState' in dev.states:
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']]['heaterControlType'] = HEAT_SOURCE_CONTROL_RELAY # relay device
self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']]['onState'] = HEAT_SOURCE_INITIALISE
else:
indigo.server.error(f'Error detected by TRV Plugin for device [{trvcDev.name}] - Unknown Heating Source Device Type with Id: {self.globals["trvc"][trvCtlrDevId]["heatingId"]}')
if self.globals['heaterDevices'][self.globals['trvc'][trvCtlrDevId]['heatingId']]['heaterControlType'] != HEAT_SOURCE_NOT_FOUND:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, 0, CMD_KEEP_HEAT_SOURCE_CONTROLLER_ALIVE, None, [self.globals['trvc'][trvCtlrDevId]['heatingId'], ]])
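# A heat source can also be signalled via an Indigo variable; register it and initialise it to 'false' (no heat being requested)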
self.globals['trvc'][trvCtlrDevId]['heatingVarId'] = int(trvcDev.pluginProps.get('heatingVarId', 0)) # ID of Heat Source Controller device
if self.globals['trvc'][trvCtlrDevId]['heatingVarId'] != 0 and self.globals['trvc'][trvCtlrDevId]['heatingVarId'] not in self.globals['heaterVariables'].keys():
self.globals['heaterVariables'][self.globals['trvc'][trvCtlrDevId]['heatingVarId']] = dict()
self.globals['heaterVariables'][self.globals['trvc'][trvCtlrDevId]['heatingVarId']][
'thermostatsCallingForHeat'] = set() # A set of TRVs calling for heat from this heat source [None at the moment]
indigo.variable.updateValue(self.globals['trvc'][trvCtlrDevId]['heatingVarId'], value="false") # Variable indicator to show that heating is NOT being requested
# Battery level setup
self.globals['trvc'][trvCtlrDevId]['batteryLevel'] = 0
self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv'] = 0
if self.globals['trvc'][trvCtlrDevId]['trvDevId'] != 0:
if 'batteryLevel' in indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].states:
self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv'] = indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].batteryLevel
self.globals['trvc'][trvCtlrDevId]['batteryLevel'] = self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv']
self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote'] = 0
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
if 'batteryLevel' in indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].states:
self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote'] = indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].batteryLevel
if 0 < self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote'] < \
self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv']:
self.globals['trvc'][trvCtlrDevId]['batteryLevel'] = self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote']
self.globals['trvc'][trvCtlrDevId]['batteryLevelRadiator'] = 0
if self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] != 0:
if 'batteryLevel' in indigo.devices[self.globals['trvc'][trvCtlrDevId]['radiatorDevId']].states:
self.globals['trvc'][trvCtlrDevId]['batteryLevelRadiator'] = indigo.devices[self.globals['trvc'][trvCtlrDevId]['radiatorDevId']].batteryLevel
if 0 < self.globals['trvc'][trvCtlrDevId]['batteryLevelRadiator'] < \
self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv']:
self.globals['trvc'][trvCtlrDevId]['batteryLevel'] = self.globals['trvc'][trvCtlrDevId]['batteryLevelRadiator']
self.globals['trvc'][trvCtlrDevId]['setpointHeatOnDefault'] = float(trvcDev.pluginProps['setpointHeatOnDefault'])
self.globals['trvc'][trvCtlrDevId]['setpointHeatMinimum'] = float(trvcDev.pluginProps['setpointHeatMinimum'])
self.globals['trvc'][trvCtlrDevId]['setpointHeatMaximum'] = float(trvcDev.pluginProps['setpointHeatMaximum'])
self.globals['trvc'][trvCtlrDevId]['setpointHeatDeviceStartMethod'] = int(trvcDev.pluginProps.get('setpointHeatDeviceStartMethod', 1))
self.globals['trvc'][trvCtlrDevId]['setpointHeatDeviceStartDefault'] = float(trvcDev.pluginProps.get('setpointHeatDeviceStartDefault', 8))
self.globals['trvc'][trvCtlrDevId]['nextScheduleExecutionTime'] = 'Not yet evaluated'
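# Load the four heating schedules from the device configuration; a disabled schedule defaults to an inactive 00:00 - 00:00 window with no setpoint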
self.globals['trvc'][trvCtlrDevId]['schedule1Enabled'] = bool(trvcDev.pluginProps.get('schedule1Enabled', False))
if self.globals['trvc'][trvCtlrDevId]['schedule1Enabled']:
self.globals['trvc'][trvCtlrDevId]['schedule1TimeOn'] = trvcDev.pluginProps.get('schedule1TimeOn', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule1TimeOff'] = trvcDev.pluginProps.get('schedule1TimeOff', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeat'] = float(trvcDev.pluginProps.get('schedule1SetpointHeat', 0.0))
else:
self.globals['trvc'][trvCtlrDevId]['schedule1TimeOn'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule1TimeOff'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeat'] = 0.0
if not self.globals['trvc'][trvCtlrDevId]['schedule1Enabled'] or self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeat'] == 0.0:
self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][trvCtlrDevId]['schedule1TimeUi'] = 'Inactive'
else:
self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeatUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule1SetpointHeat"]} °C'
self.globals['trvc'][trvCtlrDevId]['schedule1TimeUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule1TimeOn"]} - {self.globals["trvc"][trvCtlrDevId]["schedule1TimeOff"]}'
self.globals['trvc'][trvCtlrDevId]['schedule2Enabled'] = bool(trvcDev.pluginProps.get('schedule2Enabled', False))
if self.globals['trvc'][trvCtlrDevId]['schedule2Enabled']:
self.globals['trvc'][trvCtlrDevId]['schedule2TimeOn'] = trvcDev.pluginProps.get('schedule2TimeOn', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule2TimeOff'] = trvcDev.pluginProps.get('schedule2TimeOff', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeat'] = float(trvcDev.pluginProps.get('schedule2SetpointHeat', 0.0))
else:
self.globals['trvc'][trvCtlrDevId]['schedule2TimeOn'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule2TimeOff'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeat'] = 0.0
if not self.globals['trvc'][trvCtlrDevId]['schedule2Enabled'] or self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeat'] == 0.0:
self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][trvCtlrDevId]['schedule2TimeUi'] = 'Inactive'
else:
self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeatUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule2SetpointHeat"]} °C'
self.globals['trvc'][trvCtlrDevId]['schedule2TimeUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule2TimeOn"]} - {self.globals["trvc"][trvCtlrDevId]["schedule2TimeOff"]}'
self.globals['trvc'][trvCtlrDevId]['schedule3Enabled'] = bool(trvcDev.pluginProps.get('schedule3Enabled', False))
if self.globals['trvc'][trvCtlrDevId]['schedule3Enabled']:
self.globals['trvc'][trvCtlrDevId]['schedule3TimeOn'] = trvcDev.pluginProps.get('schedule3TimeOn', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule3TimeOff'] = trvcDev.pluginProps.get('schedule3TimeOff', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeat'] = float(trvcDev.pluginProps.get('schedule3SetpointHeat', 0.0))
else:
self.globals['trvc'][trvCtlrDevId]['schedule3TimeOn'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule3TimeOff'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeat'] = 0.0
if not self.globals['trvc'][trvCtlrDevId]['schedule3Enabled'] or self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeat'] == 0.0:
self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][trvCtlrDevId]['schedule3TimeUi'] = 'Inactive'
else:
self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeatUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule3SetpointHeat"]} °C'
self.globals['trvc'][trvCtlrDevId]['schedule3TimeUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule3TimeOn"]} - {self.globals["trvc"][trvCtlrDevId]["schedule3TimeOff"]}'
self.globals['trvc'][trvCtlrDevId]['schedule4Enabled'] = bool(trvcDev.pluginProps.get('schedule4Enabled', False))
if self.globals['trvc'][trvCtlrDevId]['schedule4Enabled']:
self.globals['trvc'][trvCtlrDevId]['schedule4TimeOn'] = trvcDev.pluginProps.get('schedule4TimeOn', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule4TimeOff'] = trvcDev.pluginProps.get('schedule4TimeOff', '00:00')
self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeat'] = float(trvcDev.pluginProps.get('schedule4SetpointHeat', 0.0))
else:
self.globals['trvc'][trvCtlrDevId]['schedule4TimeOn'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule4TimeOff'] = '00:00'
self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeat'] = 0.0
if not self.globals['trvc'][trvCtlrDevId]['schedule4Enabled'] or self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeat'] == 0.0:
self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][trvCtlrDevId]['schedule4TimeUi'] = 'Inactive'
else:
self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeatUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule4SetpointHeat"]} °C'
self.globals['trvc'][trvCtlrDevId]['schedule4TimeUi'] = f'{self.globals["trvc"][trvCtlrDevId]["schedule4TimeOn"]} - {self.globals["trvc"][trvCtlrDevId]["schedule4TimeOff"]}'
# Save the schedule values as configured on the device so they can be restored if the schedule is later reset to its configuration defaults
self.globals['trvc'][trvCtlrDevId]['scheduleReset1Enabled'] = self.globals['trvc'][trvCtlrDevId]['schedule1Enabled']
self.globals['trvc'][trvCtlrDevId]['scheduleReset1TimeOn'] = self.globals['trvc'][trvCtlrDevId]['schedule1TimeOn']
self.globals['trvc'][trvCtlrDevId]['scheduleReset1TimeOff'] = self.globals['trvc'][trvCtlrDevId]['schedule1TimeOff']
self.globals['trvc'][trvCtlrDevId]['scheduleReset1TimeUi'] = self.globals['trvc'][trvCtlrDevId]['schedule1TimeUi']
self.globals['trvc'][trvCtlrDevId]['scheduleReset1HeatSetpoint'] = self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeat']
self.globals['trvc'][trvCtlrDevId]['scheduleReset2Enabled'] = self.globals['trvc'][trvCtlrDevId]['schedule2Enabled']
self.globals['trvc'][trvCtlrDevId]['scheduleReset2TimeOn'] = self.globals['trvc'][trvCtlrDevId]['schedule2TimeOn']
self.globals['trvc'][trvCtlrDevId]['scheduleReset2TimeOff'] = self.globals['trvc'][trvCtlrDevId]['schedule2TimeOff']
self.globals['trvc'][trvCtlrDevId]['scheduleReset2TimeUi'] = self.globals['trvc'][trvCtlrDevId]['schedule2TimeUi']
self.globals['trvc'][trvCtlrDevId]['scheduleReset2HeatSetpoint'] = self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeat']
self.globals['trvc'][trvCtlrDevId]['scheduleReset3Enabled'] = self.globals['trvc'][trvCtlrDevId]['schedule3Enabled']
self.globals['trvc'][trvCtlrDevId]['scheduleReset3TimeOn'] = self.globals['trvc'][trvCtlrDevId]['schedule3TimeOn']
self.globals['trvc'][trvCtlrDevId]['scheduleReset3TimeOff'] = self.globals['trvc'][trvCtlrDevId]['schedule3TimeOff']
self.globals['trvc'][trvCtlrDevId]['scheduleReset3TimeUi'] = self.globals['trvc'][trvCtlrDevId]['schedule3TimeUi']
self.globals['trvc'][trvCtlrDevId]['scheduleReset3HeatSetpoint'] = self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeat']
self.globals['trvc'][trvCtlrDevId]['scheduleReset4Enabled'] = self.globals['trvc'][trvCtlrDevId]['schedule4Enabled']
self.globals['trvc'][trvCtlrDevId]['scheduleReset4TimeOn'] = self.globals['trvc'][trvCtlrDevId]['schedule4TimeOn']
self.globals['trvc'][trvCtlrDevId]['scheduleReset4TimeOff'] = self.globals['trvc'][trvCtlrDevId]['schedule4TimeOff']
self.globals['trvc'][trvCtlrDevId]['scheduleReset4TimeUi'] = self.globals['trvc'][trvCtlrDevId]['schedule4TimeUi']
self.globals['trvc'][trvCtlrDevId]['scheduleReset4HeatSetpoint'] = self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeat']
self.globals['trvc'][trvCtlrDevId]['schedule1Fired'] = False  # TODO: Confirm whether these 'scheduleNFired' flags are still used
self.globals['trvc'][trvCtlrDevId]['schedule2Fired'] = False
self.globals['trvc'][trvCtlrDevId]['schedule3Fired'] = False
self.globals['trvc'][trvCtlrDevId]['schedule4Fired'] = False
self.globals['trvc'][trvCtlrDevId]['schedule1Active'] = False
self.globals['trvc'][trvCtlrDevId]['schedule2Active'] = False
self.globals['trvc'][trvCtlrDevId]['schedule3Active'] = False
self.globals['trvc'][trvCtlrDevId]['schedule4Active'] = False
self.globals['trvc'][trvCtlrDevId]['advanceActive'] = False
self.globals['trvc'][trvCtlrDevId]['advanceStatusUi'] = ''
self.globals['trvc'][trvCtlrDevId]['advanceActivatedTime'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['advanceToScheduleTime'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['boostMode'] = BOOST_MODE_INACTIVE
self.globals['trvc'][trvCtlrDevId]['boostModeUi'] = BOOST_MODE_TRANSLATION[self.globals['trvc'][trvCtlrDevId]['boostMode']]
self.globals['trvc'][trvCtlrDevId]['boostStatusUi'] = ''
self.globals['trvc'][trvCtlrDevId]['boostActive'] = False
self.globals['trvc'][trvCtlrDevId]['boostDeltaT'] = 0.0
self.globals['trvc'][trvCtlrDevId]['boostSetpoint'] = 0.0
self.globals['trvc'][trvCtlrDevId]['boostMinutes'] = 0
self.globals['trvc'][trvCtlrDevId]['boostTimeEnd'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['boostTimeStart'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['boostSetpointToRestore'] = 0.0
self.globals['trvc'][trvCtlrDevId]['boostSetpointInvokeRestore'] = False
self.globals['trvc'][trvCtlrDevId]['deviceStartDatetime'] = str(currentTime)
self.globals['trvc'][trvCtlrDevId]['extendActive'] = False
self.globals['trvc'][trvCtlrDevId]['extendStatusUi'] = ''
self.globals['trvc'][trvCtlrDevId]['extendIncrementMinutes'] = 0
self.globals['trvc'][trvCtlrDevId]['extendMaximumMinutes'] = 0
self.globals['trvc'][trvCtlrDevId]['extendMinutes'] = 0
self.globals['trvc'][trvCtlrDevId]['extendActivatedTime'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['extendScheduleOriginalTime'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['extendScheduleNewTime'] = "Inactive"
self.globals['trvc'][trvCtlrDevId]['extendLimitReached'] = False
self.globals['trvc'][trvCtlrDevId]['setpointHeatTrv'] = float(indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['trvDevId'])].heatSetpoint)
if self.globals['trvc'][trvCtlrDevId]['setpointHeatDeviceStartMethod'] == DEVICE_START_SETPOINT_DEVICE_MINIMUM:
self.globals['trvc'][trvCtlrDevId]['setpointHeat'] = float(trvcDev.pluginProps['setpointHeatMinimum'])
self.logger.info(f'\'{trvcDev.name}\' Heat Setpoint set to device minimum value i.e. \'{self.globals["trvc"][trvCtlrDevId]["setpointHeat"]}\'')
elif self.globals['trvc'][trvCtlrDevId]['setpointHeatDeviceStartMethod'] == DEVICE_START_SETPOINT_LEAVE_AS_IS:
self.globals['trvc'][trvCtlrDevId]['setpointHeat'] = float(indigo.devices[trvCtlrDevId].heatSetpoint)
self.logger.info(f'\'{trvcDev.name}\' Heat Setpoint left unchanged i.e. \'{self.globals["trvc"][trvCtlrDevId]["setpointHeat"]}\'')
elif self.globals['trvc'][trvCtlrDevId]['setpointHeatDeviceStartMethod'] == DEVICE_START_SETPOINT_SPECIFIED:
self.globals['trvc'][trvCtlrDevId]['setpointHeat'] = float(self.globals['trvc'][trvCtlrDevId]['setpointHeatDeviceStartDefault'])
self.logger.info(f'\'{trvcDev.name}\' Heat Setpoint set to specified \'Device Start\' value i.e. \'{self.globals["trvc"][trvCtlrDevId]["setpointHeat"]}\'')
else:
self.logger.error(
f'Error detected by TRV Plugin for device [{trvcDev.name}] - Unknown method \'{self.globals["trvc"][trvCtlrDevId]["setpointHeatDeviceStartMethod"]}\' to set Device Start Heat Setpoint')
return
self.globals['trvc'][trvCtlrDevId]['heatSetpointAdvance'] = 0
self.globals['trvc'][trvCtlrDevId]['heatSetpointBoost'] = 0
if self.globals['trvc'][trvCtlrDevId]['enableTrvOnOff']:
self.globals['trvc'][trvCtlrDevId]['hvacOperationModeTrv'] = HVAC_OFF
self.globals['trvc'][trvCtlrDevId]['hvacOperationMode'] = HVAC_OFF
else:
self.globals['trvc'][trvCtlrDevId]['hvacOperationModeTrv'] = HVAC_HEAT
self.globals['trvc'][trvCtlrDevId]['hvacOperationMode'] = HVAC_HEAT
self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_INITIALISATION
self.globals['trvc'][trvCtlrDevId]['modeDatetimeChanged'] = currentTime
if self.globals['trvc'][trvCtlrDevId]['trvSupportsTemperatureReporting']:
self.globals['trvc'][trvCtlrDevId]['temperatureTrv'] = float(indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['trvDevId'])].temperatures[0])
else:
self.globals['trvc'][trvCtlrDevId]['temperatureTrv'] = 0.0
self.globals['trvc'][trvCtlrDevId]['temperatureRadiator'] = float(0.0)
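# Read the radiator temperature by trying the state names used by the known sensor types in turn; an unrecognised device disables radiator monitoring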
if self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] != 0:
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRadiator'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['radiatorDevId'])].temperatures[0]) # e.g. Radiator Thermostat (HRT4-ZW)
except AttributeError:
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRadiator'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['radiatorDevId'])].states['sensorValue']) # e.g. Aeon 4 in 1 / Fibaro FGMS-001
except (AttributeError, KeyError):
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRadiator'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['radiatorDevId'])].states['temperature']) # e.g. Oregon Scientific Temp Sensor
except (AttributeError, KeyError):
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRadiator'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['radiatorDevId'])].states['Temperature']) # e.g. Netatmo
except (AttributeError, KeyError):
indigo.server.error(
f'\'{indigo.devices[self.globals["trvc"][trvCtlrDevId]["radiatorDevId"]].name}\' is an unknown Radiator Temperature Sensor type - Radiator Temperature Sensor support disabled for TRV \'{trvcDev.name}\'')
self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] = 0 # Disable Radiator Temperature Sensor Support
self.globals['trvc'][trvCtlrDevId]['temperatureRemote'] = float(0.0)
self.globals['trvc'][trvCtlrDevId]['temperatureRemotePreOffset'] = float(0.0)
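# Read the remote thermostat temperature in the same way, trying each known state name in turn; an unrecognised device disables remote support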
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRemote'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['remoteDevId'])].temperatures[0]) # e.g. Radiator Thermostat (HRT4-ZW)
except AttributeError:
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRemote'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['remoteDevId'])].states['sensorValue']) # e.g. Aeon 4 in 1 / Fibaro FGMS-001
except (AttributeError, KeyError):
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRemote'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['remoteDevId'])].states['temperature']) # e.g. Oregon Scientific Temp Sensor
except (AttributeError, KeyError):
try:
self.globals['trvc'][trvCtlrDevId]['temperatureRemote'] = float(
indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['remoteDevId'])].states['Temperature']) # e.g. Netatmo
except (AttributeError, KeyError):
indigo.server.error(
f'\'{indigo.devices[self.globals["trvc"][trvCtlrDevId]["remoteDevId"]].name}\' is an unknown Remote Thermostat type - Remote support disabled for TRV \'{trvcDev.name}\'')
self.globals['trvc'][trvCtlrDevId]['remoteDevId'] = 0 # Disable Remote Support
self.globals['trvc'][trvCtlrDevId]['setpointHeatRemote'] = 0
self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl'] = bool(trvcDev.pluginProps.get('remoteSetpointHeatControl', False))
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] == 0:
self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl'] = False
self.globals['trvc'][trvCtlrDevId]['temperature'] = float(self.globals['trvc'][trvCtlrDevId]['temperatureTrv'])
else:
self.globals['trvc'][trvCtlrDevId]['remoteTempOffset'] = float(trvcDev.pluginProps.get('remoteTempOffset', 0.0))
self.globals['trvc'][trvCtlrDevId]['temperatureRemotePreOffset'] = float(self.globals['trvc'][trvCtlrDevId]['temperatureRemote'])
self.globals['trvc'][trvCtlrDevId]['temperatureRemote'] = float(self.globals['trvc'][trvCtlrDevId]['temperatureRemote']) + float(self.globals['trvc'][trvCtlrDevId]['remoteTempOffset'])
self.globals['trvc'][trvCtlrDevId]['temperature'] = float(self.globals['trvc'][trvCtlrDevId]['temperatureRemote'])
self.globals['trvc'][trvCtlrDevId]['remoteDeltaMax'] = float(trvcDev.pluginProps.get('remoteDeltaMax', 5.0))
if self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl']:
try:
setpoint = float(indigo.devices[int(self.globals['trvc'][trvCtlrDevId]['remoteDevId'])].heatSetpoint)
if float(setpoint) < float(self.globals['trvc'][trvCtlrDevId]['setpointHeatMinimum']):
setpoint = float(self.globals['trvc'][trvCtlrDevId]['setpointHeatMinimum'])
elif float(setpoint) > float(self.globals['trvc'][trvCtlrDevId]['setpointHeatMaximum']):
setpoint = float(self.globals['trvc'][trvCtlrDevId]['setpointHeatMaximum'])
self.globals['trvc'][trvCtlrDevId]['setpointHeat'] = setpoint
self.globals['trvc'][trvCtlrDevId]['setpointHeatRemote'] = setpoint
except Exception:
self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl'] = False
self.globals['trvc'][trvCtlrDevId]['zwaveEventWakeUpSentDisplayFix'] = '' # Used to flip the Z-wave reporting around for Wakeup command (Indigo fix)
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountTrv'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousTrv'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountTrv'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousTrv'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeTrv'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeTrv'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayTrv'] = False
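# Start a watchdog timer that flags a missed TRV Z-Wave wakeup if nothing is heard within the wakeup interval plus a two minute grace period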
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] = int(
indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].globalProps["com.perceptiveautomation.indigoplugin.zwave"]["zwWakeInterval"])
if self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] > 0:
trvDevId = self.globals['trvc'][trvCtlrDevId]['trvDevId']
nextWakeupMissedSeconds = (self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] + 2) * 60 # Add 2 minutes to next expected wakeup
if trvDevId in self.globals['timers']['zwaveWakeupCheck']:
self.globals['timers']['zwaveWakeupCheck'][trvDevId].cancel()
self.globals['timers']['zwaveWakeupCheck'][trvDevId] = threading.Timer(float(nextWakeupMissedSeconds), self.zwaveWakeupMissedTriggered, [trvCtlrDevId, TRV, trvDevId])
self.globals['timers']['zwaveWakeupCheck'][trvDevId].daemon = True
self.globals['timers']['zwaveWakeupCheck'][trvDevId].start()
self.globals['trvc'][trvCtlrDevId]['zwaveLastSentCommandTrv'] = ''
self.globals['trvc'][trvCtlrDevId]['zwaveLastReceivedCommandTrv'] = ''
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousRemote'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousRemote'] = 0
self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeRemote'] = 'N/A'
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayRemote'] = False
self.globals['trvc'][trvCtlrDevId]['zwaveMonitoringEnabledRemote'] = False
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] = int(0)
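# Apply the same missed-wakeup watchdog to the remote thermostat if it is a Z-Wave device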
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
remoteDevId = self.globals['trvc'][trvCtlrDevId]['remoteDevId']
if indigo.devices[remoteDevId].protocol == indigo.kProtocol.ZWave:
try:
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] = int(indigo.devices[remoteDevId].globalProps["com.perceptiveautomation.indigoplugin.zwave"]["zwWakeInterval"])
self.globals['trvc'][trvCtlrDevId]['zwaveMonitoringEnabledRemote'] = True
if self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] > 0:
nextWakeupMissedSeconds = (self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] + 2) * 60 # Add 2 minutes to next expected wakeup
if remoteDevId in self.globals['timers']['zwaveWakeupCheck']:
self.globals['timers']['zwaveWakeupCheck'][remoteDevId].cancel()
self.globals['timers']['zwaveWakeupCheck'][remoteDevId] = threading.Timer(float(nextWakeupMissedSeconds), self.zwaveWakeupMissedTriggered,
[trvCtlrDevId, REMOTE, remoteDevId])
self.globals['timers']['zwaveWakeupCheck'][remoteDevId].daemon = True
self.globals['timers']['zwaveWakeupCheck'][remoteDevId].start()
except Exception:
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] = int(0)
else:
# self.logger.debug("Protocol for device %s is '%s'" % (indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].name, indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].protocol))
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] = int(0)
self.globals['trvc'][trvCtlrDevId]['zwaveLastSentCommandRemote'] = ''
self.globals['trvc'][trvCtlrDevId]['zwaveLastReceivedCommandRemote'] = ''
self.globals['trvc'][trvCtlrDevId]['zwavePendingHvac'] = False # Used to differentiate between internally generated Z-Wave hvac command and UI generated Z-Wave hvac commands
self.globals['trvc'][trvCtlrDevId][
'zwavePendingTrvSetpointFlag'] = False # Used to differentiate between internally generated Z-Wave setpoint command and UI generated Z-Wave setpoint commands
self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointSequence'] = 0
self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointValue'] = 0.0
self.globals['trvc'][trvCtlrDevId][
'zwavePendingRemoteSetpointFlag'] = False # Used to differentiate between internally generated Z-Wave setpoint command and UI generated Z-Wave setpoint commands
self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointSequence'] = 0
self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointValue'] = 0.0
self.globals['trvc'][trvCtlrDevId]['deltaIncreaseHeatSetpoint'] = 0.0
self.globals['trvc'][trvCtlrDevId]['deltaIDecreaseHeatSetpoint'] = 0.0
self.globals['trvc'][trvCtlrDevId]['callingForHeat'] = False
self.globals['trvc'][trvCtlrDevId]['callingForHeatTrueSSM'] = 0 # Calling For Heat True Seconds Since Midnight
self.globals['trvc'][trvCtlrDevId]['callingForHeatFalseSSM'] = 0 # Calling For Heat False Seconds Since Midnight
# Update device states
keyValueList = [{'key': 'hvacOperationMode', 'value': self.globals['trvc'][trvCtlrDevId]['hvacOperationMode']},
{'key': 'nextScheduleExecutionTime', 'value': self.globals['trvc'][trvCtlrDevId]['nextScheduleExecutionTime']},
{'key': 'schedule1Active', 'value': self.globals['trvc'][trvCtlrDevId]['schedule1Active']},
{'key': 'schedule1Enabled', 'value': self.globals['trvc'][trvCtlrDevId]['schedule1Enabled']},
{'key': 'schedule1TimeOn', 'value': self.globals['trvc'][trvCtlrDevId]['schedule1TimeOn']},
{'key': 'schedule1TimeOff', 'value': self.globals['trvc'][trvCtlrDevId]['schedule1TimeOff']},
{'key': 'schedule1TimeUi', 'value': self.globals['trvc'][trvCtlrDevId]['schedule1TimeUi']},
{'key': 'schedule1SetpointHeat', 'value': self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeatUi']},
{'key': 'schedule2Active', 'value': self.globals['trvc'][trvCtlrDevId]['schedule2Active']},
{'key': 'schedule2Enabled', 'value': self.globals['trvc'][trvCtlrDevId]['schedule2Enabled']},
{'key': 'schedule2TimeOn', 'value': self.globals['trvc'][trvCtlrDevId]['schedule2TimeOn']},
{'key': 'schedule2TimeOff', 'value': self.globals['trvc'][trvCtlrDevId]['schedule2TimeOff']},
{'key': 'schedule2TimeUi', 'value': self.globals['trvc'][trvCtlrDevId]['schedule2TimeUi']},
{'key': 'schedule2SetpointHeat', 'value': self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeatUi']},
{'key': 'schedule3Active', 'value': self.globals['trvc'][trvCtlrDevId]['schedule3Active']},
{'key': 'schedule3Enabled', 'value': self.globals['trvc'][trvCtlrDevId]['schedule3Enabled']},
{'key': 'schedule3TimeOn', 'value': self.globals['trvc'][trvCtlrDevId]['schedule3TimeOn']},
{'key': 'schedule3TimeOff', 'value': self.globals['trvc'][trvCtlrDevId]['schedule3TimeOff']},
{'key': 'schedule3TimeUi', 'value': self.globals['trvc'][trvCtlrDevId]['schedule3TimeUi']},
{'key': 'schedule3SetpointHeat', 'value': self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeatUi']},
{'key': 'schedule4Active', 'value': self.globals['trvc'][trvCtlrDevId]['schedule4Active']},
{'key': 'schedule4Enabled', 'value': self.globals['trvc'][trvCtlrDevId]['schedule4Enabled']},
{'key': 'schedule4TimeOn', 'value': self.globals['trvc'][trvCtlrDevId]['schedule4TimeOn']},
{'key': 'schedule4TimeOff', 'value': self.globals['trvc'][trvCtlrDevId]['schedule4TimeOff']},
{'key': 'schedule4TimeUi', 'value': self.globals['trvc'][trvCtlrDevId]['schedule4TimeUi']},
{'key': 'schedule4SetpointHeat', 'value': self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeatUi']},
{'key': 'setpointHeatOnDefault', 'value': self.globals['trvc'][trvCtlrDevId]['setpointHeatOnDefault']},
{'key': 'setpointHeatMinimum', 'value': self.globals['trvc'][trvCtlrDevId]['setpointHeatMinimum']},
{'key': 'setpointHeatMaximum', 'value': self.globals['trvc'][trvCtlrDevId]['setpointHeatMaximum']},
{'key': 'setpointHeatTrv', 'value': self.globals['trvc'][trvCtlrDevId]['setpointHeatTrv']},
{'key': 'setpointHeatRemote', 'value': self.globals['trvc'][trvCtlrDevId]['setpointHeatRemote']},
{'key': 'temperature', 'value': self.globals['trvc'][trvCtlrDevId]['temperature']},
{'key': 'temperatureRemote', 'value': self.globals['trvc'][trvCtlrDevId]['temperatureRemote']},
{'key': 'temperatureRemotePreOffset', 'value': self.globals['trvc'][trvCtlrDevId]['temperatureRemotePreOffset']},
{'key': 'temperatureTrv', 'value': self.globals['trvc'][trvCtlrDevId]['temperatureTrv']},
{'key': 'advanceActive', 'value': self.globals['trvc'][trvCtlrDevId]['advanceActive']},
{'key': 'advanceStatusUi', 'value': self.globals['trvc'][trvCtlrDevId]['advanceStatusUi']},
{'key': 'advanceActivatedTime', 'value': self.globals['trvc'][trvCtlrDevId]['advanceActivatedTime']},
{'key': 'advanceToScheduleTime', 'value': self.globals['trvc'][trvCtlrDevId]['advanceToScheduleTime']},
{'key': 'boostActive', 'value': self.globals['trvc'][trvCtlrDevId]['boostActive']}, {'key': 'boostMode', 'value': self.globals['trvc'][trvCtlrDevId]['boostMode']},
{'key': 'boostModeUi', 'value': self.globals['trvc'][trvCtlrDevId]['boostModeUi']}, {'key': 'boostStatusUi', 'value': self.globals['trvc'][trvCtlrDevId]['boostStatusUi']},
{'key': 'boostDeltaT', 'value': self.globals['trvc'][trvCtlrDevId]['boostDeltaT']},
{'key': 'boostSetpoint', 'value': int(self.globals['trvc'][trvCtlrDevId]['boostSetpoint'])},
{'key': 'boostMinutes', 'value': self.globals['trvc'][trvCtlrDevId]['boostMinutes']},
{'key': 'boostTimeStart', 'value': self.globals['trvc'][trvCtlrDevId]['boostTimeStart']},
{'key': 'boostTimeEnd', 'value': self.globals['trvc'][trvCtlrDevId]['boostTimeEnd']}, {'key': 'extendActive', 'value': self.globals['trvc'][trvCtlrDevId]['extendActive']},
{'key': 'extendStatusUi', 'value': self.globals['trvc'][trvCtlrDevId]['extendStatusUi']},
{'key': 'extendMinutes', 'value': self.globals['trvc'][trvCtlrDevId]['extendMinutes']},
{'key': 'extendActivatedTime', 'value': self.globals['trvc'][trvCtlrDevId]['extendActivatedTime']},
{'key': 'extendScheduleOriginalTime', 'value': self.globals['trvc'][trvCtlrDevId]['extendScheduleOriginalTime']},
{'key': 'extendScheduleNewTime', 'value': self.globals['trvc'][trvCtlrDevId]['extendScheduleNewTime']},
{'key': 'extendLimitReached', 'value': self.globals['trvc'][trvCtlrDevId]['extendLimitReached']},
{'key': 'callingForHeat', 'value': self.globals['trvc'][trvCtlrDevId]['callingForHeat']},
{'key': 'callingForHeatTrueSSM', 'value': self.globals['trvc'][trvCtlrDevId]['callingForHeatTrueSSM']},
{'key': 'callingForHeatFalseSSM', 'value': self.globals['trvc'][trvCtlrDevId]['callingForHeatFalseSSM']},
{'key': 'eventReceivedDateTimeRemote', 'value': self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRemote']},
{'key': 'zwaveEventReceivedDateTimeTrv', 'value': self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeTrv']},
{'key': 'zwaveEventReceivedDateTimeRemote', 'value': self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote']},
{'key': 'zwaveEventSentDateTimeTrv', 'value': self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeTrv']},
{'key': 'zwaveEventSentDateTimeRemote', 'value': self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeRemote']},
{'key': 'valvePercentageOpen', 'value': self.globals['trvc'][trvCtlrDevId]['valvePercentageOpen']}, {'key': 'hvacHeaterIsOn', 'value': False},
{'key': 'setpointHeat', 'value': self.globals['trvc'][trvCtlrDevId]['setpointHeat']},
dict(key='batteryLevel', value=int(self.globals['trvc'][trvCtlrDevId]['batteryLevel']), uiValue=f'{self.globals["trvc"][trvCtlrDevId]["batteryLevel"]}%'),
dict(key='batteryLevelTrv', value=int(self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv']), uiValue=f'{self.globals["trvc"][trvCtlrDevId]["batteryLevelTrv"]}%'),
dict(key='batteryLevelRemote', value=int(self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote']),
uiValue=f'{self.globals["trvc"][trvCtlrDevId]["batteryLevelRemote"]}%'),
{'key': 'hvacOperationModeTrv', 'value': self.globals['trvc'][trvCtlrDevId]['hvacOperationModeTrv']},
{'key': 'hvacOperationMode', 'value': self.globals['trvc'][trvCtlrDevId]['hvacOperationMode']},
{'key': 'controllerMode', 'value': self.globals['trvc'][trvCtlrDevId]['controllerMode']},
{'key': 'controllerModeUi', 'value': CONTROLLER_MODE_TRANSLATION[self.globals['trvc'][trvCtlrDevId]['controllerMode']]},
{'key': 'temperatureInput1', 'value': self.globals['trvc'][trvCtlrDevId]['temperature'], 'uiValue': f'{self.globals["trvc"][trvCtlrDevId]["temperature"]:.1f} °C'}]
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
if self.globals['trvc'][trvCtlrDevId]['trvSupportsTemperatureReporting']:
keyValueList.append({'key': 'temperatureInput2', 'value': self.globals['trvc'][trvCtlrDevId]['temperatureTrv'],
'uiValue': f'{self.globals["trvc"][trvCtlrDevId]["temperatureTrv"]:.1f} °C'})
keyValueList.append({'key': 'temperatureUi',
'value': f'R: {self.globals["trvc"][trvCtlrDevId]["temperatureRemote"]:.1f} °C, T: {self.globals["trvc"][trvCtlrDevId]["temperatureTrv"]:.1f} °C'})
else:
keyValueList.append({'key': 'temperatureUi', 'value': f'R: {self.globals["trvc"][trvCtlrDevId]["temperatureRemote"]:.1f} °C'})
else:
keyValueList.append({'key': 'temperatureUi', 'value': f'T: {self.globals["trvc"][trvCtlrDevId]["temperatureTrv"]:.1f} °C'})
trvcDev.updateStatesOnServer(keyValueList)
trvcDev.updateStateImageOnServer(indigo.kStateImageSel.HvacAutoMode) # HvacOff - HvacHeatMode - HvacHeating - HvacAutoMode
# Check if CSV Files need initialising
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['setpointHeat', float(self.globals['trvc'][trvCtlrDevId]['setpointHeat'])]])
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['temperatureTrv', float(self.globals['trvc'][trvCtlrDevId]['temperatureTrv'])]])
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['setpointHeatTrv', float(self.globals['trvc'][trvCtlrDevId]['setpointHeatTrv'])]])
if self.globals['trvc'][trvCtlrDevId]['valveDevId'] != 0:
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['valvePercentageOpen', int(self.globals['trvc'][trvCtlrDevId]['valvePercentageOpen'])]])
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['temperatureRemote', float(self.globals['trvc'][trvCtlrDevId]['temperatureRemote'])]])
if self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl']:
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['setpointHeatRemote', float(self.globals['trvc'][trvCtlrDevId]['setpointHeatRemote'])]])
# Set-up schedules
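# Schedule entries are keyed by an integer time in HHMMSS form (e.g. '07:30' becomes 73000); each value is (time UI, setpoint, schedule number, heating on/off)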
scheduleSetpointOff = float(self.globals['trvc'][trvCtlrDevId]['setpointHeatMinimum'])
self.globals['schedules'][trvCtlrDevId]['default'][0] = ('00:00', scheduleSetpointOff, 0, False) # Start of Day
self.globals['schedules'][trvCtlrDevId]['default'][240000] = ('24:00', scheduleSetpointOff, 9, False) # End of Day
if self.globals['trvc'][trvCtlrDevId]['schedule1Enabled']:
scheduleTimeOnUi = self.globals['trvc'][trvCtlrDevId]['schedule1TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][trvCtlrDevId]['schedule1TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][trvCtlrDevId]['schedule1SetpointHeat'])
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 1, True)
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 1, False)
if self.globals['trvc'][trvCtlrDevId]['schedule2Enabled']:
scheduleTimeOnUi = self.globals['trvc'][trvCtlrDevId]['schedule2TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][trvCtlrDevId]['schedule2TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][trvCtlrDevId]['schedule2SetpointHeat'])
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 2, True)
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 2, False)
if self.globals['trvc'][trvCtlrDevId]['schedule3Enabled']:
scheduleTimeOnUi = self.globals['trvc'][trvCtlrDevId]['schedule3TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][trvCtlrDevId]['schedule3TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][trvCtlrDevId]['schedule3SetpointHeat'])
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 3, True)
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 3, False)
if self.globals['trvc'][trvCtlrDevId]['schedule4Enabled']:
scheduleTimeOnUi = self.globals['trvc'][trvCtlrDevId]['schedule4TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][trvCtlrDevId]['schedule4TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][trvCtlrDevId]['schedule4SetpointHeat'])
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 4, True)
self.globals['schedules'][trvCtlrDevId]['default'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 4, False)
self.globals['schedules'][trvCtlrDevId]['default'] = collections.OrderedDict(sorted(self.globals['schedules'][trvCtlrDevId]['default'].items()))
self.globals['schedules'][trvCtlrDevId]['running'] = self.globals['schedules'][trvCtlrDevId]['default'].copy()
self.globals['schedules'][trvCtlrDevId]['dynamic'] = self.globals['schedules'][trvCtlrDevId]['default'].copy()
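# Map each associated physical device (TRV, valve, remote thermostat, radiator sensor) back to this controller so their updates can be routed in deviceUpdated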
if int(self.globals['trvc'][trvCtlrDevId]['trvDevId']) not in self.globals['devicesToTrvControllerTable'].keys():
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['trvDevId']] = dict()
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['trvDevId']]['type'] = TRV
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['trvDevId']]['trvControllerId'] = int(trvCtlrDevId)
if self.globals['trvc'][trvCtlrDevId]['valveDevId'] != 0:
if int(self.globals['trvc'][trvCtlrDevId]['valveDevId']) not in self.globals['devicesToTrvControllerTable'].keys():
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['valveDevId']] = dict()
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['valveDevId']]['type'] = VALVE
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['valveDevId']]['trvControllerId'] = int(trvCtlrDevId)
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
if int(self.globals['trvc'][trvCtlrDevId]['remoteDevId']) not in self.globals['devicesToTrvControllerTable'].keys():
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['remoteDevId']] = dict()
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['remoteDevId']]['type'] = REMOTE
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['remoteDevId']]['trvControllerId'] = int(trvCtlrDevId)
if self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] != 0:
if int(self.globals['trvc'][trvCtlrDevId]['radiatorDevId']) not in self.globals['devicesToTrvControllerTable'].keys():
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['radiatorDevId']] = dict()
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['radiatorDevId']]['type'] = RADIATOR
self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['radiatorDevId']]['trvControllerId'] = int(trvCtlrDevId)
try:
heatingId = int(self.globals['trvc'][trvCtlrDevId]['heatingId'])
if heatingId == 0:
heatingDeviceUi = 'No Device Heat Source control required.'
else:
heatingDeviceUi = f'Device Heat Source \'{indigo.devices[int(self.globals["trvc"][trvCtlrDevId]["heatingId"])].name}\''
heatingVarId = int(self.globals['trvc'][trvCtlrDevId]['heatingVarId'])
if heatingVarId == 0:
heatingVarUi = 'No Variable Heat Source control required.'
else:
heatingVarUi = f'Variable Heat Source \'{indigo.variables[int(self.globals["trvc"][trvCtlrDevId]["heatingVarId"])].name}\''
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] == 0:
if not self.globals['trvc'][trvCtlrDevId]['trvSupportsTemperatureReporting']:
self.logger.error(f'TRV Controller can\'t control TRV \'{trvcDev.name}\' as the TRV does not report temperature and there is no Remote Thermostat defined!')
self.globals['trvc'][trvCtlrDevId]['deviceStarted'] = True
return
else:
self.logger.info(f'Started \'{trvcDev.name}\': Controlling TRV \'{indigo.devices[int(self.globals["trvc"][trvCtlrDevId]["trvDevId"])].name}\';\n{heatingDeviceUi}')
else:
self.logger.info(f'Started \'{trvcDev.name}\': Controlling TRV \'{indigo.devices[int(self.globals["trvc"][trvCtlrDevId]["trvDevId"])].name}\'; '
f'Remote thermostat \'{indigo.devices[int(self.globals["trvc"][trvCtlrDevId]["remoteDevId"])].name}\'; {heatingDeviceUi};\n{heatingVarUi}')
self.globals['trvc'][trvCtlrDevId]['deviceStarted'] = True
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, 0, CMD_DELAY_COMMAND, trvCtlrDevId, [CMD_PROCESS_HEATING_SCHEDULE, 2.0, None]])
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def deviceStopComm(self, trvcDev):
try:
trvCtlrDevId = trvcDev.id
if not self.globals['trvc'][trvCtlrDevId]['deviceStarted']:
self.logger.debug(f'controlTrv: \'{trvcDev.name}\' device stopping but startup not yet completed')
self.globals['trvc'][trvCtlrDevId]['deviceStarted'] = False
if 'trvDevId' in self.globals['trvc'][trvCtlrDevId] and self.globals['trvc'][trvCtlrDevId]['trvDevId'] != 0:
self.globals['zwave']['WatchList'].discard(int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].address))
if 'remoteDevId' in self.globals['trvc'][trvCtlrDevId] and self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
if indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].protocol == indigo.kProtocol.ZWave:
self.globals['zwave']['WatchList'].discard(int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['remoteDevId']].address))
self.logger.info(f"Stopping '{trvcDev.name}'")
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def deviceUpdated(self, origDev, newDev):
def secondsSinceMidnight():
utcnow = datetime.datetime.utcnow()
midnight_utc = datetime.datetime.combine(utcnow.date(), datetime.time(0))
delta = utcnow - midnight_utc
return int(delta.seconds)
try:
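# Race condition detector: count updates per device per second and, above RACE_CONDITION_LIMIT, disable the controller for 60 seconds before re-enabling it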
def check_for_race_condition(device_key, device_name, device_description):
race_condition = False
race_seconds = secondsSinceMidnight()
if self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updateSecondsSinceMidnight'] != race_seconds:
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updateSecondsSinceMidnight'] = race_seconds
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecond'] = 1
self.logger.threaddebug(f'=======> RACE DETECTION FOR {device_name} \'{newDev.name}\': SECONDS SINCE MIDNIGHT = \'{race_seconds}\', COUNT RESET TO 1')
else:
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecond'] += 1
if self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecond'] > \
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecondMaximum']:
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecondMaximum'] = \
self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecond']
self.logger.threaddebug(
f'=======> RACE DETECTION FOR {device_name} \'{newDev.name}\': SECONDS SINCE MIDNIGHT = \'{race_seconds}\', COUNT = \'{self.globals["trvc"][trvCtlrDevId]["raceConditionDetector"][device_key]["updatesInLastSecond"]}\' [MAX = \'{self.globals["trvc"][trvCtlrDevId]["raceConditionDetector"][device_key]["updatesInLastSecondMaximum"]}\'] <=======')
if self.globals['trvc'][trvCtlrDevId]['raceConditionDetector'][device_key]['updatesInLastSecond'] > RACE_CONDITION_LIMIT:
self.logger.error(
f'Potential race condition detected for {device_description} \'{newDev.name}\' in TRV Plugin [deviceUpdated] - TRV Controller device being disabled for 60 seconds!')
indigo.device.enable(trvCtlrDevId, value=False)
# setting a timer to re-enable after 60 seconds
self.globals['timers']['raceCondition'][trvCtlrDevId] = threading.Timer(60.0, self.deviceRaceConditionReEnableTriggered, [trvCtlrDevId])
self.globals['timers']['raceCondition'][trvCtlrDevId].daemon = True
self.globals['timers']['raceCondition'][trvCtlrDevId].start()
race_condition = True
return race_condition # Note if True then the 'finally:' statement at the end of deviceUpdated method will return the correct values to Indigo
device_updated_prefix = "=" * 22 + "> "  # 22 equal signs as first part of prefix
if (newDev.deviceTypeId == 'trvController' and newDev.configured and newDev.id in self.globals['trvc']
and self.globals['trvc'][newDev.id]['deviceStarted']):
# As this is a TRV Controller device, only log the updates - don't queue the update for the TRV Handler, otherwise it will loop!
trvCtlrDevId = newDev.id
# Check for Race condition
race_condition_result = check_for_race_condition("trvController", "TRV CONTROLLER", "TRV Controller")
if race_condition_result:
return # Note that the 'finally:' statement at the end of this deviceUpdated method will return the correct values to Indigo
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulComm'] = newDev.lastSuccessfulComm
updateLogItems = list()
if origDev.hvacMode != newDev.hvacMode:
oldInternalHvacMode = self.globals['trvc'][trvCtlrDevId]['hvacOperationMode']
self.globals['trvc'][trvCtlrDevId]['hvacOperationMode'] = newDev.hvacMode
updateLogItems.append(
f'HVAC Operation Mode updated from {HVAC_TRANSLATION[origDev.hvacMode]} to {HVAC_TRANSLATION[newDev.hvacMode]} [Internal store was = {HVAC_TRANSLATION[oldInternalHvacMode]} and is now = {HVAC_TRANSLATION[int(self.globals["trvc"][trvCtlrDevId]["hvacOperationMode"])]}]')
if (float(origDev.temperatures[0]) != float(newDev.temperatures[0])) or (self.globals['trvc'][trvCtlrDevId]['temperature'] != float(newDev.temperatures[0])):
origTemp = float(origDev.temperatures[0])
newTemp = float(newDev.temperatures[0])
updateLogItems.append(f'Temperature updated from {origTemp} to {newTemp} [Internal store = {self.globals["trvc"][trvCtlrDevId]["temperature"]}]')
if origDev.states['controllerMode'] != newDev.states['controllerMode']:
oldInternalControllerMode = self.globals['trvc'][trvCtlrDevId]['controllerMode']
self.globals['trvc'][trvCtlrDevId]['controllerMode'] = newDev.states['controllerMode']
updateLogItems.append(
f'Mode updated from {CONTROLLER_MODE_TRANSLATION[origDev.states["controllerMode"]]} to {CONTROLLER_MODE_TRANSLATION[newDev.states["controllerMode"]]} [Internal store was = {CONTROLLER_MODE_TRANSLATION[oldInternalControllerMode]} and is now = {CONTROLLER_MODE_TRANSLATION[self.globals["trvc"][trvCtlrDevId]["controllerMode"]]}]')
if float(origDev.heatSetpoint) != float(newDev.heatSetpoint):
oldInternalSetpointHeat = self.globals['trvc'][trvCtlrDevId]['setpointHeat']
self.globals['trvc'][trvCtlrDevId]['setpointHeat'] = float(newDev.heatSetpoint)
updateLogItems.append(
f'Heat Setpoint changed from {origDev.heatSetpoint} to {newDev.heatSetpoint} [Internal store was = {oldInternalSetpointHeat} and is now = {self.globals["trvc"][trvCtlrDevId]["setpointHeat"]}]')
# Update CSV files if TRV Controller Heat Setpoint updated
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['setpointHeat', self.globals['trvc'][trvCtlrDevId]['setpointHeat']]])
if len(updateLogItems) > 0:
device_updated_report = (
f"\n\n{device_updated_prefix}DEVICE UPDATED [{self.globals['deviceUpdatedSequenceCount']}]: TRV Controller '{newDev.name}'; Last Communication at {newDev.lastSuccessfulComm}\n")
for itemToReport in updateLogItems:
device_updated_report = (f"{device_updated_report}{device_updated_prefix}{itemToReport}\n")
self.logger.debug(device_updated_report)
elif int(newDev.id) in self.globals['devicesToTrvControllerTable'].keys(): # Check if a TRV device, Remote Thermostat or Radiator Temperature sensor already stored in table
deviceUpdatedLog = u'\n\n======================================================================================================================================================\n=='
deviceUpdatedLog = deviceUpdatedLog + u'\n== Method: \'deviceUpdated\''
self.globals['deviceUpdatedSequenceCount'] += 1
deviceUpdatedLog = deviceUpdatedLog + f'\n== Sequence: {self.globals["deviceUpdatedSequenceCount"]}'
deviceUpdatedLog = deviceUpdatedLog + f'\n== Device: {DEVICE_TYPE_TRANSLATION[self.globals["devicesToTrvControllerTable"][newDev.id]["type"]]} - \'{newDev.name}\''
deviceUpdatedLog = deviceUpdatedLog + f'\n== Last Communication: {newDev.lastSuccessfulComm}'
trvCtlrDevId = int(self.globals['devicesToTrvControllerTable'][newDev.id]['trvControllerId'])
if indigo.devices[trvCtlrDevId].enabled:
trvControllerDev = indigo.devices[trvCtlrDevId]
updateRequested = False
updateList = dict()
updateLogItems = dict()
if self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == TRV or self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == VALVE:
race_condition_result = check_for_race_condition("trv", "TRV", "TRV device managed by TRV Controller")
if race_condition_result:
return # Note that the 'finally:' statement at the end of this deviceUpdated method will return the correct values to Indigo
# The first checks are general across all sub-devices, i.e. thermostat and valve
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommTrv'] = newDev.lastSuccessfulComm
# Check if Z-Wave Event has been received
if self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountTrv'] > self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousTrv']:
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousTrv'] = self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountTrv']
updateRequested = True
updateList[UPDATE_ZWAVE_EVENT_RECEIVED_TRV] = self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeTrv']
updateLogItems[UPDATE_ZWAVE_EVENT_RECEIVED_TRV] = f'TRV Z-Wave event received. Time updated to \'{self.globals["trvc"][trvCtlrDevId]["zwaveEventReceivedDateTimeTrv"]}\'. Received count now totals: {self.globals["trvc"][trvCtlrDevId]["zwaveReceivedCountTrv"]}'
# Check if Z-Wave Event has been sent
if self.globals['trvc'][trvCtlrDevId]['zwaveSentCountTrv'] > self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousTrv']:
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousTrv'] = self.globals['trvc'][trvCtlrDevId]['zwaveSentCountTrv']
updateRequested = True
updateList[UPDATE_ZWAVE_EVENT_SENT_TRV] = self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeTrv']
updateLogItems[UPDATE_ZWAVE_EVENT_SENT_TRV] = f'TRV Z-Wave event sent. Time updated to \'{self.globals["trvc"][trvCtlrDevId]["zwaveEventSentDateTimeTrv"]}\'. Sent count now totals: {self.globals["trvc"][trvCtlrDevId]["zwaveSentCountTrv"]}'
# Check the wakeup interval in case it has changed
wakeupInterval = int(indigo.devices[self.globals['trvc'][trvCtlrDevId]['trvDevId']].globalProps["com.perceptiveautomation.indigoplugin.zwave"]["zwWakeInterval"])
if int(self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv']) != wakeupInterval:
updateRequested = True
updateList[UPDATE_ZWAVE_WAKEUP_INTERVAL] = wakeupInterval
updateLogItems[UPDATE_ZWAVE_WAKEUP_INTERVAL] = f'TRV Z-Wave wakeup interval changed from \'{self.globals["trvc"][trvCtlrDevId]["zwaveWakeupIntervalTrv"]}\' to \'{wakeupInterval}\''
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] = wakeupInterval
# if newDev.globalProps['com.perceptiveautomation.indigoplugin.zwave']['zwDevSubIndex'] == 0: # Thermostat
if self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == TRV:
if trvControllerDev.states['controllerMode'] != self.globals['trvc'][trvCtlrDevId]['controllerMode']:
updateRequested = True
updateList[UPDATE_CONTROLLER_MODE] = self.globals['trvc'][trvCtlrDevId]['controllerMode']
updateLogItems[UPDATE_CONTROLLER_MODE] = (
f'Controller Mode updated from {CONTROLLER_MODE_TRANSLATION[trvControllerDev.states["controllerMode"]]} to {CONTROLLER_MODE_TRANSLATION[self.globals["trvc"][trvCtlrDevId]["controllerMode"]]}')
if 'batteryLevel' in newDev.states:
# self.logger.debug(f'=====================>>>> Battery Level for TRV device \'{origDev.name}\' - OLD: {origDev.batteryLevel}, NEW: {newDev.batteryLevel}')
if (origDev.batteryLevel != newDev.batteryLevel) or (self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv'] != newDev.batteryLevel):
self.globals['trvc'][trvCtlrDevId]['batteryLevelTrv'] = newDev.batteryLevel
updateRequested = True
updateList[UPDATE_TRV_BATTERY_LEVEL] = newDev.batteryLevel
updateLogItems[UPDATE_TRV_BATTERY_LEVEL] = (
f'TRV Battery Level updated from {origDev.batteryLevel} to {newDev.batteryLevel} [Internal store was = \'{self.globals["trvc"][trvCtlrDevId]["batteryLevelTrv"]}\']')
if self.globals['trvc'][trvCtlrDevId]['trvSupportsTemperatureReporting']:
if (float(origDev.temperatures[0]) != float(newDev.temperatures[0])) or (self.globals['trvc'][trvCtlrDevId]['temperatureTrv'] != float(newDev.temperatures[0])):
origTemp = float(origDev.temperatures[0])
newTemp = float(newDev.temperatures[0])
updateRequested = True
updateList[UPDATE_TRV_TEMPERATURE] = newTemp
updateLogItems[UPDATE_TRV_TEMPERATURE] = (
f'Temperature updated from {origTemp} to {newTemp} [Internal store was = \'{self.globals["trvc"][trvCtlrDevId]["temperatureTrv"]}\']')
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['temperatureTrv', newTemp]])
if (int(origDev.hvacMode) != int(newDev.hvacMode)) or (int(self.globals['trvc'][trvCtlrDevId]['hvacOperationModeTrv']) != int(newDev.hvacMode)):
hvacMode = newDev.hvacMode
if hvacMode == HVAC_COOL or hvacMode == HVAC_AUTO: # Don't allow HVAC Mode of Cool or Auto
hvacMode = RESET_TO_HVAC_HEAT
updateRequested = True
updateList[UPDATE_TRV_HVAC_OPERATION_MODE] = hvacMode
if newDev.hvacMode == hvacMode:
updateLogItems[UPDATE_TRV_HVAC_OPERATION_MODE] = f'TRV HVAC Operation Mode updated from \'{HVAC_TRANSLATION[origDev.hvacMode]}\' to \'{HVAC_TRANSLATION[newDev.hvacMode]}\' [Internal store was = \'{HVAC_TRANSLATION[int(self.globals["trvc"][trvCtlrDevId]["hvacOperationModeTrv"])]}\']'
else:
updateLogItems[
UPDATE_TRV_HVAC_OPERATION_MODE] = f'TRV HVAC Operation Mode update from \'{HVAC_TRANSLATION[origDev.hvacMode]}\' to \'{HVAC_TRANSLATION[newDev.hvacMode]}\', overridden and reset to \'{HVAC_TRANSLATION[hvacMode]}\' [Internal store was = \'{HVAC_TRANSLATION[self.globals["trvc"][trvCtlrDevId]["hvacOperationModeTrv"]]}\']'
if newDev.model == 'Thermostat (Spirit)':
if 'zwaveHvacOperationModeID' in newDev.states:
if origDev.states['zwaveHvacOperationModeID'] != newDev.states['zwaveHvacOperationModeID']:
zwaveHvacOperationModeID = newDev.states['zwaveHvacOperationModeID']
if zwaveHvacOperationModeID == HVAC_COOL: # Don't allow Cool
zwaveHvacOperationModeID = RESET_TO_HVAC_HEAT
elif zwaveHvacOperationModeID == HVAC_AUTO: # Don't allow Auto
zwaveHvacOperationModeID = RESET_TO_HVAC_HEAT
updateRequested = True
updateList[UPDATE_ZWAVE_HVAC_OPERATION_MODE_ID] = zwaveHvacOperationModeID
if newDev.states['zwaveHvacOperationModeID'] == zwaveHvacOperationModeID:
updateLogItems[UPDATE_ZWAVE_HVAC_OPERATION_MODE_ID] = f'ZWave HVAC Operation Mode updated from \'{HVAC_TRANSLATION[origDev.states["zwaveHvacOperationModeID"]]}\' to \'{HVAC_TRANSLATION[newDev.states["zwaveHvacOperationModeID"]]}\''
else:
updateLogItems[UPDATE_ZWAVE_HVAC_OPERATION_MODE_ID] = f'ZWave HVAC Operation Mode update from \'{HVAC_TRANSLATION[origDev.states["zwaveHvacOperationModeID"]]}\' to \'{HVAC_TRANSLATION[newDev.states["zwaveHvacOperationModeID"]]}\', overridden and reset to \'{HVAC_TRANSLATION[zwaveHvacOperationModeID]}\''
# if self.globals['trvc'][trvCtlrDevId]['trvSupportsManualSetpoint']:
# if (float(origDev.heatSetpoint) != float(newDev.heatSetpoint)):
# updateRequested = True
# if self.globals['trvc'][trvCtlrDevId]['controllerMode'] == CONTROLLER_MODE_TRV_HARDWARE:
# updateList[UPDATE_TRV_HEAT_SETPOINT_FROM_DEVICE] = newDev.heatSetpoint
# updateLogItems[UPDATE_TRV_HEAT_SETPOINT_FROM_DEVICE] = f'TRV Heat Setpoint changed on device from {origDev.heatSetpoint} to {newDev.heatSetpoint} [Internal store = {self.globals["trvc"][trvCtlrDevId]["setpointHeatTrv"]}]'
# else:
# updateList[UPDATE_TRV_HEAT_SETPOINT] = newDev.heatSetpoint
# updateLogItems[UPDATE_TRV_HEAT_SETPOINT] = f'TRV Heat Setpoint changed from {origDev.heatSetpoint} to {newDev.heatSetpoint} [Internal store = {self.globals["trvc"][trvCtlrDevId]["setpointHeatTrv"]}]'
# if self.globals['trvc'][trvCtlrDevId]['trvSupportsManualSetpoint']:
if float(origDev.heatSetpoint) != float(newDev.heatSetpoint):
updateRequested = True
if self.globals['trvc'][trvCtlrDevId]['controllerMode'] == CONTROLLER_MODE_TRV_HARDWARE:
updateList[UPDATE_TRV_HEAT_SETPOINT_FROM_DEVICE] = newDev.heatSetpoint
updateLogItems[UPDATE_TRV_HEAT_SETPOINT_FROM_DEVICE] = (
f'TRV Heat Setpoint changed on device from {origDev.heatSetpoint} to {newDev.heatSetpoint} [Internal store was = {self.globals["trvc"][trvCtlrDevId]["setpointHeatTrv"]}]')
else:
updateList[UPDATE_TRV_HEAT_SETPOINT] = newDev.heatSetpoint
updateLogItems[UPDATE_TRV_HEAT_SETPOINT] = (
f'TRV Heat Setpoint changed from {origDev.heatSetpoint} to {newDev.heatSetpoint} [Internal store was = {self.globals["trvc"][trvCtlrDevId]["setpointHeatTrv"]}]')
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['setpointHeatTrv', newDev.heatSetpoint]])
# elif newDev.globalProps['com.perceptiveautomation.indigoplugin.zwave']['zwDevSubIndex'] == 1: # Valve ?
elif self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == VALVE:
if newDev.model == 'Thermostat (Spirit)': # Check to make sure it is a valve
if int(origDev.brightness) != int(newDev.brightness) or int(self.globals['trvc'][trvCtlrDevId]['valvePercentageOpen']) != int(newDev.brightness):
updateRequested = True
updateList[UPDATE_CONTROLLER_VALVE_PERCENTAGE] = int(newDev.brightness)
updateLogItems[UPDATE_CONTROLLER_VALVE_PERCENTAGE] = (
f'Valve Percentage Open updated from \'{origDev.brightness}\' to \'{newDev.brightness}\' [Internal store was = {self.globals["trvc"][trvCtlrDevId]["valvePercentageOpen"]}]')
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['valvePercentageOpen', int(newDev.brightness)]])
elif self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == REMOTE:
race_condition_result = check_for_race_condition("remote", "REMOTE", "Remote Thermostat device managed by TRV Controller")
if race_condition_result:
return # Note that the 'finally:' statement at the end of this deviceUpdated method will return the correct values to Indigo
if 'batteryLevel' in newDev.states:
if (origDev.batteryLevel != newDev.batteryLevel) or (self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote'] != newDev.batteryLevel):
self.globals['trvc'][trvCtlrDevId]['batteryLevelRemote'] = newDev.batteryLevel
updateRequested = True
updateList[UPDATE_REMOTE_BATTERY_LEVEL] = newDev.batteryLevel
updateLogItems[UPDATE_REMOTE_BATTERY_LEVEL] = (
f'Remote Battery Level updated from {origDev.batteryLevel} to {newDev.batteryLevel} [Internal store was = \'{self.globals["trvc"][trvCtlrDevId]["batteryLevelRemote"]}\']')
if trvControllerDev.states['controllerMode'] != self.globals['trvc'][trvCtlrDevId]['controllerMode']:
updateRequested = True
updateList[UPDATE_CONTROLLER_MODE] = self.globals['trvc'][trvCtlrDevId]['controllerMode']
updateLogItems[UPDATE_CONTROLLER_MODE] = (
f'Controller Mode updated from {CONTROLLER_MODE_TRANSLATION[trvControllerDev.states["controllerMode"]]} to {CONTROLLER_MODE_TRANSLATION[self.globals["trvc"][trvCtlrDevId]["controllerMode"]]}')
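# Determine the remote thermostat's temperature by probing the state names used by the different supported device types in turn, falling back to a default of 10.0 if none match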
try:
origTemp = float(origDev.temperatures[0])
newTemp = float(newDev.temperatures[0]) # Remote
except AttributeError:
try:
origTemp = float(origDev.states['sensorValue'])
newTemp = float(newDev.states['sensorValue']) # e.g. Aeon 4 in 1
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['temperatureInput1'])
newTemp = float(newDev.states['temperatureInput1']) # e.g. Secure SRT321 / HRT4-ZW
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['temperature'])
newTemp = float(newDev.states['temperature']) # e.g. Oregon Scientific Temp Sensor
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['Temperature'])
newTemp = float(newDev.states['Temperature']) # e.g. Netatmo
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['sensorValue'])
newTemp = float(newDev.states['sensorValue']) # e.g. HeatIT TF021
except (AttributeError, KeyError):
origTemp = 10.0 #
newTemp = 10.0
self.logger.error(f'\'{newDev.name}\' is an unknown Remote Thermostat type - remote support disabled for \'{trvControllerDev.name}\'')
del self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['remoteDevId']] # Disable Remote Support
self.globals['trvc'][trvCtlrDevId]['remoteDevId'] = 0
if self.globals['trvc'][trvCtlrDevId]['remoteDevId'] != 0:
# origTemp should already have had the offset applied - just need to add it to newTemp to ensure comparison is valid
newTempPlusOffset = newTemp + float(self.globals['trvc'][trvCtlrDevId]['remoteTempOffset'])
if origTemp != newTempPlusOffset:
updateRequested = True
updateList[UPDATE_REMOTE_TEMPERATURE] = newTemp # Send through the original (non-offset) temperature
updateLogItems[UPDATE_REMOTE_TEMPERATURE] = (
f'Temperature updated from {origTemp} to {newTempPlusOffset} [Internal store = \'{self.globals["trvc"][trvCtlrDevId]["temperatureRemote"]}\']')
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['temperatureRemote', newTempPlusOffset]]) # The offset temperature for the CSV file
if self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl']:
if float(newDev.heatSetpoint) != float(origDev.heatSetpoint):
updateRequested = True
updateList[UPDATE_REMOTE_HEAT_SETPOINT_FROM_DEVICE] = newDev.heatSetpoint
updateLogItems[UPDATE_REMOTE_HEAT_SETPOINT_FROM_DEVICE] = (
f'Remote Heat Setpoint changed from {origDev.heatSetpoint} to {newDev.heatSetpoint} [Internal store was = {self.globals["trvc"][trvCtlrDevId]["setpointHeatRemote"]}]')
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['setpointHeatRemote', float(newDev.heatSetpoint)]])
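# Track remote communication activity: Z-Wave remotes report received/sent event counts, other protocols are tracked by last successful communication time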
if newDev.protocol == indigo.kProtocol.ZWave:
# Check if Z-Wave Event has been received
if self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote'] > self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousRemote']:
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousRemote'] = self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote']
updateRequested = True
updateList[UPDATE_ZWAVE_EVENT_RECEIVED_REMOTE] = self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote']
updateLogItems[UPDATE_ZWAVE_EVENT_RECEIVED_REMOTE] = f'Remote Thermostat Z-Wave event received. Time updated to \'{self.globals["trvc"][trvCtlrDevId]["zwaveEventReceivedDateTimeRemote"]}\'. Received count now totals: {self.globals["trvc"][trvCtlrDevId]["zwaveReceivedCountPreviousRemote"]}'
# Check if Z-Wave Event has been sent
if self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote'] > self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousRemote']:
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousRemote'] = self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote']
updateRequested = True
updateList[UPDATE_ZWAVE_EVENT_SENT_REMOTE] = self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeRemote']
updateLogItems[UPDATE_ZWAVE_EVENT_SENT_REMOTE] = f'Remote Thermostat Z-Wave event sent. Time updated to \'{self.globals["trvc"][trvCtlrDevId]["zwaveEventSentDateTimeRemote"]}\'. Sent count now totals: {self.globals["trvc"][trvCtlrDevId]["zwaveSentCountRemote"]}'
else:
if newDev.lastSuccessfulComm != self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRemote']:
self.globals['trvc'][trvCtlrDevId]['eventReceivedCountRemote'] += 1
updateRequested = True
updateList[UPDATE_EVENT_RECEIVED_REMOTE] = f'{newDev.lastSuccessfulComm}'
updateLogItems[UPDATE_EVENT_RECEIVED_REMOTE] = f'Remote Thermostat event received. Time updated to \'{newDev.lastSuccessfulComm}\'. Received count now totals: {self.globals["trvc"][trvCtlrDevId]["eventReceivedCountRemote"]}'
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRemote'] = newDev.lastSuccessfulComm
elif self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == RADIATOR:
race_condition_result = check_for_race_condition("radiator", "RADIATOR", "Radiator Temperature Sensor device managed by TRV Controller")
if race_condition_result:
return # Note that the 'finally:' statement at the end of this deviceUpdated method will return the correct values to Indigo
if 'batteryLevel' in newDev.states:
if (origDev.batteryLevel != newDev.batteryLevel) or (self.globals['trvc'][trvCtlrDevId]['batteryLevelRadiator'] != newDev.batteryLevel):
self.globals['trvc'][trvCtlrDevId]['batteryLevelRadiator'] = newDev.batteryLevel
updateRequested = True
updateList[UPDATE_RADIATOR_BATTERY_LEVEL] = newDev.batteryLevel
updateLogItems[UPDATE_RADIATOR_BATTERY_LEVEL] = (
f'Radiator Battery Level updated from {origDev.batteryLevel} to {newDev.batteryLevel} [Internal store was = \'{self.globals["trvc"][trvCtlrDevId]["batteryLevelRadiator"]}\']')
if trvControllerDev.states['controllerMode'] != self.globals['trvc'][trvCtlrDevId]['controllerMode']:
updateRequested = True
updateList[UPDATE_CONTROLLER_MODE] = self.globals['trvc'][trvCtlrDevId]['controllerMode']
updateLogItems[UPDATE_CONTROLLER_MODE] = (
f'Controller Mode updated from {CONTROLLER_MODE_TRANSLATION[trvControllerDev.states["controllerMode"]]} to {CONTROLLER_MODE_TRANSLATION[self.globals["trvc"][trvCtlrDevId]["controllerMode"]]}')
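# Determine the radiator temperature sensor's reading by probing the state names used by the different supported device types in turn, falling back to a default of 10.0 if none match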
try:
origTemp = float(origDev.temperatures[0])
newTemp = float(newDev.temperatures[0]) # Radiator
except AttributeError:
try:
origTemp = float(origDev.states['sensorValue'])
newTemp = float(newDev.states['sensorValue']) # e.g. Aeon 4 in 1
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['temperatureInput1'])
newTemp = float(newDev.states['temperatureInput1']) # e.g. Secure SRT321 / HRT4-ZW
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['temperature'])
newTemp = float(newDev.states['temperature']) # e.g. Oregon Scientific Temp Sensor
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['Temperature'])
newTemp = float(newDev.states['Temperature']) # e.g. Netatmo
except (AttributeError, KeyError):
try:
origTemp = float(origDev.states['sensorValue'])
newTemp = float(newDev.states['sensorValue']) # e.g. HeatIT TF021
except (AttributeError, KeyError):
origTemp = 10.0 #
newTemp = 10.0
self.logger.error(f'\'{newDev.name}\' is an unknown Radiator Temperature Sensor type - radiator temperature support disabled for \'{trvControllerDev.name}\'')
del self.globals['devicesToTrvControllerTable'][self.globals['trvc'][trvCtlrDevId]['radiatorDevId']]  # Disable Radiator Temperature Sensor support
self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] = 0
if self.globals['trvc'][trvCtlrDevId]['radiatorDevId'] != 0:
# No temperature offset is applied to the Radiator Temperature Sensor, so origTemp and newTemp can be compared directly
if origTemp != newTemp:
updateRequested = True
updateList[UPDATE_RADIATOR_TEMPERATURE] = newTemp # Send through the original (non-offset) temperature
updateLogItems[UPDATE_RADIATOR_TEMPERATURE] = (
f'Temperature updated from {origTemp} to {newTemp} [Internal store = \'{self.globals["trvc"][trvCtlrDevId]["temperatureRadiator"]}\']')
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFiles']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.globals['queues']['trvHandler'].put(
[QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_CSV_FILE, trvCtlrDevId, ['temperatureRadiator', newTemp]])  # Radiator temperature for the CSV file
# if newDev.protocol == indigo.kProtocol.ZWave:
# # Check if Z-Wave Event has been received
# if self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote'] > self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousRemote']:
# self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountPreviousRemote'] = self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote']
# updateRequested = True
# updateList[UPDATE_ZWAVE_EVENT_RECEIVED_REMOTE] = self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote']
# updateLogItems[UPDATE_ZWAVE_EVENT_RECEIVED_REMOTE] = f'Remote Thermostat Z-Wave event received. Time updated to \'{self.globals["trvc"][trvCtlrDevId]["zwaveEventReceivedDateTimeRemote"]}\'. Received count now totals: {self.globals["trvc"][trvCtlrDevId]["zwaveReceivedCountPreviousRemote"]}'
#
# # Check if Z-Wave Event has been sent
# if self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote'] > self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousRemote']:
# self.globals['trvc'][trvCtlrDevId]['zwaveSentCountPreviousRemote'] = self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote']
# updateRequested = True
# updateList[UPDATE_ZWAVE_EVENT_SENT_REMOTE] = self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeRemote']
# updateLogItems[UPDATE_ZWAVE_EVENT_SENT_REMOTE] = f'Remote Thermostat Z-Wave event sent. Time updated to \'{self.globals["trvc"][trvCtlrDevId]["zwaveEventSentDateTimeRemote"]}\'. Sent count now totals: {self.globals["trvc"][trvCtlrDevId]["zwaveSentCountRemote"]}'
# else:
# if newDev.lastSuccessfulComm != self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRemote']:
# self.globals['trvc'][trvCtlrDevId]['eventReceivedCountRemote'] += 1
# updateRequested = True
# updateList[UPDATE_EVENT_RECEIVED_REMOTE] = f'{newDev.lastSuccessfulComm}'
# updateLogItems[UPDATE_EVENT_RECEIVED_REMOTE] = f'Remote Thermostat event received. Time updated to \'{newDev.lastSuccessfulComm}\'. Received count now totals: {self.globals["trvc"][trvCtlrDevId]["eventReceivedCountRemote"]}'
if newDev.lastSuccessfulComm != self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRadiator']:
self.globals['trvc'][trvCtlrDevId]['eventReceivedCountRadiator'] += 1
updateRequested = True
updateList[UPDATE_EVENT_RECEIVED_RADIATOR] = f'{newDev.lastSuccessfulComm}'
updateLogItems[UPDATE_EVENT_RECEIVED_RADIATOR] = f'Radiator Temperature Sensor event received. Time updated to \'{newDev.lastSuccessfulComm}\'. Received count now totals: {self.globals["trvc"][trvCtlrDevId]["eventReceivedCountRadiator"]}'
self.globals['trvc'][trvCtlrDevId]['lastSuccessfulCommRadiator'] = newDev.lastSuccessfulComm
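# If any changes of interest were detected, queue them for processing by the TRV Handler thread rather than updating the TRV Controller states inline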
if updateRequested:
deviceUpdatedLog = deviceUpdatedLog + f'\n== List of states to be queued for update by TRVHANDLER:'
for itemToUpdate in updateList.items():
updateKey = itemToUpdate[0]
updateValue = itemToUpdate[1]
deviceUpdatedLog = deviceUpdatedLog + f'\n== > Description = {UPDATE_TRANSLATION[updateKey]}, Value = {updateValue}'
queuedCommand = None
if self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == TRV:
queuedCommand = CMD_UPDATE_TRV_STATES
elif self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == VALVE:
queuedCommand = CMD_UPDATE_VALVE_STATES
elif self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == REMOTE:
queuedCommand = CMD_UPDATE_REMOTE_STATES
elif self.globals['devicesToTrvControllerTable'][newDev.id]['type'] == RADIATOR:
queuedCommand = CMD_UPDATE_RADIATOR_STATES
if queuedCommand is not None:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
deviceUpdatedLog = deviceUpdatedLog + f'\n== Description of updates that will be performed by TRVHANDLER:'
for itemToUpdate in updateLogItems.items():
# updateKey = itemToUpdate[0]
updateValue = itemToUpdate[1]
deviceUpdatedLog = deviceUpdatedLog + f'\n== > {updateValue}'
deviceUpdatedLog = deviceUpdatedLog + u'\n==\n======================================================================================================================================================\n\n'
else:
deviceUpdatedLog = deviceUpdatedLog + f'\n==\n== No updates to \'{DEVICE_TYPE_TRANSLATION[self.globals["devicesToTrvControllerTable"][newDev.id]["type"]]}\' that are of interest to the plugin'
deviceUpdatedLog = deviceUpdatedLog + u'\n==\n======================================================================================================================================================\n\n'
# deviceUpdatedLog = '' # TODO: Looks like this was a bug unless it was to suppress this particular message?
if len(deviceUpdatedLog) > 0:
self.logger.debug(deviceUpdatedLog)
# else:
#
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
finally:
indigo.PluginBase.deviceUpdated(self, origDev, newDev)
def getActionConfigUiValues(self, valuesDict, typeId, actionId):
try:
self.logger.debug(f'getActionConfigUiValues: typeId [{typeId}], actionId [{actionId}], pluginProps[{valuesDict}]')
errorDict = indigo.Dict()
# if typeId == "processUpdateSchedule":
# devId = actionId # TRV Controller Device Id
if typeId == "processBoost":
boostMode = int(valuesDict.get('boostMode', BOOST_MODE_NOT_SET))
if boostMode == BOOST_MODE_NOT_SET:
valuesDict['boostMode'] = str(BOOST_MODE_SELECT)
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
finally:
return valuesDict, errorDict # noqa [Reference before assignment]
def getDeviceConfigUiValues(self, pluginProps, typeId, devId):
try:
if 'remoteDeltaMax' not in pluginProps:
pluginProps['remoteDeltaMax'] = pluginProps.get('remoteTRVDeltaMax', '5.0') # This is a fix to transfer the old name value (remoteTRVDeltaMax) to the new name value (remoteDeltaMax)
# if not 'trvDeltaMax' in pluginProps:
# pluginProps['trvDeltaMax'] = '0.0'
if 'heatingId' not in pluginProps:
pluginProps['heatingId'] = '-1'
if 'heatingVarId' not in pluginProps:
pluginProps['heatingVarId'] = '-1'
if 'forceTrvOnOff' in pluginProps and 'enableTrvOnOff' not in pluginProps:
pluginProps['enableTrvOnOff'] = pluginProps['forceTrvOnOff']
del pluginProps['forceTrvOnOff']
if 'overrideSetpointHeatMaximum' not in pluginProps:
pluginProps['overrideSetpointHeatMaximum'] = False
if 'overrideSetpointHeatMaximumValue' not in pluginProps:
pluginProps['overrideSetpointHeatMaximumValue'] = 0.0
if 'trvDeviceSetpointHeatMaximum' not in pluginProps:
pluginProps['trvDeviceSetpointHeatMaximum'] = pluginProps['setpointHeatMaximum']
if 'remoteThermostatControlEnabled' not in pluginProps:  # Ensure the 'remoteThermostatControlEnabled' property is always present, defaulting to False
pluginProps['remoteThermostatControlEnabled'] = False
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
finally:
return super(Plugin, self).getDeviceConfigUiValues(pluginProps, typeId, devId)
def getPrefsConfigUiValues(self):
prefsConfigUiValues = self.pluginPrefs
if "trvVariableFolderName" not in prefsConfigUiValues:
prefsConfigUiValues["trvVariableFolderName"] = 'TRV'
if "disableHeatSourceDeviceListFilter" not in prefsConfigUiValues:
prefsConfigUiValues["disableHeatSourceDeviceListFilter"] = False
if "delayQueueSeconds" not in prefsConfigUiValues:
prefsConfigUiValues["delayQueueSeconds"] = 0
return prefsConfigUiValues
def shutdown(self):
self.logger.debug('Shutdown called')
self.logger.info('\'TRV Controller\' Plugin shutdown complete')
def startup(self):
indigo.devices.subscribeToChanges()
# Subscribe to incoming raw Z-Wave command bytes
indigo.zwave.subscribeToIncoming()
# Subscribe to outgoing raw Z-Wave command bytes
indigo.zwave.subscribeToOutgoing()
# Initialise dictionary to store internal details about the Z-wave Interpreter
self.globals[ZWI] = dict()
self.globals[ZWI][ZWI_INSTANCE] = ZwaveInterpreter(self.exception_handler, self.logger, indigo.devices) # Instantiate and initialise Z-Wave Interpreter Object for this device
# TODO: remove this - 18-March-2022
# ZwaveInterpreter(self.exception_handler, self.logger, indigo.devices) # noqa [Defined outside __init__] Instantiate and initialise Z-Wave Interpreter Object
# Create trvHandler process queue
self.globals['queues']['trvHandler'] = queue.PriorityQueue() # Used to queue trvHandler commands
self.globals['queues']['delayHandler'] = queue.Queue()
self.globals['queues']['initialised'] = True
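# Start the trvHandler and delayHandler background threads that service the queues created above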
self.globals['threads']['trvHandler']['event'] = threading.Event()
self.globals['threads']['trvHandler']['thread'] = ThreadTrvHandler(self.globals, self.globals['threads']['trvHandler']['event'])
# self.globals['threads']['trvHandler']['thread'].daemon = True
self.globals['threads']['trvHandler']['thread'].start()
self.globals['threads']['delayHandler']['event'] = threading.Event()
self.globals['threads']['delayHandler']['thread'] = ThreadDelayHandler(self.globals, self.globals['threads']['delayHandler']['event'])
# self.globals['threads']['delayHandler']['thread'].daemon = True
self.globals['threads']['delayHandler']['thread'].start()
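# Set up a daemon timer that will trigger the re-stating of the heating schedules after the calculated number of seconds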
try:
secondsUntilSchedulesRestated = calculateSecondsUntilSchedulesRestated()
self.globals['timers']['reStateSchedules'] = threading.Timer(float(secondsUntilSchedulesRestated), self.restateSchedulesTriggered, [secondsUntilSchedulesRestated])
self.globals['timers']['reStateSchedules'].daemon = True
self.globals['timers']['reStateSchedules'].start()
self.logger.info(f'TRV Controller has calculated the number of seconds until schedules are restated as {secondsUntilSchedulesRestated}')
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
self.logger.info('\'TRV Controller\' initialization complete')
def stopConcurrentThread(self):
self.logger.debug('Thread shutdown called')
self.stopThread = True  # noqa - Instance attribute stopThread defined outside __init__
def validateActionConfigUi(self, valuesDict, typeId, actionId):
try:
self.logger.debug(f'Validate Action Config UI: typeId = \'{typeId}\', actionId = \'{actionId}\', ValuesDict =\n{valuesDict}\n')
if typeId == "processUpdateSchedule":
valuesDict['setpointHeatMinimum'] = float(self.globals['trvc'][actionId]['setpointHeatMinimum'])
valuesDict['setpointHeatMaximum'] = float(self.globals['trvc'][actionId]['setpointHeatMaximum'])
# Suppress PyCharm warnings
# schedule1TimeOn = None
schedule1TimeOff = None
# schedule1SetpointHeat = None
schedule2TimeOn = None
schedule2TimeOff = None
# schedule2SetpointHeat = None
schedule3TimeOn = None
schedule3TimeOff = None
# schedule3SetpointHeat = None
schedule4TimeOn = None
# schedule4TimeOff = None
# schedule4SetpointHeat = None
# Validate Schedule 1
schedule1Enabled = bool(valuesDict.get('schedule1Enabled', False))
if schedule1Enabled:
scheduleValid, scheduleData = self.validateSchedule(actionId, valuesDict, '1')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
# schedule1TimeOn = scheduleData[0]
schedule1TimeOff = scheduleData[1]
# schedule1SetpointHeat = scheduleData[2]
# Validate Schedule 2
schedule2Enabled = bool(valuesDict.get('schedule2Enabled', False))
if schedule2Enabled:
scheduleValid, scheduleData = self.validateSchedule(actionId, valuesDict, '2')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
schedule2TimeOn = scheduleData[0]
schedule2TimeOff = scheduleData[1]
# schedule2SetpointHeat = scheduleData[2]
# Validate Schedule 3
schedule3Enabled = bool(valuesDict.get('schedule3Enabled', False))
if schedule3Enabled:
scheduleValid, scheduleData = self.validateSchedule(actionId, valuesDict, '3')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
schedule3TimeOn = scheduleData[0]
schedule3TimeOff = scheduleData[1]
# schedule3SetpointHeat = scheduleData[2]
# Validate Schedule 4
schedule4Enabled = bool(valuesDict.get('schedule4Enabled', False))
if schedule4Enabled:
scheduleValid, scheduleData = self.validateSchedule(actionId, valuesDict, '4')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
schedule4TimeOn = scheduleData[0]
# schedule4TimeOff = scheduleData[1]
# schedule4SetpointHeat = scheduleData[2]
# Consistency check across schedules
if schedule1Enabled:
if schedule2Enabled:
if schedule1TimeOff < schedule2TimeOn:
secondsDelta = secondsFromHHMM(schedule2TimeOn) - secondsFromHHMM(schedule1TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule1TimeOff'] = 'The Schedule One heating OFF time must end before the Schedule Two heating ON time'
errorDict['schedule2TimeOn'] = 'The Schedule Two heating On time must start after the Schedule One heating Off time'
errorDict[
'showAlertText'] = f'The Schedule One OFF time [{schedule1TimeOff}] must be before the Schedule Two ON time [{schedule2TimeOn}] and there must be at least 10 minutes between the Schedule One OFF time and Schedule Two ON time.'
return False, valuesDict, errorDict
if schedule3Enabled:
if schedule1TimeOff < schedule3TimeOn:
secondsDelta = secondsFromHHMM(schedule3TimeOn) - secondsFromHHMM(schedule1TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule1TimeOff'] = 'The Schedule One heating OFF time must end before the Schedule Three heating ON time'
errorDict['schedule3TimeOn'] = 'The Schedule Three heating On time must start after the Schedule One heating Off time'
errorDict[
'showAlertText'] = f'The Schedule One OFF time [{schedule1TimeOff}] must be before the Schedule Three ON time [{schedule3TimeOn}] and there must be at least 10 minutes between the Schedule One OFF time and Schedule Three ON time.'
return False, valuesDict, errorDict
if schedule4Enabled:
if schedule1TimeOff < schedule4TimeOn:
secondsDelta = secondsFromHHMM(schedule4TimeOn) - secondsFromHHMM(schedule1TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule1TimeOff'] = 'The Schedule One heating OFF time must end before the Schedule Four heating ON time'
errorDict['schedule4TimeOn'] = 'The Schedule Four heating On time must start after the Schedule One heating Off time'
errorDict[
'showAlertText'] = f'The Schedule One OFF time [{schedule1TimeOff}] must be before the Schedule Four ON time [{schedule4TimeOn}] and there must be at least 10 minutes between the Schedule One OFF time and Schedule Four ON time.'
return False, valuesDict, errorDict
if schedule2Enabled:
if schedule3Enabled:
if schedule2TimeOff < schedule3TimeOn:
secondsDelta = secondsFromHHMM(schedule3TimeOn) - secondsFromHHMM(schedule2TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule2TimeOff'] = 'The Schedule Two heating OFF time must end before the Schedule Three heating ON time'
errorDict['schedule3TimeOn'] = 'The Schedule Three heating On time must start after the Schedule Two heating Off time'
errorDict[
'showAlertText'] = f'The Schedule Two OFF time [{schedule2TimeOff}] must be before the Schedule Three ON time [{schedule3TimeOn}] and there must be at least 10 minutes between the Schedule Two OFF time and Schedule Three ON time.'
return False, valuesDict, errorDict
if schedule4Enabled:
if schedule2TimeOff < schedule4TimeOn:
secondsDelta = secondsFromHHMM(schedule4TimeOn) - secondsFromHHMM(schedule2TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule2TimeOff'] = 'The Schedule Two heating OFF time must end before the Schedule Four heating ON time'
errorDict['schedule4TimeOn'] = 'The Schedule Four heating On time must start after the Schedule Two heating Off time'
errorDict[
'showAlertText'] = f'The Schedule Two OFF time [{schedule2TimeOff}] must be before the Schedule Four ON time [{schedule4TimeOn}] and there must be at least 10 minutes between the Schedule Two OFF time and Schedule Four ON time.'
return False, valuesDict, errorDict
if schedule3Enabled:
if schedule4Enabled:
if schedule3TimeOff < schedule4TimeOn:
secondsDelta = secondsFromHHMM(schedule4TimeOn) - secondsFromHHMM(schedule3TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule3TimeOff'] = 'The Schedule Three heating OFF time must end before the Schedule Four heating ON time'
errorDict['schedule4TimeOn'] = 'The Schedule Four heating On time must start after the Schedule Three heating Off time'
errorDict[
'showAlertText'] = f'The Schedule Three OFF time [{schedule3TimeOff}] must be before the Schedule Four ON time [{schedule4TimeOn}] and there must be at least 10 minutes between the Schedule Three OFF time and Schedule Four ON time.'
return False, valuesDict, errorDict
elif typeId == "processBoost":
boostMode = int(valuesDict.get('boostMode', 0))
if boostMode == BOOST_MODE_SELECT:
errorDict = indigo.Dict()
errorDict['boostMode'] = 'You must select a boost mode: \'Delta T\' or \'Setpoint\'.'
errorDict['showAlertText'] = 'You must select a boost mode: \'Delta T\' or \'Setpoint\'.'
return False, valuesDict, errorDict
if boostMode == BOOST_MODE_DELTA_T: # Validate deltaT
valid = False
try:
boostDeltaT = float(valuesDict.get('boostDeltaT', 3))
valid = True
except ValueError:
boostDeltaT = 3 # To suppress PyCharm warning
if not valid or boostDeltaT < 1 or boostDeltaT > 5 or boostDeltaT % 0.5 != 0:
errorDict = indigo.Dict()
errorDict['boostDeltaT'] = 'Boost Delta T must be a numeric value between 1 and 5 (inclusive) e.g 2.5'
errorDict['showAlertText'] = 'You must enter a valid Delta T to boost the temperature by. It must be set between 1 and 5 (inclusive) and a multiple of 0.5.'
return False, valuesDict, errorDict
else: # Validate Setpoint
valid = False
try:
boostSetpoint = float(valuesDict.get('boostSetpoint', 3.0))
valid = True
except ValueError:
boostSetpoint = 3.0 # To suppress PyCharm warning
if actionId in self.globals['trvc']:
setpointHeatMinimum = float(self.globals['trvc'][actionId]['setpointHeatMinimum'])
setpointHeatMaximum = float(self.globals['trvc'][actionId]['setpointHeatMaximum'])
else:
errorDict = indigo.Dict()
errorDict['boostSetpoint'] = 'Unable to test Setpoint temperature against allowed minimum/maximum.'
errorDict['showAlertText'] = f'Unable to test Setpoint temperature against allowed minimum/maximum - make sure device \'{indigo.devices[actionId].name}\' is enabled.'
return False, valuesDict, errorDict
if not valid or boostSetpoint < setpointHeatMinimum or boostSetpoint > setpointHeatMaximum or boostSetpoint % 0.5 != 0:
errorDict = indigo.Dict()
errorDict['boostSetpoint'] = f'Setpoint temperature must be numeric and set between {setpointHeatMinimum} and {setpointHeatMaximum} (inclusive)'
errorDict['showAlertText'] = f'You must enter a valid Setpoint temperature for the TRV. It must be numeric and set between {setpointHeatMinimum} and {setpointHeatMaximum} (inclusive) and a multiple of 0.5.'
return False, valuesDict, errorDict
valid = False
try:
boostMinutes = int(valuesDict.get('boostMinutes', 20))
valid = True
except ValueError:
boostMinutes = 20 # To suppress PyCharm warning
if not valid or boostMinutes < 5 or boostMinutes > 120:
errorDict = indigo.Dict()
errorDict['boostMinutes'] = 'Boost Minutes must be an integer and set between 5 and 120 (inclusive) e.g 20'
errorDict['showAlertText'] = 'You must enter a valid number of minutes to boost the temperature by. It must be a numeric value and set between 5 and 120 (inclusive).'
return False, valuesDict, errorDict
elif typeId == "processExtend":
# Validate extend increment minutes
valid = False
try:
extendIncrementMinutes = int(valuesDict.get('extendIncrementMinutes', 15))
valid = True
except ValueError:
extendIncrementMinutes = 15 # To suppress PyCharm warning
if not valid or extendIncrementMinutes < 15 or extendIncrementMinutes > 60:
errorDict = indigo.Dict()
errorDict["extendIncrementMinutes"] = "The Extend Increment Minutes must be an integer and set between 15 and 60 (inclusive)"
errorDict[
'showAlertText'] = "You must enter a valid Extend Increment Minutes (length of time to increase extend by) for the TRV. It must be an integer and set between 15 and 60 (inclusive)."
return False, valuesDict, errorDict
# Validate extend maximum minutes
valid = False
try:
extendMaximumMinutes = int(valuesDict.get('extendMaximumMinutes', 15))
valid = True
except ValueError:
extendMaximumMinutes = 15 # To suppress PyCharm warning
if not valid or extendMaximumMinutes < 15 or extendMaximumMinutes > 1080:
errorDict = indigo.Dict()
errorDict["extendMaximumMinutes"] = "The Extend Maximum Minutes must be an integer and set between 15 and 1080 (18 hours!) (inclusive)"
errorDict[
'showAlertText'] = "You must enter a valid Extend Maximum Minutes (maximum length of time to extend by) for the TRV. It must be an integer and set between 15 and 1080 (18 hours!) (inclusive)."
return False, valuesDict, errorDict
elif typeId == "processUpdateAllCsvFilesViaPostgreSQL":
# Validate Override Default Retention Hours
valid = False
overrideDefaultRetentionHours = ''
try:
overrideDefaultRetentionHours = valuesDict.get('overrideDefaultRetentionHours', '')
if overrideDefaultRetentionHours == '':
overrideDefaultRetentionHours = 1024 # A random large number for validity check
valid = True
else:
overrideDefaultRetentionHours = int(valuesDict.get('overrideDefaultRetentionHours', ''))
valid = True
except ValueError:
pass
if not valid or overrideDefaultRetentionHours < 1:
errorDict = indigo.Dict()
errorDict["overrideDefaultRetentionHours"] = "The Override Default Retention Hours must be blank or an integer greater than 0"
errorDict[
'showAlertText'] = "You must leave the Override Default Retention Hours blank or enter a valid Retention Hours to retain the CSV data. If set it must be an integer and greater than zero."
return False, valuesDict, errorDict
return True, valuesDict
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def validateDeviceConfigUi(self, valuesDict, typeId, devId): # Validate TRV Thermostat Controller
try:
# Validate TRV Device
trvDevId = 0
valid = False
try:
trvDevId = int(valuesDict.get('trvDevId', 0))
if trvDevId != 0 and valuesDict['supportedModel'] != 'Unknown TRV Model':
valid = True
except Exception:
pass
if not valid:
try:
model = f'a \'{indigo.devices[trvDevId].model}\' is not a TRV known by the plugin.'
except KeyError:
model = 'no device selected!'
errorDict = indigo.Dict()
errorDict['trvDevId'] = 'Select a known TRV device'
errorDict['showAlertText'] = f'You must select a TRV device to monitor which is known by the plugin; {model}'
return False, valuesDict, errorDict
self.trvThermostatDeviceSelected(valuesDict, typeId, devId)
overrideSetpointHeatMaximum = bool(valuesDict.get('overrideSetpointHeatMaximum', False))
if overrideSetpointHeatMaximum:
overrideSetpointHeatMaximumValue = int(valuesDict.get('overrideSetpointHeatMaximumValue', 0))
valuesDictTrvDeviceSetpointHeatMaximum = float(valuesDict['trvDeviceSetpointHeatMaximum'])
if 21.0 < overrideSetpointHeatMaximumValue < valuesDictTrvDeviceSetpointHeatMaximum:
valuesDict['setpointHeatMaximum'] = overrideSetpointHeatMaximumValue
else:
errorDict = indigo.Dict()
errorDict['overrideSetpointHeatMaximumValue'] = 'Override Setpoint Maximum Value is invalid'
errorDict['showAlertText'] = 'Override Setpoint Maximum Value must be > 21 and less than TRV Maximum Settable Temperature [FULLY ON] value.'
return False, valuesDict, errorDict
# # Validate TRV Delta Maximum
# trvDeltaMax = float(valuesDict.get('trvDeltaMax', 0.0))
# if trvDeltaMax < 0.0 or trvDeltaMax > 10.0 or trvDeltaMax % 0.5 != 0:
# errorDict = indigo.Dict()
# errorDict['trvDeltaMax'] = 'TRV Delta Max must be set between 0.0 and 10.0 (inclusive)'
# errorDict['showAlertText'] = 'You must enter a valid maximum number of degrees to exceed the TRV Heat Setpoint by. It must be set between 0.0 and 10.0 (inclusive) and a multiple of 0.5.'
# return False, valuesDict, errorDict
# Validate Device Heat Source Controller
valid = False
try:
heatingId = int(valuesDict.get('heatingId', -1))
if heatingId != -1:
if heatingId == 0:
valid = True
else:
if self.globals['config']['disableHeatSourceDeviceListFilter']:
valid = True
else:
model = indigo.devices[heatingId].model
if model in self.globals['supportedHeatSourceControllers']:
valid = True
# else:
# heatingId = 0
except Exception:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['heatingId'] = 'Select a Heat Source Controller device or Not Required'
errorDict['showAlertText'] = 'You must select a Heat Source Controller to switch on heat for the TRV or specify Not Required.'
return False, valuesDict, errorDict
# Validate Variable Heat Source Controller
valid = False
try:
heatingVarId = int(valuesDict.get('heatingVarId', -1))
if heatingVarId != -1:
valid = True
except Exception:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['heatingVarId'] = 'Select a Heat Source Controller variable or Not Required'
errorDict['showAlertText'] = 'You must select a Heat Source Controller to switch on heat for the TRV or specify Not Required.'
return False, valuesDict, errorDict
# Check whether to validate Remote Thermostat
remoteDevId = 0
valid = False
remoteThermostatControlEnabled = bool(valuesDict.get('remoteThermostatControlEnabled', False))
if remoteThermostatControlEnabled:
remoteDevId = int(valuesDict.get('remoteDevId', 0))
if remoteDevId != 0 and indigo.devices[remoteDevId].deviceTypeId != u'trvController':
remoteDev = indigo.devices[remoteDevId]
# Deprecated 'subModel' code follows ...
# if (remoteDev.subModel == u'Temperature'
# or remoteDev.subModel == u'Temperature 1'
# or remoteDev.subModel == u'Thermostat'
# or remoteDev.subModel[0:7].lower() == u'sensor '
# if (remoteDev.subType == u'Temperature'
# or remoteDev.subType == u'Thermostat'
# or remoteDev.subType == u'Sensor'
# or u'temperatureInput1' in remoteDev.states
# or u'temperature' in remoteDev.states
# or u'Temperature' in remoteDev.states):
# valid = True
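# A remote thermostat is acceptable if it is an Indigo thermostat or sensor device that either exposes at least one temperature input or is a temperature sub-type sensor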
if type(remoteDev) == indigo.ThermostatDevice or type(remoteDev) == indigo.SensorDevice:
num_temperature_inputs = int(remoteDev.ownerProps.get("NumTemperatureInputs", "0"))
if num_temperature_inputs > 0 or remoteDev.subType == indigo.kSensorDeviceSubType.Temperature:
valid = True
else:
remoteDevId = 0
if not valid:
try:
model = f'a \'{indigo.devices[remoteDevId].model}\' is not a Remote Thermostat understood by this plugin.'
except KeyError:
model = u'no device selected!'
errorDict = indigo.Dict()
errorDict[u'remoteDevId'] = u'Select a Remote Thermostat device'
errorDict[u'showAlertText'] = f'You must select a Remote thermostat to control the TRV; {model}'
return False, valuesDict, errorDict
if remoteDevId != 0:
# Validate Remote Delta Maximum
valid = False
try:
remoteDeltaMax = float(valuesDict.get('remoteDeltaMax', 5.0))
valid = True
except ValueError:
remoteDeltaMax = 5.0 # Set to avoid pyCharm warning message on next line
if not valid or remoteDeltaMax < 0.0 or remoteDeltaMax > 10.0 or remoteDeltaMax % 0.5 != 0:
errorDict = indigo.Dict()
errorDict['remoteDeltaMax'] = 'Remote Delta Max must be set between 0.0 and 10.0 (inclusive)'
errorDict['showAlertText'] = 'You must enter a valid maximum number of degrees to exceed the TRV Heat Setpoint for the remote thermostat. It must be set between 0.0 and 10.0 (inclusive) and a multiple of 0.5.'
return False, valuesDict, errorDict
# Validate Remote Temperature Offset
valid = False
try:
remoteTempOffset = float(valuesDict.get('remoteTempOffset', 0.0))
valid = True
except ValueError:
remoteTempOffset = 0.0 # Set to avoid pyCharm warning message on next line
if not valid or remoteTempOffset < -5.0 or remoteTempOffset > 5.0:
errorDict = indigo.Dict()
errorDict['remoteTempOffset'] = 'Remote Temperature Offset must be set between -5.0 and 5.0 (inclusive)'
errorDict['showAlertText'] = 'You must enter a valid Remote Temperature Offset. It must be set between -5.0 and 5.0 (inclusive).'
return False, valuesDict, errorDict
# Validate CSV Fields
csvCreationMethod = int(valuesDict.get('csvCreationMethod', 0))
if csvCreationMethod == 1 or csvCreationMethod == 2:
csvShortName = valuesDict.get('csvShortName', '')
if len(csvShortName) < 1 or len(csvShortName) > 10:
errorDict = indigo.Dict()
errorDict['csvShortName'] = 'Short Name must be present and have a length between 1 and 10 (inclusive).'
errorDict['showAlertText'] = 'Short Name must be present and have a length between 1 and 10 (inclusive).'
return False, valuesDict, errorDict
valid = False
try:
csvRetentionPeriodHours = int(valuesDict.get('csvRetentionPeriodHours', 24))
if csvRetentionPeriodHours > 0:
valid = True
except ValueError:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['csvRetentionPeriodHours'] = 'Retention Period (Hours) must be a positive integer.'
errorDict['showAlertText'] = 'Retention Period (Hours) must be a positive integer.'
return False, valuesDict, errorDict
# Validate Polling Fields
supportsWakeup = valuesDict.get('supportsWakeup', 'true')
if supportsWakeup == 'false':
valid = False
try:
pollingScheduleActive = int(valuesDict.get('pollingScheduleActive', 5))
if pollingScheduleActive >= 0:
valid = True
except ValueError:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['pollingScheduleActive'] = 'Polling Minutes [Schedule Active] must be a positive integer or zero to disable.'
errorDict['showAlertText'] = 'Polling Minutes [Schedule Active] must be a positive integer or zero to disable.'
return False, valuesDict, errorDict
valid = False
try:
pollingScheduleInactive = int(valuesDict.get('pollingScheduleInactive', 5))
if pollingScheduleInactive >= 0:
valid = True
except ValueError:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['pollingScheduleInactive'] = 'Polling Minutes [Schedule Inactive] must be a positive integer or zero to disable.'
errorDict['showAlertText'] = 'Polling Minutes [Schedule Inactive] must be a positive integer or zero to disable.'
return False, valuesDict, errorDict
valid = False
try:
pollingSchedulesNotEnabled = int(valuesDict.get('pollingSchedulesNotEnabled', 5))
if pollingSchedulesNotEnabled >= 0:
valid = True
except ValueError:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['pollingSchedulesNotEnabled'] = 'Polling Minutes [Schedules Not Enabled] must be a positive integer or zero to disable.'
errorDict['showAlertText'] = 'Polling Minutes [Schedules Not Enabled] must be a positive integer or zero to disable.'
return False, valuesDict, errorDict
valid = False
try:
pollingBoostEnabled = int(valuesDict.get('pollingBoostEnabled', 5))
if pollingBoostEnabled >= 0:
valid = True
except ValueError:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['pollingBoostEnabled'] = 'Polling Minutes [Boost Enabled] must be a positive integer or zero to disable.'
errorDict['showAlertText'] = 'Polling Minutes [Boost Enabled] must be a positive integer or zero to disable.'
return False, valuesDict, errorDict
# Validate Device Start Method fields
setpointHeatDeviceStartMethod = int(valuesDict.get('setpointHeatDeviceStartMethod', DEVICE_START_SETPOINT_DEVICE_MINIMUM))
if setpointHeatDeviceStartMethod == DEVICE_START_SETPOINT_SPECIFIED:
valid = False
try:
setpointHeatDeviceStartDefault = float(valuesDict.get('setpointHeatDeviceStartDefault', 8.0))
if (8 <= setpointHeatDeviceStartDefault <= 30) and setpointHeatDeviceStartDefault % 0.5 == 0.0:
valid = True
except Exception:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['setpointHeatDeviceStartDefault'] = 'Temperature must be set between 8 and 30 (inclusive)'
errorDict['showAlertText'] = 'You must enter a valid \'Device Start\' temperature for the TRV. It must be set between 8 and 30 (inclusive) and a multiple of 0.5.'
return False, valuesDict, errorDict
# Validate default ON temperature
valid = False
try:
setpointHeatOnDefault = float(valuesDict.get('setpointHeatOnDefault', 0))
if (10.0 <= setpointHeatOnDefault <= 30.0) and setpointHeatOnDefault % 0.5 == 0.0:
valid = True
except Exception:
pass
if not valid:
errorDict = indigo.Dict()
errorDict['setpointHeatOnDefault'] = 'Temperature must be set between 10 and 30 (inclusive)'
errorDict['showAlertText'] = 'You must enter a valid Turn On temperature for the TRV. It must be set between 10 and 30 (inclusive) and a multiple of 0.5.'
return False, valuesDict, errorDict
# Suppress PyCharm warnings
# schedule1TimeOn = None
schedule1TimeOff = None
# schedule1SetpointHeat = None
schedule2TimeOn = None
schedule2TimeOff = None
# schedule2SetpointHeat = None
schedule3TimeOn = None
schedule3TimeOff = None
# schedule3SetpointHeat = None
schedule4TimeOn = None
# schedule4TimeOff = None
# schedule4SetpointHeat = None
# Validate Schedule 1
schedule1Enabled = bool(valuesDict.get('schedule1Enabled', False))
if schedule1Enabled:
scheduleValid, scheduleData = self.validateSchedule(devId, valuesDict, '1')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
# schedule1TimeOn = scheduleData[0]
schedule1TimeOff = scheduleData[1]
# schedule1SetpointHeat = scheduleData[2]
# Validate Schedule 2
schedule2Enabled = bool(valuesDict.get('schedule2Enabled', False))
if schedule2Enabled:
scheduleValid, scheduleData = self.validateSchedule(devId, valuesDict, '2')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
schedule2TimeOn = scheduleData[0]
schedule2TimeOff = scheduleData[1]
# schedule2SetpointHeat = scheduleData[2]
# Validate Schedule 3
schedule3Enabled = bool(valuesDict.get('schedule3Enabled', False))
if schedule3Enabled:
scheduleValid, scheduleData = self.validateSchedule(devId, valuesDict, '3')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
schedule3TimeOn = scheduleData[0]
schedule3TimeOff = scheduleData[1]
# schedule3SetpointHeat = scheduleData[2]
# Validate Schedule 4
schedule4Enabled = bool(valuesDict.get('schedule4Enabled', False))
if schedule4Enabled:
scheduleValid, scheduleData = self.validateSchedule(devId, valuesDict, '4')
if not scheduleValid:
return False, valuesDict, scheduleData # i.e. False, valuesDict, errorDict
schedule4TimeOn = scheduleData[0]
# schedule4TimeOff = scheduleData[1]
# schedule4SetpointHeat = scheduleData[2]
# Consistency check across schedules
if schedule1Enabled:
if schedule2Enabled:
if schedule1TimeOff < schedule2TimeOn:
secondsDelta = secondsFromHHMM(schedule2TimeOn) - secondsFromHHMM(schedule1TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule1TimeOff'] = 'The Schedule One heating OFF time must end before the Schedule Two heating ON time'
errorDict['schedule2TimeOn'] = 'The Schedule Two heating On time must start after the Schedule One heating Off time'
errorDict['showAlertText'] = f'The Schedule One OFF time [{schedule1TimeOff}] must be before the Schedule Two ON time [{schedule2TimeOn}] and there must be at least 10 minutes between the Schedule One OFF time and Schedule Two ON time.'
return False, valuesDict, errorDict
if schedule3Enabled:
if schedule1TimeOff < schedule3TimeOn:
secondsDelta = secondsFromHHMM(schedule3TimeOn) - secondsFromHHMM(schedule1TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule1TimeOff'] = 'The Schedule One heating OFF time must end before the Schedule Three heating ON time'
errorDict['schedule3TimeOn'] = 'The Schedule Three heating On time must start after the Schedule One heating Off time'
errorDict['showAlertText'] = f'The Schedule One OFF time [{schedule1TimeOff}] must be before the Schedule Three ON time [{schedule3TimeOn}] and there must be at least 10 minutes between the Schedule One OFF time and Schedule Three ON time.'
return False, valuesDict, errorDict
if schedule4Enabled:
if schedule1TimeOff < schedule4TimeOn:
secondsDelta = secondsFromHHMM(schedule4TimeOn) - secondsFromHHMM(schedule1TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule1TimeOff'] = 'The Schedule One heating OFF time must end before the Schedule Four heating ON time'
errorDict['schedule4TimeOn'] = 'The Schedule Four heating On time must start after the Schedule One heating Off time'
errorDict['showAlertText'] = f'The Schedule One OFF time [{schedule1TimeOff}] must be before the Schedule Four ON time [{schedule4TimeOn}] and there must be at least 10 minutes between the Schedule One OFF time and Schedule Four ON time.'
return False, valuesDict, errorDict
if schedule2Enabled:
if schedule3Enabled:
if schedule2TimeOff < schedule3TimeOn:
secondsDelta = secondsFromHHMM(schedule3TimeOn) - secondsFromHHMM(schedule2TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule2TimeOff'] = 'The Schedule Two heating OFF time must end before the Schedule Three heating ON time'
errorDict['schedule3TimeOn'] = 'The Schedule Three heating On time must start after the Schedule Two heating Off time'
errorDict['showAlertText'] = f'The Schedule Two OFF time [{schedule2TimeOff}] must be before the Schedule Three ON time [{schedule3TimeOn}] and there must be at least 10 minutes between the Schedule Two OFF time and Schedule Three ON time.'
return False, valuesDict, errorDict
if schedule4Enabled:
if schedule2TimeOff < schedule4TimeOn:
secondsDelta = secondsFromHHMM(schedule4TimeOn) - secondsFromHHMM(schedule2TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule2TimeOff'] = 'The Schedule Two heating OFF time must end before the Schedule Four heating ON time'
errorDict['schedule4TimeOn'] = 'The Schedule Four heating On time must start after the Schedule Two heating Off time'
errorDict['showAlertText'] = f'The Schedule Two OFF time [{schedule2TimeOff}] must be before the Schedule Four ON time [{schedule4TimeOn}] and there must be at least 10 minutes between the Schedule Two OFF time and Schedule Four ON time.'
return False, valuesDict, errorDict
if schedule3Enabled:
if schedule4Enabled:
if schedule3TimeOff < schedule4TimeOn:
secondsDelta = secondsFromHHMM(schedule4TimeOn) - secondsFromHHMM(schedule3TimeOff)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict['schedule3TimeOff'] = 'The Schedule Three heating OFF time must end before the Schedule Four heating ON time'
errorDict['schedule4TimeOn'] = 'The Schedule Four heating On time must start after the Schedule Three heating Off time'
errorDict['showAlertText'] = f'The Schedule Three OFF time [{schedule3TimeOff}] must be before the Schedule Four ON time [{schedule4TimeOn}] and there must be at least 10 minutes between the Schedule Three OFF time and Schedule Four ON time.'
return False, valuesDict, errorDict
return True, valuesDict
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def validatePrefsConfigUi(self, values_dict): # noqa - Method is not declared static
return True, values_dict
# noinspection PyUnusedLocal
def zwaveCommandQueued(self, zwaveCommand): # Not yet available in Indigo API :)
self.logger.error('QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED==QUEUED')
def zwaveCommandReceived(self, zwave_command):
try:
            zwave_report_prefix = f"{'-' * 22}> Z-WAVE "  # 22 dashes as first part of prefix
now_time = indigo.server.getTime()
now_time_string = now_time.strftime('%Y-%m-%d %H:%M:%S')
nodeId = zwave_command['nodeId'] # Can be None!
zwave_report_additional_detail = u""
if nodeId and nodeId in self.globals['zwave']['WatchList']:
# Interpret Z-Wave Command
zw_interpretation = self.globals[ZWI][ZWI_INSTANCE].interpret_zwave(True, zwave_command) # True is to indicate Z-Wave Message received
if zw_interpretation is not None and zw_interpretation[ZW_INTERPRETATION_ATTEMPTED]:
# self.zwave_log(zw_interpretation[ZW_INDIGO_DEVICE], zw_interpretation[ZW_INTERPRETATION_OVERVIEW_UI], zw_interpretation[ZW_INTERPRETATION_DETAIL_UI])
address = zw_interpretation[ZW_NODE_ID]
if address in self.globals['zwave']['addressToDevice']:
dev = indigo.devices[self.globals['zwave']['addressToDevice'][address]['devId']] # TRV or Remote
devId = dev.id
devType = self.globals['zwave']['addressToDevice'][address]['type']
trvcDev = indigo.devices[self.globals['zwave']['addressToDevice'][address]['trvcId']] # TRV Controller
trvCtlrDevId = trvcDev.id
if devType == TRV:
self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeTrv'] = now_time_string
if 'zwaveReceivedCountTrv' in self.globals['trvc'][trvCtlrDevId]:
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountTrv'] += 1
else:
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountTrv'] = 1
self.globals['trvc'][trvCtlrDevId]['zwaveLastReceivedCommandTrv'] = zw_interpretation[ZW_COMMAND_CLASS]
if self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] > 0:
if self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayTrv']:
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayTrv'] = False
self.logger.info(
f'Z-Wave connection re-established with {"TRV device"} \'{indigo.devices[devId].name}\', controlled by \'{indigo.devices[trvCtlrDevId].name}\'. This device had previously missed a wakeup.')
trvcDev.updateStateImageOnServer(indigo.kStateImageSel.HvacHeatMode)
nextWakeupMissedSeconds = (self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] + 2) * 60 # Add 2 minutes to next expected wakeup
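                                # (Re)arm the missed-wakeup watchdog: cancel any existing timer for this device and start a new one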
if devId in self.globals['timers']['zwaveWakeupCheck']:
self.globals['timers']['zwaveWakeupCheck'][devId].cancel()
self.globals['timers']['zwaveWakeupCheck'][devId] = threading.Timer(float(nextWakeupMissedSeconds), self.zwaveWakeupMissedTriggered, [trvCtlrDevId, devType, devId])
self.globals['timers']['zwaveWakeupCheck'][devId].daemon = True
self.globals['timers']['zwaveWakeupCheck'][devId].start()
# zwaveReport = zwaveReport + f"\nZZ TRV Z-WAVE > Next wakeup missed alert in {nextWakeupMissedSeconds} seconds"
else: # Must be Remote
self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote'] = now_time_string
if 'zwaveReceivedCountRemote' in self.globals['trvc'][trvCtlrDevId]:
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote'] += 1
else:
self.globals['trvc'][trvCtlrDevId]['zwaveReceivedCountRemote'] = 1
self.globals['trvc'][trvCtlrDevId]['zwaveLastReceivedCommandRemote'] = zw_interpretation[ZW_COMMAND_CLASS]
if self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] > 0:
if self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayRemote']:
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayRemote'] = False
self.logger.info(
f'Z-Wave connection re-established with {u"Remote Thermostat device"} \'{indigo.devices[devId].name}\', controlled by \'{indigo.devices[trvCtlrDevId].name}\'. This device had previously missed a wakeup.')
trvcDev.updateStateImageOnServer(indigo.kStateImageSel.HvacHeatMode)
nextWakeupMissedSeconds = (self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] + 2) * 60 # Add 2 minutes to next expected wakeup
if devId in self.globals['timers']['zwaveWakeupCheck']:
self.globals['timers']['zwaveWakeupCheck'][devId].cancel()
self.globals['timers']['zwaveWakeupCheck'][devId] = threading.Timer(float(nextWakeupMissedSeconds), self.zwaveWakeupMissedTriggered, [trvCtlrDevId, devType, devId])
self.globals['timers']['zwaveWakeupCheck'][devId].daemon = True
self.globals['timers']['zwaveWakeupCheck'][devId].start()
# zwaveReport = zwaveReport + f"\nZZ TRV Z-WAVE > Next wakeup missed alert in {nextWakeupMissedSeconds} seconds"
if zw_interpretation[ZW_COMMAND_CLASS] == ZW_THERMOSTAT_SETPOINT:
if devType == TRV and self.globals['trvc'][trvCtlrDevId]['trvSupportsManualSetpoint']:
self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_TRV_HARDWARE
zwave_report_additional_detail = f', Pending Controller Mode = {CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_TRV_HARDWARE]}'
elif devType == REMOTE and self.globals['trvc'][trvCtlrDevId]['remoteSetpointHeatControl']:
self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_REMOTE_HARDWARE
zwave_report_additional_detail = f', Pending Controller Mode = {CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_REMOTE_HARDWARE]}'
elif zw_interpretation[ZW_COMMAND_CLASS] == ZW_SWITCH_MULTILEVEL:
zwaveCommandValve = zw_interpretation[ZW_VALUE]
if zwaveCommandValve == 0:
zwave_report_additional_detail = u', Valve = Closed'
else:
zwave_report_additional_detail = f', Valve = Open {zwaveCommandValve}%'
elif zw_interpretation[ZW_COMMAND_CLASS] == ZW_THERMOSTAT_MODE:
pass
# zwave_report_additional_detail = f', Mode = {zw_interpretation[ZW_MODE_UI]}'
# if devType == TRV:
# self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_TRV_HARDWARE
# else: # Must be Remote as can't be a valve
# self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_REMOTE_HARDWARE
elif zw_interpretation[ZW_COMMAND_CLASS] == ZW_SENSOR_MULTILEVEL:
pass
# zwave_report_additional_detail = f', Temperature = {zw_interpretation[ZW_VALUE_UI]}{zw_interpretation[ZW_SCALE_UI_COMPACT]}'
if zw_interpretation[ZW_COMMAND_CLASS] == ZW_WAKE_UP:
if zw_interpretation[ZW_COMMAND] == ZW_WAKE_UP_NOTIFICATION:
if devType == TRV or devType == VALVE:
# As just a wakeup received - update TRV Controller device to ensure last TRV wakeup time recorded
trvcDev.updateStateOnServer(key='zwaveEventReceivedDateTimeTrv', value=self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeTrv'])
elif devType == REMOTE:
# As just a wakeup received - update TRV Controller device to ensure last Remote wakeup time recorded
trvcDev.updateStateOnServer(key='zwaveEventReceivedDateTimeRemote', value=self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote'])
if self.globals['trvc'][trvCtlrDevId]['zwaveEventWakeUpSentDisplayFix'] != "":
self.logger.debug(self.globals['trvc'][trvCtlrDevId]['zwaveEventWakeUpSentDisplayFix'])
self.globals['trvc'][trvCtlrDevId]['zwaveEventWakeUpSentDisplayFix'] = u""
zwave_report = f"\n\n{zwave_report_prefix}{zw_interpretation[ZW_INTERPRETATION_OVERVIEW_UI]}"
                    zwave_report = f"{zwave_report}\n{zwave_report_prefix}{zw_interpretation[ZW_INTERPRETATION_DETAIL_UI]}{zwave_report_additional_detail}\n"
self.logger.debug(zwave_report)
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def zwaveCommandSent(self, zwave_command):
try:
            zwave_report_prefix = f"{'-' * 22}> Z-WAVE "  # 22 dashes as first part of prefix
now_time = indigo.server.getTime()
now_time_string = now_time.strftime('%Y-%m-%d %H:%M:%S')
nodeId = zwave_command['nodeId'] # Can be None!
zwave_report_additional_detail = u""
trvCtlrDevId = 0
zwave_event_wake_up_sent_display_fix = False
if nodeId and nodeId in self.globals['zwave']['WatchList']:
# Interpret Z-Wave Command
zw_interpretation = self.globals[ZWI][ZWI_INSTANCE].interpret_zwave(False, zwave_command) # True is to indicate Z-Wave Message sent
if zw_interpretation is not None and zw_interpretation[ZW_INTERPRETATION_ATTEMPTED]:
# self.zwave_log(zw_interpretation[ZW_INDIGO_DEVICE], zw_interpretation[ZW_INTERPRETATION_OVERVIEW_UI], zw_interpretation[ZW_INTERPRETATION_DETAIL_UI])
address = zw_interpretation[ZW_NODE_ID]
if address in self.globals['zwave']['addressToDevice']:
# dev = indigo.devices[self.globals['zwave']['addressToDevice'][address]['devId']] # TODO: IS THIS CORRECT / NEEDED ?
# devId = dev.id # TODO: IS THIS CORRECT / NEEDED ?
devType = self.globals['zwave']['addressToDevice'][address]['type']
trvcDev = indigo.devices[self.globals['zwave']['addressToDevice'][address]['trvcId']] # TRV Controller
trvCtlrDevId = trvcDev.id
if devType == TRV or devType == VALVE:
self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeTrv'] = now_time_string
if 'zwaveSentCountTrv' in self.globals['trvc'][trvCtlrDevId]:
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountTrv'] += 1
else:
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountTrv'] = 1
self.globals['trvc'][trvCtlrDevId]['zwaveLastSentCommandTrv'] = zw_interpretation[ZW_COMMAND_CLASS]
else: # Must be Remote
self.globals['trvc'][trvCtlrDevId]['zwaveEventSentDateTimeRemote'] = now_time_string
if 'zwaveSentCountRemote' in self.globals['trvc'][trvCtlrDevId]:
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote'] += 1
else:
self.globals['trvc'][trvCtlrDevId]['zwaveSentCountRemote'] = 1
self.globals['trvc'][trvCtlrDevId]['zwaveLastSentCommandRemote'] = zw_interpretation[ZW_COMMAND_CLASS]
if zw_interpretation[ZW_COMMAND_CLASS] == ZW_THERMOSTAT_SETPOINT and zw_interpretation[ZW_COMMAND] == ZW_THERMOSTAT_SETPOINT_SET:
zwaveCommandSetpoint = zw_interpretation[ZW_VALUE]
if devType == TRV:
zwave_report_additional_detail = (
f", Pending: {self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointFlag']}, Sequence: '{self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointSequence']}', Setpoint: '{self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointValue']}'")
if self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointValue'] != zwaveCommandSetpoint: # Assume internally generated Z-Wave setpoint command
# if self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointFlag']: # if internally generated Z-Wave setpoint command reset flag
# self.globals['trvc'][trvCtlrDevId]['zwavePendingTrvSetpointFlag'] = False # Turn off
# else:
# As not internally generated Z-Wave setpoint command, must be from UI
self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_TRV_UI
else: # Must be Remote as can't be a valve
zwave_report_additional_detail = (
f", Pending: {self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointFlag']}, Sequence: '{self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointSequence']}', Setpoint: '{self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointValue']}'")
if self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointFlag']: # if internally generated Z-Wave setpoint command reset flag
self.globals['trvc'][trvCtlrDevId]['zwavePendingRemoteSetpointFlag'] = False # Turn off
else:
# As not internally generated Z-Wave setpoint command, must be from UI
self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_REMOTE_UI
elif zw_interpretation[ZW_COMMAND_CLASS] == ZW_SWITCH_MULTILEVEL:
if zw_interpretation[ZW_COMMAND] == ZW_SWITCH_MULTILEVEL_REPORT:
pass
                            elif zw_interpretation[ZW_COMMAND] == ZW_SWITCH_MULTILEVEL_SET:
                                zwaveCommandValve = zw_interpretation[ZW_VALUE]
                                if zwaveCommandValve == 0:
                                    zwave_report_additional_detail = u", Closed"
                                else:
                                    zwave_report_additional_detail = f", Open {zw_interpretation[ZW_VALUE]}%"
elif zw_interpretation[ZW_COMMAND_CLASS] == ZW_THERMOSTAT_MODE and zw_interpretation[ZW_COMMAND] == ZW_THERMOSTAT_MODE_SET:
zwave_report_additional_detail = f", Mode = {zw_interpretation[ZW_MODE_UI]}" # ERROR WAS HERE!!!
if self.globals['trvc'][trvCtlrDevId]['zwavePendingHvac']: # if internally generated Z-Wave hvac command reset flag
self.globals['trvc'][trvCtlrDevId]['zwavePendingHvac'] = False # Turn off
else:
pass
# As not internally generated Z-Wave hvac command, must be from UI
# if devType == TRV:
# self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_TRV_UI
# else: # Must be Remote as can't be a valve
# self.globals['trvc'][trvCtlrDevId]['controllerMode'] = CONTROLLER_MODE_REMOTE_UI
elif zw_interpretation[ZW_COMMAND_CLASS] == ZWAVE_COMMAND_CLASS_WAKEUP:
zwave_event_wake_up_sent_display_fix = True
zwave_report = f"\n\n{zwave_report_prefix}{zw_interpretation[ZW_INTERPRETATION_OVERVIEW_UI]}"
zwave_report = f"{zwave_report}\n{zwave_report_prefix}{zw_interpretation[ZW_INTERPRETATION_DETAIL_UI]}{zwave_report_additional_detail}\n"
if trvCtlrDevId != 0 and not zwave_event_wake_up_sent_display_fix: # Not a Wakeup command - so output Z-Wave report
self.logger.debug(zwave_report)
else:
self.globals['trvc'][trvCtlrDevId]['zwaveEventWakeUpSentDisplayFix'] = zwave_report
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
#################################
#
# Start of bespoke plugin methods
#
#################################
def _showSchedule(self, trvCtlrDevId, scheduleReportLineLength):
scheduleReport = ''
trvcDev = indigo.devices[trvCtlrDevId]
if trvcDev.enabled and trvcDev.configured:
trvCtlrDevId = trvcDev.id
scheduleReport = scheduleReport + self.boxLine(f'Device: \'{trvcDev.name}\'', scheduleReportLineLength, u'==')
# scheduleList = collections.OrderedDict(sorted(self.globals['schedules'][trvCtlrDevId]['dynamic'].items()))
ScheduleGroupList = [(collections.OrderedDict(sorted(self.globals['schedules'][trvCtlrDevId]['default'].items())), 'Default'),
(collections.OrderedDict(sorted(self.globals['schedules'][trvCtlrDevId]['running'].items())), 'Running'),
(collections.OrderedDict(sorted(self.globals['schedules'][trvCtlrDevId]['dynamic'].items())), 'Dynamic')]
storedScheduleDefault = {}
storedScheduleRunning = {}
for scheduleList, scheduleType in ScheduleGroupList:
if (scheduleType == 'Default' or scheduleType == 'Dynamic') and len(scheduleList) == 2:
continue
elif scheduleType == 'Running' and len(scheduleList) == 2:
scheduleReport = scheduleReport + self.boxLine(' No schedules defined or enabled for device.', scheduleReportLineLength, u'==')
continue
else:
scheduleReport = scheduleReport + self.boxLine(f' Schedule Type: \'{scheduleType}\'', scheduleReportLineLength, u'==')
previousScheduleId = 0
previousScheduleTimeUi = '' # To suppress PyCharm warning
previousScheduleSetpoint = 0.0 # To suppress PyCharm warning
for key, value in scheduleList.items():
# scheduleTime = int(key)
scheduleTimeUi = f'{value[0]}'
scheduleSetpoint = float(value[1])
scheduleId = value[2]
if scheduleId == 0: # Ignore start entry (00:00)
continue
if previousScheduleId == 0 or previousScheduleId != scheduleId:
previousScheduleId = scheduleId
previousScheduleTimeUi = scheduleTimeUi
previousScheduleSetpoint = scheduleSetpoint
else:
scheduleEnabledName = f'schedule{previousScheduleId}Enabled'
scheduleActiveName = f'schedule{previousScheduleId}Active'
# self.logger.info(f'scheduleActiveName = {scheduleActiveName}, {self.globals["trvc"][trvCtlrDevId][scheduleActiveName]}')
if self.globals['trvc'][trvCtlrDevId][scheduleEnabledName]:
combinedScheduleTimesUi = f'{previousScheduleTimeUi} - {scheduleTimeUi}'
scheduleUi = f'Schedule {scheduleId}: {combinedScheduleTimesUi}. Setpoint = {previousScheduleSetpoint}'
# schedule = self.globals['trvc'][trvCtlrDevId]['schedule1TimeOn'] + ' - ' + self.globals['trvc'][trvCtlrDevId]['schedule1TimeOff']
else:
scheduleUi = f'Schedule {scheduleId}: Disabled'
if scheduleType == 'Default':
storedScheduleDefault[scheduleId] = scheduleUi
elif scheduleType == 'Running':
storedScheduleRunning[scheduleId] = scheduleUi
if storedScheduleDefault[scheduleId] != storedScheduleRunning[scheduleId]:
scheduleUi = f'{scheduleUi} [*]'
elif scheduleType == 'Dynamic':
if storedScheduleRunning[scheduleId] != scheduleUi:
scheduleUi = f'{scheduleUi} [*]'
if trvcDev.states[scheduleActiveName]:
scheduleUi = f'{scheduleUi} ACTIVE'
scheduleReport = scheduleReport + self.boxLine(f' {scheduleUi}', scheduleReportLineLength, u'==')
return scheduleReport
def actionConfigApplyDefaultScheduleValues(self, valuesDict, typeId, actionId):
self.logger.debug(f'actionConfigApplyDefaultScheduleValues: typeId[{typeId}], actionId[{actionId}], ValuesDict:\n{valuesDict}\'')
devId = actionId # TRV Controller Device Id
valuesDict['schedule1Enabled'] = self.globals['trvc'][devId]['scheduleReset1Enabled']
valuesDict['schedule1TimeOn'] = self.globals['trvc'][devId]['scheduleReset1TimeOn']
valuesDict['schedule1TimeOff'] = self.globals['trvc'][devId]['scheduleReset1TimeOff']
valuesDict['schedule1SetpointHeat'] = self.globals['trvc'][devId]['scheduleReset1HeatSetpoint']
valuesDict['schedule2Enabled'] = self.globals['trvc'][devId]['scheduleReset2Enabled']
valuesDict['schedule2TimeOn'] = self.globals['trvc'][devId]['scheduleReset2TimeOn']
valuesDict['schedule2TimeOff'] = self.globals['trvc'][devId]['scheduleReset2TimeOff']
valuesDict['schedule2SetpointHeat'] = self.globals['trvc'][devId]['scheduleReset2HeatSetpoint']
valuesDict['schedule3Enabled'] = self.globals['trvc'][devId]['scheduleReset3Enabled']
valuesDict['schedule3TimeOn'] = self.globals['trvc'][devId]['scheduleReset3TimeOn']
valuesDict['schedule3TimeOff'] = self.globals['trvc'][devId]['scheduleReset3TimeOff']
valuesDict['schedule3SetpointHeat'] = self.globals['trvc'][devId]['scheduleReset3HeatSetpoint']
valuesDict['schedule4Enabled'] = self.globals['trvc'][devId]['scheduleReset4Enabled']
valuesDict['schedule4TimeOn'] = self.globals['trvc'][devId]['scheduleReset4TimeOn']
valuesDict['schedule4TimeOff'] = self.globals['trvc'][devId]['scheduleReset4TimeOff']
valuesDict['schedule4SetpointHeat'] = self.globals['trvc'][devId]['scheduleReset4HeatSetpoint']
return valuesDict
def boxLine(self, info, lineLength, boxCharacters): # noqa - Method is not declared static
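        # Line layout is: boxCharacters + single space + info + padding + boxCharacters, padded out to lineLength;
        # the '- 1' below accounts for the single space that follows the leading boxCharacters.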
fillLength = lineLength - len(info) - 1 - (2 * len(boxCharacters))
if fillLength < 0:
return boxCharacters + f'\n LINE LENGTH {lineLength} TOO SMALL FOR BOX CHARACTERS \'{boxCharacters}\' AND INFORMATION \'{info}\''
# lenBoxCharacters = len(boxCharacters)
updatedLine = f'\n{boxCharacters} {info}{(" " * fillLength)}{boxCharacters}'
return updatedLine
def deviceRaceConditionReEnableTriggered(self, trvCtlrDevId):
try:
if trvCtlrDevId in self.globals['timers']['raceCondition']:
self.globals['timers']['raceCondition'][trvCtlrDevId].cancel()
self.logger.error(f'Re-Enabling TRV Controller \'{indigo.devices[trvCtlrDevId].name}\' following potential race condition detection (which as a result the device was disabled).')
indigo.device.enable(trvCtlrDevId, value=True)
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
# noinspection PyUnusedLocal
def heatSourceControllerDevices(self, indigo_filter="", valuesDict=None, typeId="", targetId=0):
array = []
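        # With 'disableHeatSourceDeviceListFilter' set, offer any thermostat/relay type device that is not a
        # known TRV model; otherwise only list devices whose model appears in 'supportedHeatSourceControllers'.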
for dev in indigo.devices:
if self.globals['config']['disableHeatSourceDeviceListFilter']:
try:
if dev.deviceTypeId == 'zwThermostatType' or dev.deviceTypeId == 'zwRelayType' or dev.deviceTypeId == 'pseudoRelay':
if dev.model not in self.globals['supportedTrvModels']:
array.append((dev.id, dev.name))
except Exception:
pass
else:
if dev.model in self.globals['supportedHeatSourceControllers']:
array.append((dev.id, dev.name))
arraySorted = sorted(array, key=lambda dev_name: dev_name[1].lower()) # sort by device name
arraySorted.insert(0, (0, 'NO HEAT SOURCE DEVICE '))
arraySorted.insert(0, (-1, '-- Select Device Heat Source --'))
return arraySorted
# noinspection PyUnusedLocal
def heatSourceControllerVariables(self, indigo_filter="", valuesDict=None, typeId="", targetId=0):
array = []
for var in indigo.variables:
if self.globals['config']['trvVariableFolderId'] == 0:
array.append((var.id, var.name))
else:
if var.folderId == self.globals['config']['trvVariableFolderId']:
array.append((var.id, var.name))
arraySorted = sorted(array, key=lambda var_name: var_name[1].lower()) # sort by variable name
arraySorted.insert(0, (0, 'NO HEAT SOURCE VARIABLE'))
arraySorted.insert(0, (-1, '-- Select Variable Heat Source --'))
return arraySorted
def listActiveDebugging(self, monitorDebugTypes): # noqa - Method is not declared static
loop = 0
listedTypes = ''
for monitorDebugType in monitorDebugTypes:
if loop == 0:
listedTypes = listedTypes + monitorDebugType
else:
listedTypes = listedTypes + ', ' + monitorDebugType
loop += 1
return listedTypes
def processToggleTurnOnOff(self, pluginAction, dev):
if float(self.globals['trvc'][dev.id]['setpointHeat']) == float(self.globals['trvc'][dev.id]['setpointHeatMinimum']):
self.processTurnOn(pluginAction, dev)
else:
self.processTurnOff(pluginAction, dev)
# noinspection PyUnusedLocal
def processTurnOff(self, pluginAction, dev):
trvCtlrDevId = dev.id
newSetpoint = float(self.globals['trvc'][trvCtlrDevId]['setpointHeatMinimum'])
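        # 'Turning off' is modelled as setting the heat setpoint to the configured minimum; the change is
        # queued to the TRV handler thread rather than written to the device states directly.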
# keyValueList = [
# {'key': 'controllerMode', 'value': CONTROLLER_MODE_UI},
# {'key': 'controllerModeUi', 'value': CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_UI]},
# {'key': 'setpointHeat', 'value': newSetpoint}
# ]
# dev.updateStatesOnServer(keyValueList)
queuedCommand = CMD_UPDATE_TRV_CONTROLLER_STATES
updateList = dict()
updateList[UPDATE_CONTROLLER_HEAT_SETPOINT] = newSetpoint
updateList[UPDATE_CONTROLLER_MODE] = CONTROLLER_MODE_UI
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
# noinspection PyUnusedLocal
def processTurnOn(self, pluginAction, dev):
trvCtlrDevId = dev.id
newSetpoint = float(self.globals['trvc'][trvCtlrDevId]['setpointHeatDefault'])
# keyValueList = [
# {'key': 'controllerMode', 'value': CONTROLLER_MODE_UI},
# {'key': 'controllerModeUi', 'value': CONTROLLER_MODE_TRANSLATION[CONTROLLER_MODE_UI]},
# {'key': 'setpointHeat', 'value': newSetpoint}
# ]
# dev.updateStatesOnServer(keyValueList)
queuedCommand = CMD_UPDATE_TRV_CONTROLLER_STATES
updateList = dict()
updateList[UPDATE_CONTROLLER_HEAT_SETPOINT] = newSetpoint
updateList[UPDATE_CONTROLLER_MODE] = CONTROLLER_MODE_UI
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, self.globals['deviceUpdatedSequenceCount'], queuedCommand, trvCtlrDevId, [updateList, ]])
# noinspection PyUnusedLocal
def processAdvance(self, pluginAction, dev):
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_ADVANCE, dev.id, [ADVANCE_NEXT]])
# noinspection PyUnusedLocal
def processAdvanceOff(self, pluginAction, dev):
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_ADVANCE, dev.id, [ADVANCE_NEXT_OFF]])
def processAdvanceOffToggle(self, pluginAction, dev):
if not self.globals['trvc'][dev.id]['advanceActive']:
self.processAdvanceOff(pluginAction, dev)
else:
self.processCancelAdvance(pluginAction, dev)
# noinspection PyUnusedLocal
def processAdvanceOn(self, pluginAction, dev):
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_ADVANCE, dev.id, [ADVANCE_NEXT_ON]])
def processAdvanceOnToggle(self, pluginAction, dev):
if not self.globals['trvc'][dev.id]['advanceActive']:
self.processAdvanceOn(pluginAction, dev)
else:
self.processCancelAdvance(pluginAction, dev)
def processAdvanceToggle(self, pluginAction, dev):
if not self.globals['trvc'][dev.id]['advanceActive']:
self.processAdvance(pluginAction, dev)
else:
self.processCancelAdvance(pluginAction, dev)
# noinspection PyUnusedLocal
def processCancelAdvance(self, pluginAction, dev):
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_ADVANCE_CANCEL, dev.id, [True]])
def processBoost(self, pluginAction, dev):
if pluginAction.pluginTypeId == 'processBoost':
boostMode = int(pluginAction.props.get('boostMode', 0))
elif pluginAction.pluginTypeId == 'processBoostToggle':
boostMode = int(pluginAction.props.get('toggleBoostMode', 0))
else:
self.logger.error(f'Boost logic failure for thermostat \'{dev.name}\' - boost not actioned for id \'{pluginAction}\'')
return
if boostMode == BOOST_MODE_SELECT:
self.logger.error(f'Boost Mode not set for thermostat \'{dev.name}\' - boost not actioned')
return
if pluginAction.pluginTypeId == 'processBoost':
boostDeltaT = float(pluginAction.props.get('boostDeltaT', 2.0))
boostSetpoint = float(pluginAction.props.get('boostSetpoint', 21.0))
boostMinutes = int(pluginAction.props.get('boostMinutes', 20))
else: # Must be pluginAction = processBoostToggle
boostDeltaT = float(pluginAction.props.get('toggleBoostDeltaT', 2.0))
boostSetpoint = float(pluginAction.props.get('toggleBoostSetpoint', 21.0))
boostMinutes = int(pluginAction.props.get('toggleBoostMinutes', 20))
self.globals['trvc'][dev.id]['boostActive'] = True
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, 0, CMD_BOOST, dev.id, [boostMode, boostDeltaT, boostSetpoint, boostMinutes]])
if boostMode == BOOST_MODE_DELTA_T:
self.logger.info(f'Boost actioned for {boostMinutes} minutes with a Delta T of {boostDeltaT} degrees for thermostat \'{dev.name}\'')
else: # BOOST_MODE_SETPOINT
self.logger.info(f'Boost actioned for {boostMinutes} minutes with a Setpoint of {boostSetpoint} degrees for thermostat \'{dev.name}\'')
def processBoostToggle(self, pluginAction, dev):
if not self.globals['trvc'][dev.id]['boostActive']:
self.processBoost(pluginAction, dev)
else:
self.processCancelBoost(pluginAction, dev)
# noinspection PyUnusedLocal
def processCancelBoost(self, pluginAction, dev):
if self.globals['trvc'][dev.id]['boostActive']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_BOOST_CANCEL, dev.id, [True]])
self.logger.info(f'Boost cancelled for thermostat \'{dev.name}\'')
else:
self.logger.info(f'Boost cancel request ignored for thermostat \'{dev.name}\' as no Boost active')
def processExtend(self, pluginAction, dev):
extendIncrementMinutes = int(pluginAction.props.get('extendIncrementMinutes', 15))
extendMaximumMinutes = int(pluginAction.props.get('extendMaximumMinutes', 15))
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, 0, CMD_EXTEND, dev.id, [extendIncrementMinutes, extendMaximumMinutes]])
# self.logger.info(f'Extend actioned for thermostat \'{dev.name}\'')
# noinspection PyUnusedLocal
def processCancelExtend(self, pluginAction, dev):
if self.globals['trvc'][dev.id]['extendActive']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_EXTEND_CANCEL, dev.id, [True]])
# self.logger.info(f'Extend cancelled for thermostat \'{dev.name}\'')
else:
self.logger.info(f'Extend cancel request ignored for thermostat \'{dev.name}\' as no Extend active')
def processResetScheduleToDeviceDefaults(self, pluginAction, dev):
self.logger.debug(f' Thermostat \'{dev.name}\', Action received: \'{pluginAction.description}\'')
self.logger.debug(f'... Action details:\n{pluginAction}\n')
devId = dev.id
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_RESET_SCHEDULE_TO_DEVICE_DEFAULTS, devId, None])
# noinspection PyUnusedLocal
def processShowAllSchedules(self, pluginAction):
scheduleReportLineLength = 80
scheduleReport = f'\n{"=" * scheduleReportLineLength}'
scheduleReport = scheduleReport + self.boxLine('TRV Controller Plugin - Heating Schedules', scheduleReportLineLength, u'==')
scheduleReport = scheduleReport + self.boxLine(' ', scheduleReportLineLength, u'==')
for trvcDev in indigo.devices.iter("self"):
scheduleReport = scheduleReport + self._showSchedule(trvcDev.id, scheduleReportLineLength)
scheduleReport = scheduleReport + self.boxLine(' ', scheduleReportLineLength, u'==')
scheduleReport = scheduleReport + f'\n{"=" * scheduleReportLineLength}\n'
self.logger.info(scheduleReport)
# noinspection PyUnusedLocal
def processShowSchedule(self, pluginAction, trvcDev):
try:
scheduleReportLineLength = 80
scheduleReport = f'\n{"=" * scheduleReportLineLength}'
scheduleReport = scheduleReport + self.boxLine('TRV Controller Plugin - Heating Schedule', scheduleReportLineLength, u'==')
scheduleReport = scheduleReport + self.boxLine(' ', scheduleReportLineLength, u'==')
scheduleReport = scheduleReport + self._showSchedule(trvcDev.id, scheduleReportLineLength)
scheduleReport = scheduleReport + self.boxLine(' ', scheduleReportLineLength, u'==')
scheduleReport = scheduleReport + f'\n{"=" * scheduleReportLineLength}\n'
self.logger.info(scheduleReport)
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
# noinspection PyUnusedLocal
def processShowStatus(self, pluginAction, dev):
devId = dev.id
self.logger.info(f'Showing full internal status of \'{dev.name}\'')
        for key in sorted(self.globals['trvc'][devId].keys()):
            self.logger.info(f'\'{dev.name}\': {key} = {self.globals["trvc"][devId][key]}')
        self.logger.info("Showing Heat Source TRV Controller Device Table")
        for table_entry in self.globals['devicesToTrvControllerTable'].items():
            self.logger.info(f"Device: {table_entry}")
# noinspection PyUnusedLocal
def processShowZwaveWakeupInterval(self, pluginAction):
statusOptimize = dict()
for dev in indigo.devices.iter("self"):
if dev.enabled and dev.configured:
devId = dev.id
if self.globals['trvc'][devId]['zwaveDeltaCurrent'] != "[n/a]":
tempSplit = self.globals['trvc'][devId]['zwaveDeltaCurrent'].split(':')
tempZwaveDeltaCurrent = int(tempSplit[0]) * 60 + int(tempSplit[1])
# tempZwaveDeltaCurrent = datetime.datetime.strptime(self.globals['trvc'][devId]['zwaveDeltaCurrent'], '%M:%S')
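                    # divmod by 300 buckets each wakeup offset into 5-minute windows; the remainder (tempB)
                    # is what is compared below to show the spacing of TRV wakeups within a window.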
tempA, tempB = divmod(tempZwaveDeltaCurrent, 300)
statusOptimize[dev.name] = int(tempB)
self.logger.info("Z-wave wakeup intervals between TRVs (in seconds):")
optimizeDifference = 0
sortedItems = sorted(statusOptimize.items(), key=operator.itemgetter(1, 0))
for item1 in sortedItems:
if optimizeDifference == 0: # Ensure Intervals start at zero
optimizeDifference = int(item1[1])
optimizeDifferenceCalc = int(item1[1] - optimizeDifference)
self.logger.info(" %s = %s [Interval = %s]" % (item1[0], str(" " + str(item1[1]))[-3:], str(" " + str(optimizeDifferenceCalc))[-3:]))
optimizeDifference = int(item1[1])
# noinspection PyUnusedLocal
def processUpdateAllCsvFiles(self, pluginAction, trvCtlrDev):
trvCtlrDevId = trvCtlrDev.id
try:
if self.globals['config']['csvStandardEnabled']:
if self.globals['trvc'][trvCtlrDevId]['updateCsvFile']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES, trvCtlrDevId, None])
else:
self.logger.error(f'Request to update All CSV Files ignored as option \'On State Change [...]\' not set for \'{trvCtlrDev.name}\' in its device settings.')
else:
self.logger.error(f'Request to update All CSV Files ignored for \'{trvCtlrDev.name}\' as option \'Enable Standard CSV\' not enabled in the plugin config.')
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def processUpdateAllCsvFilesViaPostgreSQL(self, pluginAction, trvCtlrDev):
trvCtlrDevId = trvCtlrDev.id
try:
overrideDefaultRetentionHours = pluginAction.props.get('overrideDefaultRetentionHours', '')
if overrideDefaultRetentionHours == '':
overrideDefaultRetentionHours = 0
else:
overrideDefaultRetentionHours = int(overrideDefaultRetentionHours)
overrideCsvFilePrefix = pluginAction.props.get('overrideCsvFilePrefix', '')
trvCtlrDevId = trvCtlrDev.id
if self.globals['config']['csvPostgresqlEnabled']:
if self.globals['trvc'][trvCtlrDevId]['updateAllCsvFilesViaPostgreSQL']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_UPDATE_ALL_CSV_FILES_VIA_POSTGRESQL, trvCtlrDevId, [overrideDefaultRetentionHours, overrideCsvFilePrefix]])
else:
self.logger.error(f'Request to update All CSV Files Via PostgreSQL ignored as option \'Enable PostgreSQL CSV\' not set for \'{trvCtlrDev.name}\' in its device settings.')
else:
self.logger.error(f'Request to update All CSV Files Via PostgreSQL ignored for \'{trvCtlrDev.name}\' as option \'Enable PostgreSQL CSV\' not enabled in the plugin config.')
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def processInvokeDatagraphUsingPostgresqlToCsv(self, pluginAction, trvCtlrDev):
trvCtlrDevId = trvCtlrDev.id
try:
overrideDefaultRetentionHours = pluginAction.props.get('overrideDefaultRetentionHours', '')
if overrideDefaultRetentionHours == '':
overrideDefaultRetentionHours = 0
else:
overrideDefaultRetentionHours = int(overrideDefaultRetentionHours)
overrideCsvFilePrefix = pluginAction.props.get('overrideCsvFilePrefix', '')
if self.globals['config']['datagraphCliPath'] == "":
self.logger.error(f'Request to update Datagraph CSV File Via PostgreSQL ignored as DataGraph command line utility path not specified in plugin config.')
return
if self.globals['config']['datagraphGraphTemplatesPath'] == "":
self.logger.error(f'Request to update Datagraph CSV File Via PostgreSQL ignored as DataGraph graph templates folder path not specified in plugin config.')
return
if self.globals['config']['datagraphImagesPath'] == "":
self.logger.error(f'Request to update Datagraph CSV File Via PostgreSQL ignored as DataGraph output images folder path not specified in plugin config.')
return
trvCtlrDevId = trvCtlrDev.id
if self.globals['config']['csvPostgresqlEnabled']:
if self.globals['trvc'][trvCtlrDevId]['updateDatagraphCsvFileViaPostgreSQL']:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_LOW, 0, CMD_INVOKE_DATAGRAPH_USING_POSTGRESQL_TO_CSV, trvCtlrDevId, [overrideDefaultRetentionHours, overrideCsvFilePrefix]])
else:
self.logger.error(f'Request to update Datagraph CSV File Via PostgreSQL ignored as option \'Enable PostgreSQL CSV\' not set for \'{trvCtlrDev.name}\' in its device settings.')
else:
self.logger.error(f'Request to update Datagraph CSV File Via PostgreSQL ignored for \'{trvCtlrDev.name}\' as option \'Enable PostgreSQL CSV\' not enabled in the plugin config.')
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def processUpdateSchedule(self, pluginAction, dev):
self.logger.debug(f' Thermostat \'{dev.name}\', Action received: \'{pluginAction.description}\'')
self.logger.debug(f'... Action details:\n{pluginAction}\n')
devId = dev.id
self.globals['trvc'][devId]['nextScheduleExecutionTime'] = 'Not yet evaluated'
self.globals['trvc'][devId]['schedule1Enabled'] = bool(pluginAction.props.get('schedule1Enabled', False))
self.globals['trvc'][devId]['schedule1TimeOn'] = pluginAction.props.get('schedule1TimeOn', '00:00')
self.globals['trvc'][devId]['schedule1TimeOff'] = pluginAction.props.get('schedule1TimeOff', '00:00')
self.globals['trvc'][devId]['schedule1SetpointHeat'] = pluginAction.props.get('schedule1SetpointHeat', 0.00)
self.globals['trvc'][devId]['schedule2Enabled'] = bool(pluginAction.props.get('schedule2Enabled', False))
self.globals['trvc'][devId]['schedule2TimeOn'] = pluginAction.props.get('schedule2TimeOn', '00:00')
self.globals['trvc'][devId]['schedule2TimeOff'] = pluginAction.props.get('schedule2TimeOff', '00:00')
self.globals['trvc'][devId]['schedule2SetpointHeat'] = pluginAction.props.get('schedule2SetpointHeat', 0.00)
self.globals['trvc'][devId]['schedule3Enabled'] = bool(pluginAction.props.get('schedule3Enabled', False))
self.globals['trvc'][devId]['schedule3TimeOn'] = pluginAction.props.get('schedule3TimeOn', '00:00')
self.globals['trvc'][devId]['schedule3TimeOff'] = pluginAction.props.get('schedule3TimeOff', '00:00')
self.globals['trvc'][devId]['schedule3SetpointHeat'] = pluginAction.props.get('schedule3SetpointHeat', 0.00)
self.globals['trvc'][devId]['schedule4Enabled'] = bool(pluginAction.props.get('schedule4Enabled', False))
self.globals['trvc'][devId]['schedule4TimeOn'] = pluginAction.props.get('schedule4TimeOn', '00:00')
self.globals['trvc'][devId]['schedule4TimeOff'] = pluginAction.props.get('schedule4TimeOff', '00:00')
self.globals['trvc'][devId]['schedule4SetpointHeat'] = pluginAction.props.get('schedule4SetpointHeat', 0.00)
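        # Re-derive the user-facing schedule summaries: a schedule shows as 'Not Set'/'Inactive' when it is
        # disabled or has a zero heat setpoint, otherwise as its setpoint in °C and its ON - OFF time range.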
if not self.globals['trvc'][devId]['schedule1Enabled'] or self.globals['trvc'][devId]['schedule1SetpointHeat'] == 0.0:
self.globals['trvc'][devId]['schedule1SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][devId]['schedule1TimeUi'] = 'Inactive'
else:
self.globals['trvc'][devId]['schedule1SetpointHeatUi'] = f'{self.globals["trvc"][devId]["schedule1SetpointHeat"]} °C'
self.globals['trvc'][devId]['schedule1TimeUi'] = f'{self.globals["trvc"][devId]["schedule1TimeOn"]} - {self.globals["trvc"][devId]["schedule1TimeOff"]}'
if not self.globals['trvc'][devId]['schedule2Enabled'] or self.globals['trvc'][devId]['schedule2SetpointHeat'] == 0.0:
self.globals['trvc'][devId]['schedule2SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][devId]['schedule2TimeUi'] = 'Inactive'
else:
self.globals['trvc'][devId]['schedule2SetpointHeatUi'] = f'{self.globals["trvc"][devId]["schedule2SetpointHeat"]} °C'
self.globals['trvc'][devId]['schedule2TimeUi'] = f'{self.globals["trvc"][devId]["schedule2TimeOn"]} - {self.globals["trvc"][devId]["schedule2TimeOff"]}'
if not self.globals['trvc'][devId]['schedule3Enabled'] or self.globals['trvc'][devId]['schedule3SetpointHeat'] == 0.0:
self.globals['trvc'][devId]['schedule3SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][devId]['schedule3TimeUi'] = 'Inactive'
else:
self.globals['trvc'][devId]['schedule3SetpointHeatUi'] = f'{self.globals["trvc"][devId]["schedule3SetpointHeat"]} °C'
self.globals['trvc'][devId]['schedule3TimeUi'] = f'{self.globals["trvc"][devId]["schedule3TimeOn"]} - {self.globals["trvc"][devId]["schedule3TimeOff"]}'
if not self.globals['trvc'][devId]['schedule4Enabled'] or self.globals['trvc'][devId]['schedule4SetpointHeat'] == 0.0:
self.globals['trvc'][devId]['schedule4SetpointHeatUi'] = 'Not Set'
self.globals['trvc'][devId]['schedule4TimeUi'] = 'Inactive'
else:
self.globals['trvc'][devId]['schedule4SetpointHeatUi'] = f'{self.globals["trvc"][devId]["schedule4SetpointHeat"]} °C'
self.globals['trvc'][devId]['schedule4TimeUi'] = f'{self.globals["trvc"][devId]["schedule4TimeOn"]} - {self.globals["trvc"][devId]["schedule4TimeOff"]}'
keyValueList = [
{'key': 'nextScheduleExecutionTime', 'value': self.globals['trvc'][devId]['nextScheduleExecutionTime']},
{'key': 'schedule1Active', 'value': self.globals['trvc'][devId]['schedule1Active']},
{'key': 'schedule1Enabled', 'value': self.globals['trvc'][devId]['schedule1Enabled']},
{'key': 'schedule1TimeOn', 'value': self.globals['trvc'][devId]['schedule1TimeOn']},
{'key': 'schedule1TimeOff', 'value': self.globals['trvc'][devId]['schedule1TimeOff']},
{'key': 'schedule1TimeUi', 'value': self.globals['trvc'][devId]['schedule1TimeUi']},
{'key': 'schedule1SetpointHeat', 'value': self.globals['trvc'][devId]['schedule1SetpointHeatUi']},
{'key': 'schedule2Active', 'value': self.globals['trvc'][devId]['schedule2Active']},
{'key': 'schedule2Enabled', 'value': self.globals['trvc'][devId]['schedule2Enabled']},
{'key': 'schedule2TimeOn', 'value': self.globals['trvc'][devId]['schedule2TimeOn']},
{'key': 'schedule2TimeOff', 'value': self.globals['trvc'][devId]['schedule2TimeOff']},
{'key': 'schedule2TimeUi', 'value': self.globals['trvc'][devId]['schedule2TimeUi']},
{'key': 'schedule2SetpointHeat', 'value': self.globals['trvc'][devId]['schedule2SetpointHeatUi']},
{'key': 'schedule3Active', 'value': self.globals['trvc'][devId]['schedule3Active']},
{'key': 'schedule3Enabled', 'value': self.globals['trvc'][devId]['schedule3Enabled']},
{'key': 'schedule3TimeOn', 'value': self.globals['trvc'][devId]['schedule3TimeOn']},
{'key': 'schedule3TimeOff', 'value': self.globals['trvc'][devId]['schedule3TimeOff']},
{'key': 'schedule3TimeUi', 'value': self.globals['trvc'][devId]['schedule3TimeUi']},
{'key': 'schedule3SetpointHeat', 'value': self.globals['trvc'][devId]['schedule3SetpointHeatUi']},
{'key': 'schedule4Active', 'value': self.globals['trvc'][devId]['schedule4Active']},
{'key': 'schedule4Enabled', 'value': self.globals['trvc'][devId]['schedule4Enabled']},
{'key': 'schedule4TimeOn', 'value': self.globals['trvc'][devId]['schedule4TimeOn']},
{'key': 'schedule4TimeOff', 'value': self.globals['trvc'][devId]['schedule4TimeOff']},
{'key': 'schedule4TimeUi', 'value': self.globals['trvc'][devId]['schedule4TimeUi']},
{'key': 'schedule4SetpointHeat', 'value': self.globals['trvc'][devId]['schedule4SetpointHeatUi']}
]
dev.updateStatesOnServer(keyValueList)
# Set-up schedules
self.globals['schedules'][devId]['running'] = dict()
scheduleSetpointOff = float(self.globals['trvc'][devId]['setpointHeatMinimum'])
self.globals['schedules'][devId]['running'][0] = ('00:00', scheduleSetpointOff, 0, False) # Start of Day
self.globals['schedules'][devId]['running'][240000] = ('24:00', scheduleSetpointOff, 9, False) # End of Day
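        # Each 'running' entry is keyed on an HHMMSS-encoded integer (e.g. '06:30' -> 63000) and holds a tuple of
        # (time as 'HH:MM', heat setpoint, schedule id, flag: True for a heating ON boundary, False for OFF).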
if self.globals['trvc'][devId]['schedule1Enabled']:
scheduleTimeOnUi = self.globals['trvc'][devId]['schedule1TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][devId]['schedule1TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][devId]['schedule1SetpointHeat'])
self.globals['schedules'][devId]['running'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 1, True)
self.globals['schedules'][devId]['running'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 1, False)
if self.globals['trvc'][devId]['schedule2Enabled']:
scheduleTimeOnUi = self.globals['trvc'][devId]['schedule2TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][devId]['schedule2TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][devId]['schedule2SetpointHeat'])
self.globals['schedules'][devId]['running'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 2, True)
self.globals['schedules'][devId]['running'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 2, False)
if self.globals['trvc'][devId]['schedule3Enabled']:
scheduleTimeOnUi = self.globals['trvc'][devId]['schedule3TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][devId]['schedule3TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][devId]['schedule3SetpointHeat'])
self.globals['schedules'][devId]['running'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 3, True)
self.globals['schedules'][devId]['running'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 3, False)
if self.globals['trvc'][devId]['schedule4Enabled']:
scheduleTimeOnUi = self.globals['trvc'][devId]['schedule4TimeOn']
scheduleTimeOn = int(scheduleTimeOnUi.replace(':', '')) * 100 # Add in Seconds
scheduleTimeOffUi = self.globals['trvc'][devId]['schedule4TimeOff']
scheduleTimeOff = int(scheduleTimeOffUi.replace(':', '')) * 100 # Add in Seconds
scheduleSetpointOn = float(self.globals['trvc'][devId]['schedule4SetpointHeat'])
self.globals['schedules'][devId]['running'][scheduleTimeOn] = (scheduleTimeOnUi, scheduleSetpointOn, 4, True)
self.globals['schedules'][devId]['running'][scheduleTimeOff] = (scheduleTimeOffUi, scheduleSetpointOff, 4, False)
self.globals['schedules'][devId]['running'] = collections.OrderedDict(sorted(self.globals['schedules'][devId]['running'].items()))
self.globals['schedules'][devId]['dynamic'] = self.globals['schedules'][devId]['running'].copy()
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_MEDIUM, 0, CMD_DELAY_COMMAND, devId, [CMD_PROCESS_HEATING_SCHEDULE, 2.0, None]])
def remoteThermostatDevices(self, indigo_filter="", valuesDict=None, typeId="", targetId=0): # noqa - Method is not declared static + unused local symbols
array = []
for dev in indigo.devices.iter():
if dev.deviceTypeId != 'trvController':
# Deprecated 'subModel' code follows ...
# if (dev.subModel == 'Temperature' or dev.subModel == 'Temperature 1' or dev.subModel == 'Thermostat' or dev.deviceTypeId == 'hueMotionTemperatureSensor' or
# (dev.model == 'Thermostat (TF021)' and dev.subModel[0:7].lower() == 'sensor ')):
# if dev.subType == 'Temperature' or dev.subType == 'Thermostat' or (dev.model == 'Thermostat (TF021)' and dev.subType == 'Sensor'):
# array.append((dev.id, dev.name))
# else:
# try:
# test = float(dev.states['temperatureInput1']) # noqa [test value not used] - e.g. Secure SRT321 / HRT4-ZW
# except (AttributeError, KeyError, ValueError):
# try:
# test = float(dev.states['temperature']) # noqa [test value not used] - e.g. Oregon Scientific Temp Sensor
# except (AttributeError, KeyError, ValueError):
# try:
# test = float(dev.states['Temperature']) # noqa [test value not used] - e.g. Netatmo
# except (AttributeError, KeyError, ValueError):
# try:
# test = float(dev.states['sensorValue']) # noqa [test value not used] - e.g. HeatIT TF021 / MQTT Value Sensor Device
# except (AttributeError, KeyError, ValueError):
# continue
# array.append((dev.id, dev.name))
if type(dev) == indigo.ThermostatDevice or type(dev) == indigo.SensorDevice:
num_temperature_inputs = int(dev.ownerProps.get("NumTemperatureInputs", "0"))
if num_temperature_inputs > 0 or dev.subType == indigo.kSensorDeviceSubType.Temperature:
array.append((dev.id, dev.name))
return sorted(array, key=lambda dev_name: dev_name[1].lower()) # sort by device name
def radiatorTemperatureSensorDevices(self, indigo_filter="", valuesDict=None, typeId="", targetId=0): # noqa - Method is not declared static + unused local symbols
array = []
for dev in indigo.devices.iter():
if dev.deviceTypeId != 'trvController':
if type(dev) == indigo.ThermostatDevice or type(dev) == indigo.SensorDevice:
num_temperature_inputs = int(dev.ownerProps.get("NumTemperatureInputs", "0"))
if num_temperature_inputs > 0 or dev.subType == indigo.kSensorDeviceSubType.Temperature:
array.append((dev.id, dev.name))
return sorted(array, key=lambda dev_name: dev_name[1].lower()) # sort by device name
# noinspection PyUnusedLocal
def restateSchedulesTriggered(self, triggeredSeconds):
try:
self.globals['queues']['trvHandler'].put([QUEUE_PRIORITY_STATUS_HIGH, 0, CMD_RESTATE_SCHEDULES, None, None])
secondsUntilSchedulesRestated = calculateSecondsUntilSchedulesRestated()
self.globals['timers']['reStateSchedules'] = threading.Timer(float(secondsUntilSchedulesRestated), self.restateSchedulesTriggered, [secondsUntilSchedulesRestated])
self.globals['timers']['reStateSchedules'].daemon = True
self.globals['timers']['reStateSchedules'].start()
self.logger.info(f'TRV Controller has calculated the number of seconds until Schedules restated as {secondsUntilSchedulesRestated}')
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
# noinspection PyUnusedLocal
def trvControlledDevices(self, indigo_filter="", valuesDict=None, typeId="", targetId=0): # noqa
array = []
for dev in indigo.devices.iter("indigo.thermostat"):
if dev.deviceTypeId != 'trvController':
array.append((dev.id, dev.name))
return sorted(array, key=lambda dev_name: dev_name[1].lower()) # sort by device name
# noinspection PyUnusedLocal
def trvThermostatDeviceSelected(self, valuesDict, typeId, devId):
trvDevId = int(valuesDict.get('trvDevId', 0))
if trvDevId != 0:
trvDev = indigo.devices[trvDevId]
trv_model_name = trvDev.model
if trv_model_name in self.globals['supportedTrvModels']:
pass
else:
trv_model_name = 'Unknown TRV Model'
trvModelProperties = self.globals['supportedTrvModels'][trv_model_name]
valuesDict['supportedModel'] = trv_model_name
valuesDict['supportsWakeup'] = self.globals['supportedTrvModels'][trv_model_name]['supportsWakeup']
valuesDict['supportsTemperatureReporting'] = self.globals['supportedTrvModels'][trv_model_name]['supportsTemperatureReporting']
valuesDict['supportsHvacOnOff'] = self.globals['supportedTrvModels'][trv_model_name]['supportsHvacOnOff']
valuesDict['supportsValveControl'] = self.globals['supportedTrvModels'][trv_model_name]['supportsValveControl']
valuesDict['supportsManualSetpoint'] = self.globals['supportedTrvModels'][trv_model_name]['supportsManualSetpoint']
valuesDict['setpointHeatMinimum'] = self.globals['supportedTrvModels'][trv_model_name]['setpointHeatMinimum']
valuesDict['setpointHeatMaximum'] = self.globals['supportedTrvModels'][trv_model_name]['setpointHeatMaximum']
valuesDict['trvDeviceSetpointHeatMaximum'] = self.globals['supportedTrvModels'][trv_model_name]['setpointHeatMaximum']
return valuesDict
def validateSchedule(self, trvcId, valuesDict, scheduleNumber):
# Common routine to check a schedule: On time, off time and heat setpoint
# Used by validateDeviceConfigUi
try:
# setup names
scheduleTimeOnName = f'schedule{scheduleNumber}TimeOn'
scheduleTimeOffName = f'schedule{scheduleNumber}TimeOff'
scheduleSetpointHeatName = f'schedule{scheduleNumber}SetpointHeat'
# self.logger.error(f'validateSchedule: OnName = \'{scheduleTimeOnName}\', OffName = \'{scheduleTimeOffName}\', SetpointHeatName = \'{scheduleSetpointHeatName}\'')
scheduleName = ('One', 'Two', 'Three', 'Four')[int(scheduleNumber)-1]
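            # validateTime returns the field unchanged when it is a valid 'HH:MM' value; otherwise it returns
            # the sentinel '24:00', which the checks below treat as an invalid/unset time.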
def validateTime(timeField):
try:
if len(timeField) != 5:
return '24:00'
if timeField[2:3] != ':':
return '24:00'
hour = int(timeField[0:2])
if hour < 0 or hour > 23:
return '24:00'
minute = int(timeField[3:5])
if minute < 0 or minute > 59:
return '24:00'
# Valid at this point
return timeField
except Exception:
return '24:00'
# Validate Schedule ON time
scheduleTimeOn = '24:00'
try:
scheduleTimeToTest = valuesDict.get(scheduleTimeOnName, '24:00')
scheduleTimeOn = validateTime(scheduleTimeToTest)
except Exception:
pass
if scheduleTimeOn == '00:00' or scheduleTimeOn == '24:00':
errorDict = indigo.Dict()
errorDict[scheduleTimeOnName] = 'Set time (in HH:MM format) between 00:01 and 23:59 (inclusive)'
errorDict['showAlertText'] = f'You must enter a Schedule {scheduleName} time (in HH:MM format) between 00:01 and 23:59 (inclusive) for when the TRV will turn ON.'
return False, errorDict
# Validate Schedule OFF time
scheduleTimeOff = '24:00'
try:
scheduleTimeToTest = valuesDict.get(scheduleTimeOffName, '24:00')
scheduleTimeOff = validateTime(scheduleTimeToTest)
except Exception:
pass
if scheduleTimeOff == '00:00' or scheduleTimeOff == '24:00':
errorDict = indigo.Dict()
errorDict[scheduleTimeOffName] = 'Set time (in HH:MM format) between 00:01 and 23:59 (inclusive)'
errorDict['showAlertText'] = f'You must enter a Schedule {scheduleName} time (in HH:MM format) between 00:01 and 23:59 (inclusive) for when the TRV will turn OFF.'
return False, errorDict
            # Validate Schedule Heat Setpoint
setpointHeatMinimum = float(valuesDict.get('setpointHeatMinimum', 0.0))
setpointHeatMaximum = float(valuesDict.get('setpointHeatMaximum', 0.0))
if setpointHeatMinimum == 0.0 or setpointHeatMaximum == 0.0:
errorDict = indigo.Dict()
errorDict[scheduleSetpointHeatName] = 'TRV Maximum and Minimum Setpoint Heat Temperatures invalid - make sure to select TRV Thermostat Device before defining schedule'
errorDict['showAlertText'] = f'TRV Maximum and Minimum Setpoint Heat Temperatures invalid for Schedule {scheduleName}, make sure to select TRV Thermostat Device before defining schedule'
return False, errorDict
valid = False
try:
scheduleSetpointHeat = float(valuesDict.get(scheduleSetpointHeatName, 0))
valid = True
except ValueError:
scheduleSetpointHeat = 0 # To suppress PyCharm warning
if valid: # so far!
if scheduleSetpointHeat < setpointHeatMinimum or scheduleSetpointHeat > setpointHeatMaximum or scheduleSetpointHeat % 0.5 != 0:
valid = False
if not valid:
errorDict = indigo.Dict()
errorDict[scheduleSetpointHeatName] = f'Setpoint temperature must be numeric and set between {setpointHeatMinimum} and {setpointHeatMaximum} (inclusive)'
errorDict['showAlertText'] = f'You must enter a valid Schedule {scheduleName} Setpoint temperature for the TRV. It must be numeric and set between {setpointHeatMinimum} and {setpointHeatMaximum} (inclusive) and a multiple of 0.5.'
return False, errorDict
# Check Schedule Times consistent
if scheduleTimeOff > scheduleTimeOn:
secondsDelta = secondsFromHHMM(scheduleTimeOff) - secondsFromHHMM(scheduleTimeOn)
else:
secondsDelta = 0
if secondsDelta < 600: # 10 minutes (600 seconds) check
errorDict = indigo.Dict()
errorDict[scheduleTimeOnName] = f'The Schedule {scheduleName} ON time must be at least 10 minutes before the Schedule {scheduleName} OFF time'
errorDict[scheduleTimeOffName] = f'The Schedule {scheduleName} OFF time must be at least 10 minutes after the Schedule {scheduleName} ON time'
errorDict['showAlertText'] = f'The Schedule {scheduleName} ON time [{scheduleTimeOn}] must be at least 10 minutes before the Schedule {scheduleName} OFF time [{scheduleTimeOff}]'
return False, errorDict
return True, [scheduleTimeOn, scheduleTimeOff, scheduleSetpointHeat]
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
def zwaveWakeupMissedTriggered(self, trvCtlrDevId, devType, devId):
try:
# Wakeup missed
if devType == TRV:
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayTrv'] = True
nextWakeupMissedSeconds = self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalTrv'] * 60
deviceType = 'TRV device'
lastWakeupTime = self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeTrv']
else: # Must be Remote
self.globals['trvc'][trvCtlrDevId]['zwaveWakeupDelayRemote'] = True
nextWakeupMissedSeconds = self.globals['trvc'][trvCtlrDevId]['zwaveWakeupIntervalRemote'] * 60
deviceType = 'Remote Thermostat device'
lastWakeupTime = self.globals['trvc'][trvCtlrDevId]['zwaveEventReceivedDateTimeRemote']
if not indigo.devices[trvCtlrDevId].enabled:
self.logger.warning(
f'Z-Wave wakeup check cancelled for disabled Controller \'{indigo.devices[trvCtlrDevId].name}\' and associated {deviceType} \'{indigo.devices[devId].name}\'. Last Z-wave command received: {lastWakeupTime}')
return
if not indigo.devices[devId].enabled:
self.logger.warning(
f'Z-Wave wakeup check cancelled for disabled {deviceType} \'{indigo.devices[devId].name}\', controlled by \'{indigo.devices[trvCtlrDevId].name}\'. Last Z-wave command received: {lastWakeupTime}')
return
indigo.devices[trvCtlrDevId].updateStateImageOnServer(indigo.kStateImageSel.TimerOn)
self.logger.warning(
f'Z-Wave wakeup missed for {deviceType} \'{indigo.devices[devId].name}\', controlled by \'{indigo.devices[trvCtlrDevId].name}\'. Last Z-wave command received: {lastWakeupTime}')
if nextWakeupMissedSeconds < 300: # If less than 5 minutes
nextWakeupMissedSeconds = 300 # default to 5 minutes
self.globals['timers']['zwaveWakeupCheck'][devId] = threading.Timer(float(nextWakeupMissedSeconds), self.zwaveWakeupMissedTriggered, [trvCtlrDevId, devType, devId])
self.globals['timers']['zwaveWakeupCheck'][devId].daemon = True
self.globals['timers']['zwaveWakeupCheck'][devId].start()
except Exception as exception_error:
self.exception_handler(exception_error, True) # Log error and display failing statement
| [
"logging.getLogger",
"indigo.device.statusRequest",
"trvHandler.ThreadTrvHandler",
"sys.exc_info",
"indigo.server.getTime",
"indigo.Dict",
"operator.itemgetter",
"datetime.timedelta",
"indigo.PluginBase.__init__",
"datetime.datetime",
"xml.etree.ElementTree.parse",
"datetime.time",
"indigo.zwave.subscribeToOutgoing",
"indigo.variables.folder.create",
"threading.Lock",
"indigo.server.error",
"indigo.PluginBase.deviceUpdated",
"indigo.devices.iter",
"queue.PriorityQueue",
"indigo.server.getInstallFolderPath",
"threading.Timer",
"delayHandler.ThreadDelayHandler",
"platform.mac_ver",
"sys.version.replace",
"indigo.devices.subscribeToChanges",
"indigo.variables.folders.getId",
"platform.machine",
"indigo.PluginBase.__del__",
"datetime.datetime.utcnow",
"datetime.datetime.strptime",
"logging.Formatter",
"indigo.variable.updateValue",
"threading.Event",
"datetime.datetime.now",
"indigo.zwave.subscribeToIncoming",
"queue.Queue",
"indigo.device.enable"
] | [((1325, 1435), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'tomorrow.year', 'month': 'tomorrow.month', 'day': 'tomorrow.day', 'hour': '(0)', 'minute': '(0)', 'second': '(0)'}), '(year=tomorrow.year, month=tomorrow.month, day=tomorrow.\n day, hour=0, minute=0, second=0)\n', (1342, 1435), False, 'import datetime\n'), ((1262, 1285), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1283, 1285), False, 'import datetime\n'), ((1288, 1309), 'datetime.timedelta', 'datetime.timedelta', (['(1)'], {}), '(1)\n', (1306, 1309), False, 'import datetime\n'), ((2058, 2151), 'indigo.PluginBase.__init__', 'indigo.PluginBase.__init__', (['self', 'pluginId', 'pluginDisplayName', 'pluginVersion', 'pluginPrefs'], {}), '(self, pluginId, pluginDisplayName, pluginVersion,\n pluginPrefs)\n', (2084, 2151), False, 'import indigo\n'), ((3416, 3452), 'indigo.server.getInstallFolderPath', 'indigo.server.getInstallFolderPath', ([], {}), '()\n', (3450, 3452), False, 'import indigo\n'), ((4773, 4908), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s.%(msecs)03d\t%(levelname)-12s\t%(name)s.%(funcName)-25s %(msg)s"""'], {'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(\n '%(asctime)s.%(msecs)03d\\t%(levelname)-12s\\t%(name)s.%(funcName)-25s %(msg)s'\n , datefmt='%Y-%m-%d %H:%M:%S')\n", (4790, 4908), False, 'import logging\n'), ((5175, 5206), 'logging.getLogger', 'logging.getLogger', (['"""Plugin.TRV"""'], {}), "('Plugin.TRV')\n", (5192, 5206), False, 'import logging\n'), ((6009, 6027), 'platform.machine', 'platform.machine', ([], {}), '()\n', (6025, 6027), False, 'import platform\n'), ((6117, 6146), 'sys.version.replace', 'sys.version.replace', (['"""\n"""', '""""""'], {}), "('\\n', '')\n", (6136, 6146), False, 'import sys\n'), ((8450, 8466), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (8464, 8466), False, 'import threading\n'), ((8680, 8732), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2000-01-01"""', '"""%Y-%m-%d"""'], {}), "('2000-01-01', '%Y-%m-%d')\n", (8706, 8732), False, 'import datetime\n'), ((8932, 8952), 'xml.etree.ElementTree.parse', 'eTree.parse', (['xmlFile'], {}), '(xmlFile)\n', (8943, 8952), True, 'import xml.etree.ElementTree as eTree\n'), ((10638, 10658), 'xml.etree.ElementTree.parse', 'eTree.parse', (['xmlFile'], {}), '(xmlFile)\n', (10649, 10658), True, 'import xml.etree.ElementTree as eTree\n'), ((11638, 11669), 'indigo.PluginBase.__del__', 'indigo.PluginBase.__del__', (['self'], {}), '(self)\n', (11663, 11669), False, 'import indigo\n'), ((140192, 140227), 'indigo.devices.subscribeToChanges', 'indigo.devices.subscribeToChanges', ([], {}), '()\n', (140225, 140227), False, 'import indigo\n'), ((140294, 140328), 'indigo.zwave.subscribeToIncoming', 'indigo.zwave.subscribeToIncoming', ([], {}), '()\n', (140326, 140328), False, 'import indigo\n'), ((140395, 140429), 'indigo.zwave.subscribeToOutgoing', 'indigo.zwave.subscribeToOutgoing', ([], {}), '()\n', (140427, 140429), False, 'import indigo\n'), ((141040, 141061), 'queue.PriorityQueue', 'queue.PriorityQueue', ([], {}), '()\n', (141059, 141061), False, 'import queue\n'), ((141148, 141161), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (141159, 141161), False, 'import queue\n'), ((141273, 141290), 'threading.Event', 'threading.Event', ([], {}), '()\n', (141288, 141290), False, 'import threading\n'), ((141349, 141427), 'trvHandler.ThreadTrvHandler', 'ThreadTrvHandler', (['self.globals', "self.globals['threads']['trvHandler']['event']"], {}), "(self.globals, 
self.globals['threads']['trvHandler']['event'])\n", (141365, 141427), False, 'from trvHandler import ThreadTrvHandler\n'), ((141624, 141641), 'threading.Event', 'threading.Event', ([], {}), '()\n', (141639, 141641), False, 'import threading\n'), ((141702, 141789), 'delayHandler.ThreadDelayHandler', 'ThreadDelayHandler', (['self.globals', "self.globals['threads']['delayHandler']['event']"], {}), "(self.globals, self.globals['threads']['delayHandler'][\n 'event'])\n", (141720, 141789), False, 'from delayHandler import ThreadDelayHandler\n'), ((219508, 219535), 'indigo.devices.iter', 'indigo.devices.iter', (['"""self"""'], {}), "('self')\n", (219527, 219535), False, 'import indigo\n'), ((221481, 221508), 'indigo.devices.iter', 'indigo.devices.iter', (['"""self"""'], {}), "('self')\n", (221500, 221508), False, 'import indigo\n'), ((238659, 238680), 'indigo.devices.iter', 'indigo.devices.iter', ([], {}), '()\n', (238678, 238680), False, 'import indigo\n'), ((241045, 241066), 'indigo.devices.iter', 'indigo.devices.iter', ([], {}), '()\n', (241064, 241066), False, 'import indigo\n'), ((242659, 242699), 'indigo.devices.iter', 'indigo.devices.iter', (['"""indigo.thermostat"""'], {}), "('indigo.thermostat')\n", (242678, 242699), False, 'import indigo\n'), ((24958, 24981), 'indigo.server.getTime', 'indigo.server.getTime', ([], {}), '()\n', (24979, 24981), False, 'import indigo\n'), ((91860, 91886), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (91884, 91886), False, 'import datetime\n'), ((136600, 136654), 'indigo.PluginBase.deviceUpdated', 'indigo.PluginBase.deviceUpdated', (['self', 'origDev', 'newDev'], {}), '(self, origDev, newDev)\n', (136631, 136654), False, 'import indigo\n'), ((136890, 136903), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (136901, 136903), False, 'import indigo\n'), ((184437, 184460), 'indigo.server.getTime', 'indigo.server.getTime', ([], {}), '()\n', (184458, 184460), False, 'import indigo\n'), ((194285, 194308), 'indigo.server.getTime', 'indigo.server.getTime', ([], {}), '()\n', (194306, 194308), False, 'import indigo\n'), ((209159, 209205), 'indigo.device.enable', 'indigo.device.enable', (['trvCtlrDevId'], {'value': '(True)'}), '(trvCtlrDevId, value=True)\n', (209179, 209205), False, 'import indigo\n'), ((1471, 1494), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1492, 1494), False, 'import datetime\n'), ((20861, 20940), 'indigo.variables.folders.getId', 'indigo.variables.folders.getId', (["self.globals['config']['trvVariableFolderName']"], {}), "(self.globals['config']['trvVariableFolderName'])\n", (20891, 20940), False, 'import indigo\n'), ((39627, 39726), 'indigo.variable.updateValue', 'indigo.variable.updateValue', (["self.globals['trvc'][trvCtlrDevId]['heatingVarId']"], {'value': '"""false"""'}), "(self.globals['trvc'][trvCtlrDevId][\n 'heatingVarId'], value='false')\n", (39654, 39726), False, 'import indigo\n'), ((91955, 91971), 'datetime.time', 'datetime.time', (['(0)'], {}), '(0)\n', (91968, 91971), False, 'import datetime\n'), ((160853, 160866), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (160864, 160866), False, 'import indigo\n'), ((163595, 163608), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (163606, 163608), False, 'import indigo\n'), ((164250, 164263), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (164261, 164263), False, 'import indigo\n'), ((174491, 174504), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (174502, 174504), False, 'import indigo\n'), ((222276, 222301), 'operator.itemgetter', 
'operator.itemgetter', (['(1)', '(0)'], {}), '(1, 0)\n', (222295, 222301), False, 'import operator\n'), ((246248, 246261), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (246259, 246261), False, 'import indigo\n'), ((246992, 247005), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (247003, 247005), False, 'import indigo\n'), ((247660, 247673), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (247671, 247673), False, 'import indigo\n'), ((248634, 248647), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (248645, 248647), False, 'import indigo\n'), ((249456, 249469), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (249467, 249469), False, 'import indigo\n'), ((6277, 6295), 'platform.mac_ver', 'platform.mac_ver', ([], {}), '()\n', (6293, 6295), False, 'import platform\n'), ((11824, 11838), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (11836, 11838), False, 'import sys\n'), ((20717, 20796), 'indigo.variables.folder.create', 'indigo.variables.folder.create', (["self.globals['config']['trvVariableFolderName']"], {}), "(self.globals['config']['trvVariableFolderName'])\n", (20747, 20796), False, 'import indigo\n'), ((161794, 161807), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (161805, 161807), False, 'import indigo\n'), ((166594, 166607), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (166605, 166607), False, 'import indigo\n'), ((169059, 169072), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (169070, 169072), False, 'import indigo\n'), ((169739, 169752), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (169750, 169752), False, 'import indigo\n'), ((170532, 170545), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (170543, 170545), False, 'import indigo\n'), ((171234, 171247), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (171245, 171247), False, 'import indigo\n'), ((171951, 171964), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (171962, 171964), False, 'import indigo\n'), ((172658, 172671), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (172669, 172671), False, 'import indigo\n'), ((173700, 173713), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (173711, 173713), False, 'import indigo\n'), ((38423, 38609), 'indigo.server.error', 'indigo.server.error', (['f"""Error detected by TRV Plugin for device [{trvcDev.name}] - Unknown Heating Source Device Type with Id: {self.globals[\'trvc\'][trvCtlrDevId][\'heatingId\']}"""'], {}), '(\n f"Error detected by TRV Plugin for device [{trvcDev.name}] - Unknown Heating Source Device Type with Id: {self.globals[\'trvc\'][trvCtlrDevId][\'heatingId\']}"\n )\n', (38442, 38609), False, 'import indigo\n'), ((94276, 94323), 'indigo.device.enable', 'indigo.device.enable', (['trvCtlrDevId'], {'value': '(False)'}), '(trvCtlrDevId, value=False)\n', (94296, 94323), False, 'import indigo\n'), ((94478, 94563), 'threading.Timer', 'threading.Timer', (['(60.0)', 'self.deviceRaceConditionReEnableTriggered', '[trvCtlrDevId]'], {}), '(60.0, self.deviceRaceConditionReEnableTriggered, [trvCtlrDevId]\n )\n', (94493, 94563), False, 'import threading\n'), ((153088, 153101), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (153099, 153101), False, 'import indigo\n'), ((156329, 156342), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (156340, 156342), False, 'import indigo\n'), ((167424, 167437), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (167435, 167437), False, 'import indigo\n'), ((168354, 168367), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (168365, 168367), False, 'import indigo\n'), ((177937, 177950), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (177948, 
177950), False, 'import indigo\n'), ((178938, 178951), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (178949, 178951), False, 'import indigo\n'), ((179947, 179960), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (179958, 179960), False, 'import indigo\n'), ((180985, 180998), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (180996, 180998), False, 'import indigo\n'), ((181994, 182007), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (182005, 182007), False, 'import indigo\n'), ((183033, 183046), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (183044, 183046), False, 'import indigo\n'), ((146727, 146740), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (146738, 146740), False, 'import indigo\n'), ((147805, 147818), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (147816, 147818), False, 'import indigo\n'), ((148891, 148904), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (148902, 148904), False, 'import indigo\n'), ((150011, 150024), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (150022, 150024), False, 'import indigo\n'), ((151098, 151111), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (151109, 151111), False, 'import indigo\n'), ((152218, 152231), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (152229, 152231), False, 'import indigo\n'), ((153865, 153878), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (153876, 153878), False, 'import indigo\n'), ((154920, 154933), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (154931, 154933), False, 'import indigo\n'), ((155486, 155499), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (155497, 155499), False, 'import indigo\n'), ((157229, 157242), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (157240, 157242), False, 'import indigo\n'), ((158143, 158156), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (158154, 158156), False, 'import indigo\n'), ((17897, 17975), 'indigo.device.statusRequest', 'indigo.device.statusRequest', (["self.globals['trvc'][action.deviceId]['trvDevId']"], {}), "(self.globals['trvc'][action.deviceId]['trvDevId'])\n", (17924, 17975), False, 'import indigo\n'), ((18066, 18152), 'indigo.device.statusRequest', 'indigo.device.statusRequest', (["self.globals['trvc'][action.deviceId]['remoteDevId']"], {}), "(self.globals['trvc'][action.deviceId][\n 'remoteDevId'])\n", (18093, 18152), False, 'import indigo\n'), ((159491, 159504), 'indigo.Dict', 'indigo.Dict', ([], {}), '()\n', (159502, 159504), False, 'import indigo\n'), ((58343, 58572), 'indigo.server.error', 'indigo.server.error', (['f"""\'{indigo.devices[self.globals[\'trvc\'][trvCtlrDevId][\'radiatorDevId\']].name}\' is an unknown Radiator Temperature Sensor type - Radiator Temperature Sensor support disabled for TRV \'{trvcDev.name}\'"""'], {}), '(\n f"\'{indigo.devices[self.globals[\'trvc\'][trvCtlrDevId][\'radiatorDevId\']].name}\' is an unknown Radiator Temperature Sensor type - Radiator Temperature Sensor support disabled for TRV \'{trvcDev.name}\'"\n )\n', (58362, 58572), False, 'import indigo\n'), ((60322, 60518), 'indigo.server.error', 'indigo.server.error', (['f"""\'{indigo.devices[self.globals[\'trvc\'][trvCtlrDevId][\'remoteDevId\']].name}\' is an unknown Remote Thermostat type - Remote support disabled for TRV \'{trvcDev.name}\'"""'], {}), '(\n f"\'{indigo.devices[self.globals[\'trvc\'][trvCtlrDevId][\'remoteDevId\']].name}\' is an unknown Remote Thermostat type - Remote support disabled for TRV \'{trvcDev.name}\'"\n )\n', (60341, 60518), False, 'import indigo\n')] |
#coding=utf-8
'''
Created on 23/04/2018
@author: <EMAIL>
'''
import re
import string
import operator
import glob
from datetime import datetime
from dateutil.relativedelta import relativedelta
from SimpleMapReduce import SimpleMapReduce
# Function that loads the file containing the Wikipedia article data
def cargar_archivo(ruta):
#archivo = open(ruta, 'r', encoding="utf8").read()
# List of articles loaded from the Wikipedia file
#articulos = archivo.split(',\n')
# List of programming languages to search for
lenguajes = list(set(["JavaScript", "Java", "PHP", "Python", "C#",
"C++", "Ruby", "CSS", "Objective-C", "Perl",
"Scala", "Haskell", "MATLAB", "Clojure", "Groovy"]))
# Translation table that replaces punctuation with spaces,
# except for the characters + - #
puntuacion = string.punctuation.replace('+', '').replace('-', '').replace('#', '')
TR = str.maketrans(puntuacion, ' ' * len(puntuacion))
# Declare the list of records as (language, occurrences) tuples
ocurrencias = []
# Iterate over the list of articles looking for programming languages
with open(ruta, 'r', encoding="utf8") as archivo:
# List of articles loaded from the Wikipedia file
articulos = archivo.read().split(',\n')
print("Cantidad de articulos: {}\n".format(len(articulos)))
for articulo in articulos:
articulo = articulo.translate(TR)
articulo = articulo.lower()
for lenguaje in lenguajes:
lenguaje = lenguaje.lower()
texto_re = r'\b'+re.escape(lenguaje)+r'\b'
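# Illustrative example: for the language 'c++' the pattern becomes r'\bc\+\+\b'.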
if len(re.findall(texto_re, articulo)) > 0:
# When the language is found in an article, emit a tuple
# with the language and the value 1 (these are summed later)
ocurrencias.append((lenguaje, 1))
return(ocurrencias)
# Function that counts the number of times a language appears in an article
def contar_palabras(tupla):
lenguaje, ocurrencias = tupla
return(lenguaje, sum(ocurrencias))
# Function to compute the execution time
def diff(t_a, t_b):
t_diff = relativedelta(t_b, t_a) # later/end time comes first!
return('{h}h {m}m {s}s'.format(h=t_diff.hours, m=t_diff.minutes, s=t_diff.seconds))
if __name__ == '__main__':
ruta_archivos = glob.glob('resources/wikipedia_1000.dat')
tiempo_ini = datetime.now()
mapper = SimpleMapReduce(cargar_archivo, contar_palabras, 15)
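# SimpleMapReduce (imported above) is assumed to run cargar_archivo over the input files in a
# pool of 15 workers and then reduce the grouped (language, 1) tuples with contar_palabras.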
cuenta_leng = mapper(ruta_archivos)
cuenta_leng.sort(key=operator.itemgetter(1))
cuenta_leng.reverse()
maximo_leng = max(len(leng) for leng, cant in cuenta_leng)
for leng, cant in cuenta_leng:
print( '%-*s: %5s' % (maximo_leng+1, leng, cant))
tiempo_fin = datetime.now()
print("\nTiempo de ejecucion: {}".format(diff(tiempo_ini, tiempo_fin)))
| [
"re.escape",
"dateutil.relativedelta.relativedelta",
"datetime.datetime.now",
"string.punctuation.replace",
"operator.itemgetter",
"re.findall",
"SimpleMapReduce.SimpleMapReduce",
"glob.glob"
] | [((2299, 2322), 'dateutil.relativedelta.relativedelta', 'relativedelta', (['t_b', 't_a'], {}), '(t_b, t_a)\n', (2312, 2322), False, 'from dateutil.relativedelta import relativedelta\n'), ((2494, 2535), 'glob.glob', 'glob.glob', (['"""resources/wikipedia_1000.dat"""'], {}), "('resources/wikipedia_1000.dat')\n", (2503, 2535), False, 'import glob\n'), ((2553, 2567), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2565, 2567), False, 'from datetime import datetime\n'), ((2581, 2633), 'SimpleMapReduce.SimpleMapReduce', 'SimpleMapReduce', (['cargar_archivo', 'contar_palabras', '(15)'], {}), '(cargar_archivo, contar_palabras, 15)\n', (2596, 2633), False, 'from SimpleMapReduce import SimpleMapReduce\n'), ((2922, 2936), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2934, 2936), False, 'from datetime import datetime\n'), ((2699, 2721), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (2718, 2721), False, 'import operator\n'), ((926, 961), 'string.punctuation.replace', 'string.punctuation.replace', (['"""+"""', '""""""'], {}), "('+', '')\n", (952, 961), False, 'import string\n'), ((1699, 1718), 're.escape', 're.escape', (['lenguaje'], {}), '(lenguaje)\n', (1708, 1718), False, 'import re\n'), ((1748, 1778), 're.findall', 're.findall', (['texto_re', 'articulo'], {}), '(texto_re, articulo)\n', (1758, 1778), False, 'import re\n')] |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 <NAME>, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import json
from bamboo_engine import metrics
from bamboo_engine.eri import (
Node,
NodeType,
ServiceActivity,
SubProcess,
ExclusiveGateway,
ParallelGateway,
ConditionalParallelGateway,
ConvergeGateway,
EmptyStartEvent,
EmptyEndEvent,
ExecutableEndEvent,
Condition,
)
from pipeline.eri.models import Node as DBNode
class NodeMixin:
def _get_node(self, node: DBNode):
node_detail = json.loads(node.detail)
node_type = node_detail["type"]
targets = node_detail["targets"]
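# `targets` is assumed to map outgoing flow IDs to target node IDs; its keys and values are
# also passed separately as target_flows / target_nodes below.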
common_args = dict(
id=node.node_id,
target_flows=list(targets.keys()),
target_nodes=list(targets.values()),
targets=node_detail["targets"],
root_pipeline_id=node_detail["root_pipeline_id"],
parent_pipeline_id=node_detail["parent_pipeline_id"],
can_skip=node_detail["can_skip"],
can_retry=node_detail["can_retry"],
)
if node_type == NodeType.ServiceActivity.value:
return ServiceActivity(
type=NodeType.ServiceActivity,
code=node_detail["code"],
version=node_detail["version"],
timeout=node_detail["timeout"],
error_ignorable=node_detail["error_ignorable"],
**common_args
)
elif node_type == NodeType.SubProcess.value:
return SubProcess(type=NodeType.SubProcess, start_event_id=node_detail["start_event_id"], **common_args)
elif node_type == NodeType.ExclusiveGateway.value:
return ExclusiveGateway(
type=NodeType.ExclusiveGateway,
conditions=[Condition(**c) for c in node_detail["conditions"]],
**common_args
)
elif node_type == NodeType.ParallelGateway.value:
return ParallelGateway(
type=NodeType.ParallelGateway, converge_gateway_id=node_detail["converge_gateway_id"], **common_args
)
elif node_type == NodeType.ConditionalParallelGateway.value:
return ConditionalParallelGateway(
type=NodeType.ConditionalParallelGateway,
converge_gateway_id=node_detail["converge_gateway_id"],
conditions=[Condition(**c) for c in node_detail["conditions"]],
**common_args
)
elif node_type == NodeType.ConvergeGateway.value:
return ConvergeGateway(type=NodeType.ConvergeGateway, **common_args)
elif node_type == NodeType.EmptyStartEvent.value:
return EmptyStartEvent(type=NodeType.EmptyStartEvent, **common_args)
elif node_type == NodeType.EmptyEndEvent.value:
return EmptyEndEvent(type=NodeType.EmptyEndEvent, **common_args)
elif node_type == NodeType.ExecutableEndEvent.value:
return ExecutableEndEvent(type=NodeType.ExecutableEndEvent, code=node_detail["code"], **common_args)
else:
raise ValueError("unknown node type: {}".format(node_type))
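# The decorator below is assumed to time each get_node call and record the duration in the
# ENGINE_RUNTIME_NODE_READ_TIME histogram metric.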
@metrics.setup_histogram(metrics.ENGINE_RUNTIME_NODE_READ_TIME)
def get_node(self, node_id: str) -> Node:
"""
Get the details of a single node.
:param node_id: node ID
:type node_id: str
:return: a Node instance
:rtype: Node
"""
node = DBNode.objects.get(node_id=node_id)
return self._get_node(node)
| [
"json.loads",
"bamboo_engine.metrics.setup_histogram",
"bamboo_engine.eri.EmptyStartEvent",
"bamboo_engine.eri.EmptyEndEvent",
"pipeline.eri.models.Node.objects.get",
"bamboo_engine.eri.ConvergeGateway",
"bamboo_engine.eri.ParallelGateway",
"bamboo_engine.eri.ServiceActivity",
"bamboo_engine.eri.SubProcess",
"bamboo_engine.eri.ExecutableEndEvent",
"bamboo_engine.eri.Condition"
] | [((3808, 3870), 'bamboo_engine.metrics.setup_histogram', 'metrics.setup_histogram', (['metrics.ENGINE_RUNTIME_NODE_READ_TIME'], {}), '(metrics.ENGINE_RUNTIME_NODE_READ_TIME)\n', (3831, 3870), False, 'from bamboo_engine import metrics\n'), ((1169, 1192), 'json.loads', 'json.loads', (['node.detail'], {}), '(node.detail)\n', (1179, 1192), False, 'import json\n'), ((4080, 4115), 'pipeline.eri.models.Node.objects.get', 'DBNode.objects.get', ([], {'node_id': 'node_id'}), '(node_id=node_id)\n', (4098, 4115), True, 'from pipeline.eri.models import Node as DBNode\n'), ((1779, 1986), 'bamboo_engine.eri.ServiceActivity', 'ServiceActivity', ([], {'type': 'NodeType.ServiceActivity', 'code': "node_detail['code']", 'version': "node_detail['version']", 'timeout': "node_detail['timeout']", 'error_ignorable': "node_detail['error_ignorable']"}), "(type=NodeType.ServiceActivity, code=node_detail['code'],\n version=node_detail['version'], timeout=node_detail['timeout'],\n error_ignorable=node_detail['error_ignorable'], **common_args)\n", (1794, 1986), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((2162, 2264), 'bamboo_engine.eri.SubProcess', 'SubProcess', ([], {'type': 'NodeType.SubProcess', 'start_event_id': "node_detail['start_event_id']"}), "(type=NodeType.SubProcess, start_event_id=node_detail[\n 'start_event_id'], **common_args)\n", (2172, 2264), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((2607, 2729), 'bamboo_engine.eri.ParallelGateway', 'ParallelGateway', ([], {'type': 'NodeType.ParallelGateway', 'converge_gateway_id': "node_detail['converge_gateway_id']"}), "(type=NodeType.ParallelGateway, converge_gateway_id=\n node_detail['converge_gateway_id'], **common_args)\n", (2622, 2729), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((2433, 2447), 'bamboo_engine.eri.Condition', 'Condition', ([], {}), '(**c)\n', (2442, 2447), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((3204, 3265), 'bamboo_engine.eri.ConvergeGateway', 'ConvergeGateway', ([], {'type': 'NodeType.ConvergeGateway'}), '(type=NodeType.ConvergeGateway, **common_args)\n', (3219, 3265), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((3344, 3405), 'bamboo_engine.eri.EmptyStartEvent', 'EmptyStartEvent', ([], {'type': 'NodeType.EmptyStartEvent'}), '(type=NodeType.EmptyStartEvent, **common_args)\n', (3359, 3405), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((3030, 3044), 'bamboo_engine.eri.Condition', 'Condition', ([], {}), '(**c)\n', (3039, 3044), False, 
'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((3482, 3539), 'bamboo_engine.eri.EmptyEndEvent', 'EmptyEndEvent', ([], {'type': 'NodeType.EmptyEndEvent'}), '(type=NodeType.EmptyEndEvent, **common_args)\n', (3495, 3539), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n'), ((3621, 3719), 'bamboo_engine.eri.ExecutableEndEvent', 'ExecutableEndEvent', ([], {'type': 'NodeType.ExecutableEndEvent', 'code': "node_detail['code']"}), "(type=NodeType.ExecutableEndEvent, code=node_detail[\n 'code'], **common_args)\n", (3639, 3719), False, 'from bamboo_engine.eri import Node, NodeType, ServiceActivity, SubProcess, ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway, EmptyStartEvent, EmptyEndEvent, ExecutableEndEvent, Condition\n')] |
# Generated by Django 3.2.3 on 2021-11-14 17:14
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('list', '0020_rename_text_item_description'),
]
operations = [
migrations.RemoveField(
model_name='trip',
name='total',
),
migrations.RemoveField(
model_name='trip',
name='total_currency',
),
migrations.AddField(
model_name='receipt',
name='image',
field=models.ImageField(blank=True, null=True, upload_to='receipts'),
),
migrations.AlterField(
model_name='receipt',
name='trip',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='receipts', to='list.trip'),
),
]
| [
"django.db.models.ImageField",
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] | [((275, 330), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""trip"""', 'name': '"""total"""'}), "(model_name='trip', name='total')\n", (297, 330), False, 'from django.db import migrations, models\n'), ((375, 439), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""trip"""', 'name': '"""total_currency"""'}), "(model_name='trip', name='total_currency')\n", (397, 439), False, 'from django.db import migrations, models\n'), ((583, 645), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""receipts"""'}), "(blank=True, null=True, upload_to='receipts')\n", (600, 645), False, 'from django.db import migrations, models\n'), ((766, 897), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""receipts"""', 'to': '"""list.trip"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='receipts', to='list.trip')\n", (783, 897), False, 'from django.db import migrations, models\n')] |
# Copyright (C) 2015-2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pynfcreader.sessions.iso14443.tpdu import Tpdu
from pynfcreader.tools import utils
class Iso14443Session(object):
def __init__(self, cid=0, nad=0, drv=None, block_size=16):
self._init_pcb_block_nb()
self._addNAD = False
self._addCID = True
self._cid = cid
self._nad = nad
self._drv = drv
self._iblock_pcb_number = 0x00
self._pcb_block_number = None
self._drv = drv
self._logger = self._drv.get_logger()
self.block_size = block_size
def connect(self):
self._drv.connect()
self._drv.set_mode_iso14443A()
def field_on(self):
self._logger.info("field on")
self._drv.field_on()
def field_off(self):
self._logger.info("field off")
self._drv.field_off()
def polling(self):
self.send_reqa()
self.send_select_full()
self.send_pps()
@property
def block_size(self):
return self._block_size
@block_size.setter
def block_size(self, size):
assert (0 <= size <= 256)
self._block_size = size
def get_and_update_iblock_pcb_number(self):
self._iblock_pcb_number ^= 1
return self._iblock_pcb_number ^ 1
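# The XOR toggles the stored bit and returns its previous value, so successive I-blocks
# alternate between block numbers 0 and 1.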
def send_pps(self, cid=0x0, pps1=False, dri=0x0, dsi=0x0):
self._logger.info("PPS")
self._logger.info("\tPCD selected options:")
pps0_pps1 = [0x01]
if pps1:
pps0_pps1.append(dri << 4 + dsi)
self._logger.info("\tCID : 0x%X" % cid)
self._logger.info("\tPPS1 %stransmitted" % ("not " * (not pps1)))
data = bytes([0xD0 + cid] + pps0_pps1)
self.comment_data("PPS:", data)
resp = self._drv.write(data=data, resp_len=3, transmitter_add_crc=True)
self.comment_data("PPS response:", resp)
if resp[0] == (0xD0 + cid):
self._logger.info("\tPPS accepted")
else:
self._logger.info("\tPPS rejected")
self._logger.info("")
return resp
def _init_pcb_block_nb(self):
self._pcb_block_number = 0
def _inc_pcb_block_number(self):
self._pcb_block_number ^= 1
def _get_pcb_block_number(self):
return self._pcb_block_number
def _get_and_update_pcb_block_number(self):
nb = self._get_pcb_block_number()
self._inc_pcb_block_number()
return nb
def get_rblock(self, ack=True, cid=True, block_number=None):
pcb = 0xA2
# Keep the optional CID byte in its own variable so the `cid` flag is not shadowed
cid_byte = None
if not ack:
pcb |= 0x10
if cid:
pcb |= 0x08
cid_byte = self._cid
if block_number:
pcb |= block_number
else:
pcb |= self.get_and_update_iblock_pcb_number()
if cid_byte is not None:
return bytes([pcb, cid_byte])
return bytes([pcb])
def get_iblock(self, data, chaining_bit=False):
"""
- 0
- 0
- 0
- Chaining if 1
- CID following if 1
- NAD following if 1
- 1
- Block number
"""
pcb = self.get_and_update_iblock_pcb_number() + 0x02
cid = ""
if self._addCID:
cid = self._cid
pcb |= 0x08
if chaining_bit:
pcb |= 0x10
nad = ""
if self._addNAD:
nad = self._nad
pcb |= 0x04
header = [pcb]
if nad != "":
header.append(nad)
if cid != "":
header.append(cid)
return bytes(header) + data
def chaining_iblock(self, data=None, block_size=None):
if not block_size:
block_size = self.block_size
block_lst = []
fragmented_data_index = range(0, len(data), block_size)
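# Illustrative example: 40 bytes of data with block_size=16 gives indices 0, 16 and 32, i.e.
# two chained I-blocks followed by a final I-block without the chaining bit.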
for hit in fragmented_data_index[:-1]:
inf_field = data[hit:hit + block_size]
frame = self.get_iblock(inf_field, chaining_bit=True)
block_lst.append(frame)
if fragmented_data_index[-1]:
index = fragmented_data_index[-1]
else:
index = 0
inf_field = data[index:index + block_size]
frame = self.get_iblock(inf_field, chaining_bit=False)
block_lst.append(frame)
return block_lst
def _send_tpdu(self, tpdu):
self._logger.info("\t\t" + "TPDU command:")
for hit in utils.get_pretty_print_block(tpdu):
self._logger.info("\t\t" + hit)
resp = self._drv.write(data=tpdu, resp_len=16, transmitter_add_crc=True)
resp = Tpdu(resp)
self._logger.info("\t\t" + "TPDU response:")
for hit in utils.get_pretty_print_block(resp.get_tpdu()):
self._logger.info("\t\t" + hit)
return resp
def send_apdu(self, apdu):
apdu = bytes.fromhex(apdu)
self._logger.info("APDU command:")
for hit in utils.get_pretty_print_block(apdu):
self._logger.info("\t" + hit)
block_lst = self.chaining_iblock(data=apdu)
if len(block_lst) == 1:
resp = self._send_tpdu(block_lst[0])
else:
self._logger.info("Block chaining, %d blocks to send" % len(block_lst))
for iblock in block_lst:
resp = self._send_tpdu(iblock)
while resp.is_wtx():
wtx_reply = resp.get_wtx_reply()
resp = self._send_tpdu(wtx_reply)
rapdu = resp.get_inf_field()
while resp.is_chaining():
rblock = self.get_rblock()
resp = self._send_tpdu(rblock)
rapdu += resp.get_inf_field()
self._logger.info("APDU response:")
for hit in utils.get_pretty_print_block(rapdu):
self._logger.info("\t" + hit)
return rapdu
def send_raw_bytes(self, data, transmitter_add_crc=True):
self._logger.info("Send Raw Bytes:")
for hit in utils.get_pretty_print_block(data):
self._logger.info("\t" + hit)
resp = self._drv.write(data=data, transmitter_add_crc=transmitter_add_crc)
self._logger.info("Response:")
for hit in utils.get_pretty_print_block(resp):
self._logger.info("\t" + hit)
return resp
def comment_data(self, msg, data):
self._logger.info(msg)
for hit in utils.get_pretty_print_block(data):
self._logger.info("\t" + hit)
| [
"pynfcreader.tools.utils.get_pretty_print_block",
"pynfcreader.sessions.iso14443.tpdu.Tpdu"
] | [((4943, 4977), 'pynfcreader.tools.utils.get_pretty_print_block', 'utils.get_pretty_print_block', (['tpdu'], {}), '(tpdu)\n', (4971, 4977), False, 'from pynfcreader.tools import utils\n'), ((5121, 5131), 'pynfcreader.sessions.iso14443.tpdu.Tpdu', 'Tpdu', (['resp'], {}), '(resp)\n', (5125, 5131), False, 'from pynfcreader.sessions.iso14443.tpdu import Tpdu\n'), ((5444, 5478), 'pynfcreader.tools.utils.get_pretty_print_block', 'utils.get_pretty_print_block', (['apdu'], {}), '(apdu)\n', (5472, 5478), False, 'from pynfcreader.tools import utils\n'), ((6223, 6258), 'pynfcreader.tools.utils.get_pretty_print_block', 'utils.get_pretty_print_block', (['rapdu'], {}), '(rapdu)\n', (6251, 6258), False, 'from pynfcreader.tools import utils\n'), ((6451, 6485), 'pynfcreader.tools.utils.get_pretty_print_block', 'utils.get_pretty_print_block', (['data'], {}), '(data)\n', (6479, 6485), False, 'from pynfcreader.tools import utils\n'), ((6672, 6706), 'pynfcreader.tools.utils.get_pretty_print_block', 'utils.get_pretty_print_block', (['resp'], {}), '(resp)\n', (6700, 6706), False, 'from pynfcreader.tools import utils\n'), ((6861, 6895), 'pynfcreader.tools.utils.get_pretty_print_block', 'utils.get_pretty_print_block', (['data'], {}), '(data)\n', (6889, 6895), False, 'from pynfcreader.tools import utils\n')] |
# Copyright 2019 Atalaya Tech, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# List of APIs for accessing remote or local yatai service via Python
import io
import os
import json
import logging
import tarfile
import requests
import shutil
from bentoml import config
from bentoml.deployment.store import ALL_NAMESPACE_TAG
from bentoml.proto.deployment_pb2 import (
ApplyDeploymentRequest,
DescribeDeploymentRequest,
GetDeploymentRequest,
DeploymentSpec,
DeleteDeploymentRequest,
ListDeploymentsRequest,
ApplyDeploymentResponse,
)
from bentoml.exceptions import BentoMLException, YataiDeploymentException
from bentoml.proto.repository_pb2 import (
AddBentoRequest,
GetBentoRequest,
BentoUri,
UpdateBentoRequest,
UploadStatus,
)
from bentoml.proto import status_pb2
from bentoml.utils.usage_stats import track_save
from bentoml.utils.tempdir import TempDirectory
from bentoml.bundler import save_to_dir, load_bento_service_metadata
from bentoml.utils.validator import validate_deployment_pb_schema
from bentoml.yatai.deployment_utils import (
deployment_yaml_string_to_pb,
deployment_dict_to_pb,
)
from bentoml.yatai.status import Status
logger = logging.getLogger(__name__)
def upload_bento_service(bento_service, base_path=None, version=None):
"""Save given bento_service via BentoML's default Yatai service, which manages
all saved Bento files and their deployments in cloud platforms. If a remote yatai
service has not been configured, this will default to saving the new Bento to the
local file system under the BentoML home directory.
Args:
bento_service (bentoml.service.BentoService): a Bento Service instance
base_path (str): optional, base path of the bento repository
version (str): optional,
Return:
URI where the BentoService is saved
"""
track_save(bento_service)
with TempDirectory() as tmpdir:
save_to_dir(bento_service, tmpdir, version)
return _upload_bento_service(tmpdir, base_path)
def _upload_bento_service(saved_bento_path, base_path):
bento_service_metadata = load_bento_service_metadata(saved_bento_path)
from bentoml.yatai import get_yatai_service
# if base_path is not None, default repository base path in config will be override
if base_path is not None:
logger.warning("Overriding default repository path to '%s'", base_path)
yatai = get_yatai_service(repo_base_url=base_path)
get_bento_response = yatai.GetBento(
GetBentoRequest(
bento_name=bento_service_metadata.name,
bento_version=bento_service_metadata.version,
)
)
if get_bento_response.status.status_code == status_pb2.Status.OK:
raise BentoMLException(
"BentoService bundle {}:{} already registered in repository. Reset "
"BentoService version with BentoService#set_version or bypass BentoML's "
"model registry feature with BentoService#save_to_dir".format(
bento_service_metadata.name, bento_service_metadata.version
)
)
elif get_bento_response.status.status_code != status_pb2.Status.NOT_FOUND:
raise BentoMLException(
'Failed accessing YataiService. {error_code}:'
'{error_message}'.format(
error_code=Status.Name(get_bento_response.status.status_code),
error_message=get_bento_response.status.error_message,
)
)
request = AddBentoRequest(
bento_name=bento_service_metadata.name,
bento_version=bento_service_metadata.version,
)
response = yatai.AddBento(request)
if response.status.status_code != status_pb2.Status.OK:
raise BentoMLException(
"Error adding BentoService bundle to repository: {}:{}".format(
Status.Name(response.status.status_code), response.status.error_message
)
)
if response.uri.type == BentoUri.LOCAL:
if os.path.exists(response.uri.uri):
# due to copytree dst must not already exist
shutil.rmtree(response.uri.uri)
shutil.copytree(saved_bento_path, response.uri.uri)
_update_bento_upload_progress(yatai, bento_service_metadata)
logger.info(
"BentoService bundle '%s:%s' created at: %s",
bento_service_metadata.name,
bento_service_metadata.version,
response.uri.uri,
)
# Return URI to saved bento in repository storage
return response.uri.uri
elif response.uri.type == BentoUri.S3:
_update_bento_upload_progress(
yatai, bento_service_metadata, UploadStatus.UPLOADING, 0
)
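# Tar-gzip the saved bundle in memory and POST it to the pre-signed S3 URL that Yatai returned
# in response.uri.additional_fields.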
fileobj = io.BytesIO()
with tarfile.open(mode="w:gz", fileobj=fileobj) as tar:
tar.add(saved_bento_path, arcname=bento_service_metadata.name)
fileobj.seek(0, 0)
files = {'file': ('dummy', fileobj)} # dummy file name because file name
# has been generated when getting the pre-signed signature.
data = json.loads(response.uri.additional_fields)
uri = data.pop('url')
http_response = requests.post(uri, data=data, files=files)
if http_response.status_code != 204:
_update_bento_upload_progress(
yatai, bento_service_metadata, UploadStatus.ERROR
)
raise BentoMLException(
"Error saving BentoService bundle to S3. {}: {} ".format(
Status.Name(http_response.status_code), http_response.text
)
)
_update_bento_upload_progress(yatai, bento_service_metadata)
logger.info(
"Successfully saved BentoService bundle '%s:%s' to S3: %s",
bento_service_metadata.name,
bento_service_metadata.version,
response.uri.uri,
)
return response.uri.uri
else:
raise BentoMLException(
"Error saving Bento to target repository, URI type %s at %s not supported"
% response.uri.type,
response.uri.uri,
)
def _update_bento_upload_progress(
yatai, bento_service_metadata, status=UploadStatus.DONE, percentage=None
):
upload_status = UploadStatus(status=status, percentage=percentage)
upload_status.updated_at.GetCurrentTime()
update_bento_req = UpdateBentoRequest(
bento_name=bento_service_metadata.name,
bento_version=bento_service_metadata.version,
upload_status=upload_status,
service_metadata=bento_service_metadata,
)
yatai.UpdateBento(update_bento_req)
def create_deployment(
deployment_name,
namespace,
bento_name,
bento_version,
platform,
operator_spec,
labels=None,
annotations=None,
yatai_service=None,
):
if yatai_service is None:
from bentoml.yatai import get_yatai_service
yatai_service = get_yatai_service()
# Make sure there is no active deployment with the same deployment name
get_deployment_pb = yatai_service.GetDeployment(
GetDeploymentRequest(deployment_name=deployment_name, namespace=namespace)
)
if get_deployment_pb.status.status_code == status_pb2.Status.OK:
raise YataiDeploymentException(
'Deployment "{name}" already existed, use Update or Apply for updating '
'existing deployment, delete the deployment, or use a different deployment '
'name'.format(name=deployment_name)
)
if get_deployment_pb.status.status_code != status_pb2.Status.NOT_FOUND:
raise YataiDeploymentException(
'Failed accessing YataiService deployment store. {error_code}:'
'{error_message}'.format(
error_code=Status.Name(get_deployment_pb.status.status_code),
error_message=get_deployment_pb.status.error_message,
)
)
deployment_dict = {
"name": deployment_name,
"namespace": namespace or config().get('deployment', 'default_namespace'),
"labels": labels,
"annotations": annotations,
"spec": {
"bento_name": bento_name,
"bento_version": bento_version,
"operator": platform,
},
}
operator = platform.replace('-', '_').upper()
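# Illustrative example (assumed input format): a platform string such as 'aws-sagemaker' becomes
# 'AWS_SAGEMAKER', matching the DeploymentSpec.DeploymentOperator enum names.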
try:
operator_value = DeploymentSpec.DeploymentOperator.Value(operator)
except ValueError:
return ApplyDeploymentResponse(
status=Status.INVALID_ARGUMENT('Invalid platform "{}"'.format(platform))
)
if operator_value == DeploymentSpec.AWS_SAGEMAKER:
deployment_dict['spec']['sagemaker_operator_config'] = {
'region': operator_spec.get('region')
or config().get('aws', 'default_region'),
'instance_count': operator_spec.get('instance_count')
or config().getint('sagemaker', 'default_instance_count'),
'instance_type': operator_spec.get('instance_type')
or config().get('sagemaker', 'default_instance_type'),
'api_name': operator_spec.get('api_name', ''),
}
elif operator_value == DeploymentSpec.AWS_LAMBDA:
deployment_dict['spec']['aws_lambda_operator_config'] = {
'region': operator_spec.get('region')
or config().get('aws', 'default_region')
}
for field in ['api_name', 'memory_size', 'timeout']:
if operator_spec.get(field):
deployment_dict['spec']['aws_lambda_operator_config'][
field
] = operator_spec[field]
elif operator_value == DeploymentSpec.GCP_FCUNTION:
deployment_dict['spec']['gcp_function_operatorConfig'] = {
'region': operator_spec.get('region')
or config().get('google-cloud', 'default_region')
}
if operator_spec.get('api_name'):
deployment_dict['spec']['gcp_function_operator_config'][
'api_name'
] = operator_spec['api_name']
elif operator_value == DeploymentSpec.KUBERNETES:
deployment_dict['spec']['kubernetes_operator_config'] = {
'kube_namespace': operator_spec.get('kube_namespace', ''),
'replicas': operator_spec.get('replicas', 0),
'service_name': operator_spec.get('service_name', ''),
'service_type': operator_spec.get('service_type', ''),
}
else:
raise YataiDeploymentException(
'Platform "{}" is not supported in the current version of '
'BentoML'.format(platform)
)
apply_response = apply_deployment(deployment_dict, yatai_service)
if apply_response.status.status_code == status_pb2.Status.OK:
describe_response = describe_deployment(
deployment_name, namespace, yatai_service
)
if describe_response.status.status_code == status_pb2.Status.OK:
deployment_state = describe_response.state
apply_response.deployment.state.CopyFrom(deployment_state)
return apply_response
return apply_response
# TODO: update_deployment is not finished. It will be worked on along with the CLI command.
def update_deployment(deployment_name, namespace, yatai_service=None):
raise NotImplementedError
def apply_deployment(deployment_info, yatai_service=None):
if yatai_service is None:
from bentoml.yatai import get_yatai_service
yatai_service = get_yatai_service()
try:
if isinstance(deployment_info, dict):
deployment_pb = deployment_dict_to_pb(deployment_info)
elif isinstance(deployment_info, str):
deployment_pb = deployment_yaml_string_to_pb(deployment_info)
else:
raise YataiDeploymentException(
'Unexpected argument type, expect deployment info to be str in yaml '
'format or a dict, instead got: {}'.format(str(type(deployment_info)))
)
validation_errors = validate_deployment_pb_schema(deployment_pb)
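# validate_deployment_pb_schema presumably returns a falsy value when the spec is well formed
# and a collection of error messages otherwise, which drives the check below.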
if validation_errors:
return ApplyDeploymentResponse(
status=Status.INVALID_ARGUMENT(
'Failed to validate deployment: {errors}'.format(
errors=validation_errors
)
)
)
return yatai_service.ApplyDeployment(
ApplyDeploymentRequest(deployment=deployment_pb)
)
except BentoMLException as error:
return ApplyDeploymentResponse(status=Status.INTERNAL(str(error)))
def describe_deployment(namespace, name, yatai_service=None):
if yatai_service is None:
from bentoml.yatai import get_yatai_service
yatai_service = get_yatai_service()
return yatai_service.DescribeDeployment(
DescribeDeploymentRequest(deployment_name=name, namespace=namespace)
)
def get_deployment(namespace, name, yatai_service=None):
if yatai_service is None:
from bentoml.yatai import get_yatai_service
yatai_service = get_yatai_service()
return yatai_service.GetDeployment(
GetDeploymentRequest(deployment_name=name, namespace=namespace)
)
def delete_deployment(
deployment_name, namespace, force_delete=False, yatai_service=None
):
if yatai_service is None:
from bentoml.yatai import get_yatai_service
yatai_service = get_yatai_service()
return yatai_service.DeleteDeployment(
DeleteDeploymentRequest(
deployment_name=deployment_name,
namespace=namespace,
force_delete=force_delete,
)
)
def list_deployments(
limit=None,
filters=None,
labels=None,
namespace=None,
is_all_namespaces=False,
yatai_service=None,
):
if yatai_service is None:
from bentoml.yatai import get_yatai_service
yatai_service = get_yatai_service()
if is_all_namespaces:
if namespace is not None:
logger.warning(
'Ignoring `namespace=%s` due to the --all-namespace flag presented',
namespace,
)
namespace = ALL_NAMESPACE_TAG
return yatai_service.ListDeployments(
ListDeploymentsRequest(
limit=limit, filter=filters, labels=labels, namespace=namespace
)
)
| [
"logging.getLogger",
"requests.post",
"bentoml.yatai.get_yatai_service",
"bentoml.yatai.deployment_utils.deployment_dict_to_pb",
"tarfile.open",
"bentoml.proto.deployment_pb2.DeleteDeploymentRequest",
"io.BytesIO",
"bentoml.proto.deployment_pb2.GetDeploymentRequest",
"os.path.exists",
"bentoml.yatai.deployment_utils.deployment_yaml_string_to_pb",
"bentoml.proto.repository_pb2.UploadStatus",
"bentoml.proto.deployment_pb2.DeploymentSpec.DeploymentOperator.Value",
"json.loads",
"bentoml.yatai.status.Status.Name",
"bentoml.utils.usage_stats.track_save",
"bentoml.proto.deployment_pb2.ListDeploymentsRequest",
"bentoml.proto.repository_pb2.AddBentoRequest",
"bentoml.utils.validator.validate_deployment_pb_schema",
"bentoml.proto.deployment_pb2.DescribeDeploymentRequest",
"bentoml.bundler.load_bento_service_metadata",
"bentoml.utils.tempdir.TempDirectory",
"bentoml.proto.repository_pb2.UpdateBentoRequest",
"bentoml.proto.repository_pb2.GetBentoRequest",
"bentoml.bundler.save_to_dir",
"shutil.copytree",
"bentoml.exceptions.BentoMLException",
"shutil.rmtree",
"bentoml.proto.deployment_pb2.ApplyDeploymentRequest",
"bentoml.config"
] | [((1703, 1730), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1720, 1730), False, 'import logging\n'), ((2372, 2397), 'bentoml.utils.usage_stats.track_save', 'track_save', (['bento_service'], {}), '(bento_service)\n', (2382, 2397), False, 'from bentoml.utils.usage_stats import track_save\n'), ((2630, 2675), 'bentoml.bundler.load_bento_service_metadata', 'load_bento_service_metadata', (['saved_bento_path'], {}), '(saved_bento_path)\n', (2657, 2675), False, 'from bentoml.bundler import save_to_dir, load_bento_service_metadata\n'), ((2936, 2978), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {'repo_base_url': 'base_path'}), '(repo_base_url=base_path)\n', (2953, 2978), False, 'from bentoml.yatai import get_yatai_service\n'), ((4012, 4118), 'bentoml.proto.repository_pb2.AddBentoRequest', 'AddBentoRequest', ([], {'bento_name': 'bento_service_metadata.name', 'bento_version': 'bento_service_metadata.version'}), '(bento_name=bento_service_metadata.name, bento_version=\n bento_service_metadata.version)\n', (4027, 4118), False, 'from bentoml.proto.repository_pb2 import AddBentoRequest, GetBentoRequest, BentoUri, UpdateBentoRequest, UploadStatus\n'), ((6791, 6841), 'bentoml.proto.repository_pb2.UploadStatus', 'UploadStatus', ([], {'status': 'status', 'percentage': 'percentage'}), '(status=status, percentage=percentage)\n', (6803, 6841), False, 'from bentoml.proto.repository_pb2 import AddBentoRequest, GetBentoRequest, BentoUri, UpdateBentoRequest, UploadStatus\n'), ((6911, 7094), 'bentoml.proto.repository_pb2.UpdateBentoRequest', 'UpdateBentoRequest', ([], {'bento_name': 'bento_service_metadata.name', 'bento_version': 'bento_service_metadata.version', 'upload_status': 'upload_status', 'service_metadata': 'bento_service_metadata'}), '(bento_name=bento_service_metadata.name, bento_version=\n bento_service_metadata.version, upload_status=upload_status,\n service_metadata=bento_service_metadata)\n', (6929, 7094), False, 'from bentoml.proto.repository_pb2 import AddBentoRequest, GetBentoRequest, BentoUri, UpdateBentoRequest, UploadStatus\n'), ((2408, 2423), 'bentoml.utils.tempdir.TempDirectory', 'TempDirectory', ([], {}), '()\n', (2421, 2423), False, 'from bentoml.utils.tempdir import TempDirectory\n'), ((2443, 2486), 'bentoml.bundler.save_to_dir', 'save_to_dir', (['bento_service', 'tmpdir', 'version'], {}), '(bento_service, tmpdir, version)\n', (2454, 2486), False, 'from bentoml.bundler import save_to_dir, load_bento_service_metadata\n'), ((3029, 3135), 'bentoml.proto.repository_pb2.GetBentoRequest', 'GetBentoRequest', ([], {'bento_name': 'bento_service_metadata.name', 'bento_version': 'bento_service_metadata.version'}), '(bento_name=bento_service_metadata.name, bento_version=\n bento_service_metadata.version)\n', (3044, 3135), False, 'from bentoml.proto.repository_pb2 import AddBentoRequest, GetBentoRequest, BentoUri, UpdateBentoRequest, UploadStatus\n'), ((4513, 4545), 'os.path.exists', 'os.path.exists', (['response.uri.uri'], {}), '(response.uri.uri)\n', (4527, 4545), False, 'import os\n'), ((4656, 4707), 'shutil.copytree', 'shutil.copytree', (['saved_bento_path', 'response.uri.uri'], {}), '(saved_bento_path, response.uri.uri)\n', (4671, 4707), False, 'import shutil\n'), ((7467, 7486), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {}), '()\n', (7484, 7486), False, 'from bentoml.yatai import get_yatai_service\n'), ((7625, 7699), 'bentoml.proto.deployment_pb2.GetDeploymentRequest', 'GetDeploymentRequest', ([], {'deployment_name': 
'deployment_name', 'namespace': 'namespace'}), '(deployment_name=deployment_name, namespace=namespace)\n', (7645, 7699), False, 'from bentoml.proto.deployment_pb2 import ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((8887, 8936), 'bentoml.proto.deployment_pb2.DeploymentSpec.DeploymentOperator.Value', 'DeploymentSpec.DeploymentOperator.Value', (['operator'], {}), '(operator)\n', (8926, 8936), False, 'from bentoml.proto.deployment_pb2 import ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((11988, 12007), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {}), '()\n', (12005, 12007), False, 'from bentoml.yatai import get_yatai_service\n'), ((12526, 12570), 'bentoml.utils.validator.validate_deployment_pb_schema', 'validate_deployment_pb_schema', (['deployment_pb'], {}), '(deployment_pb)\n', (12555, 12570), False, 'from bentoml.utils.validator import validate_deployment_pb_schema\n'), ((13268, 13287), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {}), '()\n', (13285, 13287), False, 'from bentoml.yatai import get_yatai_service\n'), ((13341, 13409), 'bentoml.proto.deployment_pb2.DescribeDeploymentRequest', 'DescribeDeploymentRequest', ([], {'deployment_name': 'name', 'namespace': 'namespace'}), '(deployment_name=name, namespace=namespace)\n', (13366, 13409), False, 'from bentoml.proto.deployment_pb2 import ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((13582, 13601), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {}), '()\n', (13599, 13601), False, 'from bentoml.yatai import get_yatai_service\n'), ((13650, 13713), 'bentoml.proto.deployment_pb2.GetDeploymentRequest', 'GetDeploymentRequest', ([], {'deployment_name': 'name', 'namespace': 'namespace'}), '(deployment_name=name, namespace=namespace)\n', (13670, 13713), False, 'from bentoml.proto.deployment_pb2 import ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((13926, 13945), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {}), '()\n', (13943, 13945), False, 'from bentoml.yatai import get_yatai_service\n'), ((13998, 14107), 'bentoml.proto.deployment_pb2.DeleteDeploymentRequest', 'DeleteDeploymentRequest', ([], {'deployment_name': 'deployment_name', 'namespace': 'namespace', 'force_delete': 'force_delete'}), '(deployment_name=deployment_name, namespace=\n namespace, force_delete=force_delete)\n', (14021, 14107), False, 'from bentoml.proto.deployment_pb2 import ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((14414, 14433), 'bentoml.yatai.get_yatai_service', 'get_yatai_service', ([], {}), '()\n', (14431, 14433), False, 'from bentoml.yatai import get_yatai_service\n'), ((14738, 14829), 'bentoml.proto.deployment_pb2.ListDeploymentsRequest', 'ListDeploymentsRequest', ([], {'limit': 'limit', 'filter': 'filters', 'labels': 'labels', 'namespace': 'namespace'}), '(limit=limit, filter=filters, labels=labels,\n namespace=namespace)\n', (14760, 14829), False, 'from bentoml.proto.deployment_pb2 import 
ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((4616, 4647), 'shutil.rmtree', 'shutil.rmtree', (['response.uri.uri'], {}), '(response.uri.uri)\n', (4629, 4647), False, 'import shutil\n'), ((5253, 5265), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (5263, 5265), False, 'import io\n'), ((5598, 5640), 'json.loads', 'json.loads', (['response.uri.additional_fields'], {}), '(response.uri.additional_fields)\n', (5608, 5640), False, 'import json\n'), ((5695, 5737), 'requests.post', 'requests.post', (['uri'], {'data': 'data', 'files': 'files'}), '(uri, data=data, files=files)\n', (5708, 5737), False, 'import requests\n'), ((6476, 6616), 'bentoml.exceptions.BentoMLException', 'BentoMLException', (["('Error saving Bento to target repository, URI type %s at %s not supported' %\n response.uri.type)", 'response.uri.uri'], {}), "(\n 'Error saving Bento to target repository, URI type %s at %s not supported'\n % response.uri.type, response.uri.uri)\n", (6492, 6616), False, 'from bentoml.exceptions import BentoMLException, YataiDeploymentException\n'), ((12092, 12130), 'bentoml.yatai.deployment_utils.deployment_dict_to_pb', 'deployment_dict_to_pb', (['deployment_info'], {}), '(deployment_info)\n', (12113, 12130), False, 'from bentoml.yatai.deployment_utils import deployment_yaml_string_to_pb, deployment_dict_to_pb\n'), ((12925, 12973), 'bentoml.proto.deployment_pb2.ApplyDeploymentRequest', 'ApplyDeploymentRequest', ([], {'deployment': 'deployment_pb'}), '(deployment=deployment_pb)\n', (12947, 12973), False, 'from bentoml.proto.deployment_pb2 import ApplyDeploymentRequest, DescribeDeploymentRequest, GetDeploymentRequest, DeploymentSpec, DeleteDeploymentRequest, ListDeploymentsRequest, ApplyDeploymentResponse\n'), ((4361, 4401), 'bentoml.yatai.status.Status.Name', 'Status.Name', (['response.status.status_code'], {}), '(response.status.status_code)\n', (4372, 4401), False, 'from bentoml.yatai.status import Status\n'), ((5279, 5321), 'tarfile.open', 'tarfile.open', ([], {'mode': '"""w:gz"""', 'fileobj': 'fileobj'}), "(mode='w:gz', fileobj=fileobj)\n", (5291, 5321), False, 'import tarfile\n'), ((12206, 12251), 'bentoml.yatai.deployment_utils.deployment_yaml_string_to_pb', 'deployment_yaml_string_to_pb', (['deployment_info'], {}), '(deployment_info)\n', (12234, 12251), False, 'from bentoml.yatai.deployment_utils import deployment_yaml_string_to_pb, deployment_dict_to_pb\n'), ((8303, 8352), 'bentoml.yatai.status.Status.Name', 'Status.Name', (['get_deployment_pb.status.status_code'], {}), '(get_deployment_pb.status.status_code)\n', (8314, 8352), False, 'from bentoml.yatai.status import Status\n'), ((8540, 8548), 'bentoml.config', 'config', ([], {}), '()\n', (8546, 8548), False, 'from bentoml import config\n'), ((3851, 3901), 'bentoml.yatai.status.Status.Name', 'Status.Name', (['get_bento_response.status.status_code'], {}), '(get_bento_response.status.status_code)\n', (3862, 3901), False, 'from bentoml.yatai.status import Status\n'), ((6038, 6076), 'bentoml.yatai.status.Status.Name', 'Status.Name', (['http_response.status_code'], {}), '(http_response.status_code)\n', (6049, 6076), False, 'from bentoml.yatai.status import Status\n'), ((9280, 9288), 'bentoml.config', 'config', ([], {}), '()\n', (9286, 9288), False, 'from bentoml import config\n'), ((9400, 9408), 'bentoml.config', 'config', ([], {}), '()\n', (9406, 9408), False, 'from bentoml import config\n'), ((9535, 9543), 'bentoml.config', 
'config', ([], {}), '()\n', (9541, 9543), False, 'from bentoml import config\n'), ((9841, 9849), 'bentoml.config', 'config', ([], {}), '()\n', (9847, 9849), False, 'from bentoml import config\n'), ((10317, 10325), 'bentoml.config', 'config', ([], {}), '()\n', (10323, 10325), False, 'from bentoml import config\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
from collections import namedtuple
# Functions & classes =========================================================
class ConversionRequest(namedtuple("ConversionRequest", ["marc_xml",
"uuid",
"url"])):
"""
Request to convert MARC XML to MODS.
Attributes:
        marc_xml (str): The MARC XML you wish to convert to MODS.
uuid (str): UUID for given MARC XML.
url (str): URL of the resource in edeposit (private or not).
"""
| [
"collections.namedtuple"
] | [((306, 366), 'collections.namedtuple', 'namedtuple', (['"""ConversionRequest"""', "['marc_xml', 'uuid', 'url']"], {}), "('ConversionRequest', ['marc_xml', 'uuid', 'url'])\n", (316, 366), False, 'from collections import namedtuple\n')] |
import os
import sys
file_dir = os.path.dirname(__file__)
sys.path.append(file_dir)
parent_dir = os.path.dirname(__file__) + "/.."
sys.path.append(parent_dir)
sys.path.append("..") #./dleamse/
print(sys.path)
abspath_dleamse = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+"/"
#print(abspath_dleamse)
from dleamse_encode_and_embed import encode_and_embed_spectra
from dleamse_encode_and_embed import SiameseNetwork2
from dleamse_faiss_index_writer import FaissWriteIndex
from dleamse_faiss_index_search import FaissIndexSearch
from numba.errors import NumbaDeprecationWarning, NumbaPendingDeprecationWarning
import logging
import click
import warnings
from click.testing import CliRunner
#from dleamse.mslookup import cli
from mslookup import cli
def embedded_db_spectra():
runner = CliRunner()
result = runner.invoke(cli,
['embed-ms-file', '-m', abspath_dleamse+'dleamse_model_references/080802_20_1000_NM500R_model.pkl', '-r', abspath_dleamse+'dleamse_model_references/0722_500_rf_spectra.mgf', '-i', abspath_dleamse+'testdata/PXD015890_114263_ArchiveSpectrum.json',
'-p', 'PXD015890'])
"""
python mslookup.py embed-ms-file -i testdata/PXD015890_114263_ArchiveSpectrum.json -p PXD015890
"""
print(result)
print(result.output)
print(result.exit_code)
assert result.exit_code == 0
def make_db():
runner = CliRunner()
result = runner.invoke(cli,
['make-index', '-d', abspath_dleamse+'testdata/db.csv',
'-e', abspath_dleamse+'testdata/', '-o', abspath_dleamse+'testdata/db.index'])
"""
python mslookup.py make-index -d testdata/db.csv -e testdata/ -o testdata/db.index
"""
print(result)
print(result.output)
print(result.exit_code)
assert result.exit_code == 0
def embedded_query_spectra():
runner = CliRunner()
result = runner.invoke(cli,
['embed-ms-file', '-m', abspath_dleamse+'dleamse_model_references/080802_20_1000_NM500R_model.pkl', '-r',abspath_dleamse+'dleamse_model_references/0722_500_rf_spectra.mgf', '-i', abspath_dleamse+'testdata/query.json',
'-p', 'PXD015890'])
"""
python mslookup.py embed-ms-file -i testdata/query.json -p PXD015890
"""
print(result)
print(result.output)
print(result.exit_code)
assert result.exit_code == 0
def clean_db():
#os.remove(abspath_dleamse+"testdata/PXD015890_114263_ArchiveSpectrum_encoded.npy")
os.remove(abspath_dleamse+"testdata/PXD015890_114263_ArchiveSpectrum_ids_usi.txt")
os.remove(abspath_dleamse+"testdata/db.index")
#os.remove(abspath_dleamse+"testdata/usi_db.csv") #No such file was generated
os.remove(abspath_dleamse+"testdata/db_ids_usi.csv")
#os.remove(abspath_dleamse+"testdata/query_encoded.npy")
os.remove(abspath_dleamse+"testdata/query_ids_usi.txt")
def clean_db2():
os.remove(abspath_dleamse+"testdata/PXD015890_114263_ArchiveSpectrum_ids_usi.txt")
os.remove(abspath_dleamse+"testdata/PXD015890_114263_ArchiveSpectrum_embedded.txt")
os.remove(abspath_dleamse+"testdata/db.index")
os.remove(abspath_dleamse+"testdata/db_ids_usi.csv")
os.remove(abspath_dleamse+"testdata/db.csv")
os.remove(abspath_dleamse+"testdata/query_ids_usi.txt")
os.remove(abspath_dleamse+"testdata/query_embedded.txt")
def clean_resultfiles():
os.remove(abspath_dleamse+"testdata/minor.csv")
os.remove(abspath_dleamse+"testdata/minortoautosearch.csv")
os.remove(abspath_dleamse+"testdata/minoronestopsearch.csv")
def search_spectra():
runner = CliRunner()
result = runner.invoke(cli,
['range-search', '-i', abspath_dleamse+'testdata/db.index',
'-u', abspath_dleamse+'testdata/db_ids_usi.csv', '-n', 100,'-e', abspath_dleamse+'testdata/query_embedded.txt', '-o', abspath_dleamse+'testdata/minor.csv', '-ut', 0.099, '-lt', 0.0])
"""
python mslookup.py range-search -i testdata/db.index -u testdata/db_ids_usi.csv -e testdata/query_embedded.txt -lt 0.0 -ut 0.099 -o testdata/minor.json
"""
print(result)
print(result.output)
print(result.exit_code)
assert result.exit_code == 0
def auto_search_spectra():
runner = CliRunner()
result = runner.invoke(cli,
['auto-range-search', '-i', abspath_dleamse+'testdata/db.index',
'-u', abspath_dleamse+'testdata/db_ids_usi.csv', '-n', 100,'-e', abspath_dleamse+'testdata/query.json', '-o', abspath_dleamse+'testdata/minortoautosearch.csv', '-ut', 0.099, '-lt', 0.0])
"""
python mslookup.py auto-range-search -i ./testdata/db.index -u ./testdata/db_ids_usi.csv -e testdata/query.json -lt 0.0 -ut 0.099 -o testdata/minortoautosearch.json
"""
print(result)
print(result.output)
print(result.exit_code)
assert result.exit_code == 0
def onestop_search_spectra():
runner = CliRunner()
result = runner.invoke(cli,
['onestop-range-search', '-d', abspath_dleamse+'testdata/db.csv', '-odb', abspath_dleamse+'testdata/db.index', '-ls', abspath_dleamse+'testdata/PXD015890_114263_ArchiveSpectrum.json', '-n', 100,'-e', abspath_dleamse+'testdata/query.json', '-o', abspath_dleamse+'testdata/minoronestopsearch.csv', '-ut', 0.099, '-lt', 0.0])
"""
python mslookup.py onestop-range-search -d testdata/db.csv -odb testdata/db.index -ls testdata/PXD015890_114263_ArchiveSpectrum.json -e testdata/query.json -lt 0.0 -ut 0.099 -o testdata/minoronestopsearch.json
"""
print(result)
print(result.output)
print(result.exit_code)
assert result.exit_code == 0
if __name__ == '__main__':
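  # Assumed end-to-end order (not documented in the original file): embed the
  # database spectra, build the FAISS index, embed the query spectra, run the
  # range/auto/one-stop searches, and clean up intermediate files in between.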
  embedded_db_spectra()
make_db()
  embedded_query_spectra()
search_spectra()
auto_search_spectra()
clean_db2()
onestop_search_spectra()
clean_db2()
clean_resultfiles()
| [
"click.testing.CliRunner",
"os.path.dirname",
"os.path.abspath",
"sys.path.append",
"os.remove"
] | [((34, 59), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (49, 59), False, 'import os\n'), ((60, 85), 'sys.path.append', 'sys.path.append', (['file_dir'], {}), '(file_dir)\n', (75, 85), False, 'import sys\n'), ((133, 160), 'sys.path.append', 'sys.path.append', (['parent_dir'], {}), '(parent_dir)\n', (148, 160), False, 'import sys\n'), ((161, 182), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (176, 182), False, 'import sys\n'), ((99, 124), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (114, 124), False, 'import os\n'), ((807, 818), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (816, 818), False, 'from click.testing import CliRunner\n'), ((1420, 1431), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1429, 1431), False, 'from click.testing import CliRunner\n'), ((1882, 1893), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1891, 1893), False, 'from click.testing import CliRunner\n'), ((2497, 2585), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/PXD015890_114263_ArchiveSpectrum_ids_usi.txt')"], {}), "(abspath_dleamse +\n 'testdata/PXD015890_114263_ArchiveSpectrum_ids_usi.txt')\n", (2506, 2585), False, 'import os\n'), ((2582, 2630), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/db.index')"], {}), "(abspath_dleamse + 'testdata/db.index')\n", (2591, 2630), False, 'import os\n'), ((2711, 2765), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/db_ids_usi.csv')"], {}), "(abspath_dleamse + 'testdata/db_ids_usi.csv')\n", (2720, 2765), False, 'import os\n'), ((2825, 2882), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/query_ids_usi.txt')"], {}), "(abspath_dleamse + 'testdata/query_ids_usi.txt')\n", (2834, 2882), False, 'import os\n'), ((2901, 2989), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/PXD015890_114263_ArchiveSpectrum_ids_usi.txt')"], {}), "(abspath_dleamse +\n 'testdata/PXD015890_114263_ArchiveSpectrum_ids_usi.txt')\n", (2910, 2989), False, 'import os\n'), ((2986, 3075), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/PXD015890_114263_ArchiveSpectrum_embedded.txt')"], {}), "(abspath_dleamse +\n 'testdata/PXD015890_114263_ArchiveSpectrum_embedded.txt')\n", (2995, 3075), False, 'import os\n'), ((3072, 3120), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/db.index')"], {}), "(abspath_dleamse + 'testdata/db.index')\n", (3081, 3120), False, 'import os\n'), ((3121, 3175), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/db_ids_usi.csv')"], {}), "(abspath_dleamse + 'testdata/db_ids_usi.csv')\n", (3130, 3175), False, 'import os\n'), ((3176, 3222), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/db.csv')"], {}), "(abspath_dleamse + 'testdata/db.csv')\n", (3185, 3222), False, 'import os\n'), ((3223, 3280), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/query_ids_usi.txt')"], {}), "(abspath_dleamse + 'testdata/query_ids_usi.txt')\n", (3232, 3280), False, 'import os\n'), ((3281, 3339), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/query_embedded.txt')"], {}), "(abspath_dleamse + 'testdata/query_embedded.txt')\n", (3290, 3339), False, 'import os\n'), ((3366, 3415), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/minor.csv')"], {}), "(abspath_dleamse + 'testdata/minor.csv')\n", (3375, 3415), False, 'import os\n'), ((3416, 3477), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/minortoautosearch.csv')"], {}), "(abspath_dleamse + 
'testdata/minortoautosearch.csv')\n", (3425, 3477), False, 'import os\n'), ((3478, 3540), 'os.remove', 'os.remove', (["(abspath_dleamse + 'testdata/minoronestopsearch.csv')"], {}), "(abspath_dleamse + 'testdata/minoronestopsearch.csv')\n", (3487, 3540), False, 'import os\n'), ((3575, 3586), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3584, 3586), False, 'from click.testing import CliRunner\n'), ((4213, 4224), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (4222, 4224), False, 'from click.testing import CliRunner\n'), ((4877, 4888), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (4886, 4888), False, 'from click.testing import CliRunner\n'), ((262, 287), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (277, 287), False, 'import os\n')] |
"""empty message
Revision ID: 62456de6631f
Revises:
Create Date: 2017-06-26 14:56:05.230639
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '62456de6631f'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('dataset', sa.Column('age', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('altitude', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('assembly', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('avg_read_length_maxrun', sa.Float(), nullable=True))
op.add_column('dataset', sa.Column('baseA_count_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('baseC_count_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('baseG_count_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('baseN_count_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('baseT_count_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('biomaterial_provider', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('bioproject_id', sa.String(length=40), nullable=True))
op.add_column('dataset', sa.Column('biosample_id', sa.String(length=40), nullable=True))
op.add_column('dataset', sa.Column('biosample_models', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('biosample_package', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('biosample_uid', sa.String(length=30), nullable=True))
op.add_column('dataset', sa.Column('collection_time', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('db_source', sa.String(length=20), nullable=True))
op.add_column('dataset', sa.Column('db_source_uid', sa.String(length=50), nullable=True))
op.add_column('dataset', sa.Column('depth', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('dev_stage', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('download_size_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('elevation', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('env_biome', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('env_feature', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('env_material', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('estimated_size', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('experimental_factor', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('expt_design_description', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('expt_id', sa.String(length=30), nullable=True))
op.add_column('dataset', sa.Column('expt_link', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('expt_title', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('finishing_strategy', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('gc_percent_maxrun', sa.Float(), nullable=True))
op.add_column('dataset', sa.Column('geo_loc_name', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('host_disease', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('instrument_model', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('isol_growth_condt', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('lat_lon', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('library_construction_method', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('library_construction_protocol', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('library_name', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('library_reads_sequenced_maxrun', sa.BIGINT(), nullable=True))
op.add_column('dataset', sa.Column('library_screening_strategy', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('library_strategy', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('metadata_publication_date', sa.DateTime(), nullable=True))
op.add_column('dataset', sa.Column('ncbi_taxon_id', sa.String(length=50), nullable=True))
op.add_column('dataset', sa.Column('nuccore_link', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('nuccore_uids', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('num_replicons', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('num_runs_in_accession', sa.Integer(), nullable=True))
op.add_column('dataset', sa.Column('organization_address', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('organization_contacts', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('organization_name', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('ploidy', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('project_name', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('propagation', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('ref_biomaterial', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('run_ids_maxrun', sa.String(length=30), nullable=True))
op.add_column('dataset', sa.Column('run_quality_counts_maxrun', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('sample_attributes', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('sample_description', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('sample_id', sa.String(length=30), nullable=True))
op.add_column('dataset', sa.Column('sample_type', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('sequencing_method', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('sex', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('specific_host', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('study_abstract', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('study_attributes', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('study_id', sa.String(length=30), nullable=True))
op.add_column('dataset', sa.Column('study_links', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('study_title', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('study_type', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('study_type_other', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('submission_id', sa.String(length=30), nullable=True))
op.add_column('dataset', sa.Column('subspecific_genetic_lineage', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('target_gene', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('target_subfragment', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('taxon_common_name', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('taxon_scientific_name', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('tissue', sa.Text(), nullable=True))
op.add_column('dataset', sa.Column('total_num_bases_maxrun', sa.BIGINT(), nullable=True))
op.drop_column('dataset', 'download_size')
op.drop_column('dataset', 'total_num_reads')
op.drop_column('dataset', 'avg_percent_gc')
op.drop_column('dataset', 'avg_read_length')
op.drop_column('dataset', 'total_num_bases')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('dataset', sa.Column('total_num_bases', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.add_column('dataset', sa.Column('avg_read_length', mysql.FLOAT(), nullable=True))
op.add_column('dataset', sa.Column('avg_percent_gc', mysql.FLOAT(), nullable=True))
op.add_column('dataset', sa.Column('total_num_reads', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.add_column('dataset', sa.Column('download_size', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.drop_column('dataset', 'total_num_bases_maxrun')
op.drop_column('dataset', 'tissue')
op.drop_column('dataset', 'taxon_scientific_name')
op.drop_column('dataset', 'taxon_common_name')
op.drop_column('dataset', 'target_subfragment')
op.drop_column('dataset', 'target_gene')
op.drop_column('dataset', 'subspecific_genetic_lineage')
op.drop_column('dataset', 'submission_id')
op.drop_column('dataset', 'study_type_other')
op.drop_column('dataset', 'study_type')
op.drop_column('dataset', 'study_title')
op.drop_column('dataset', 'study_links')
op.drop_column('dataset', 'study_id')
op.drop_column('dataset', 'study_attributes')
op.drop_column('dataset', 'study_abstract')
op.drop_column('dataset', 'specific_host')
op.drop_column('dataset', 'sex')
op.drop_column('dataset', 'sequencing_method')
op.drop_column('dataset', 'sample_type')
op.drop_column('dataset', 'sample_id')
op.drop_column('dataset', 'sample_description')
op.drop_column('dataset', 'sample_attributes')
op.drop_column('dataset', 'run_quality_counts_maxrun')
op.drop_column('dataset', 'run_ids_maxrun')
op.drop_column('dataset', 'ref_biomaterial')
op.drop_column('dataset', 'propagation')
op.drop_column('dataset', 'project_name')
op.drop_column('dataset', 'ploidy')
op.drop_column('dataset', 'organization_name')
op.drop_column('dataset', 'organization_contacts')
op.drop_column('dataset', 'organization_address')
op.drop_column('dataset', 'num_runs_in_accession')
op.drop_column('dataset', 'num_replicons')
op.drop_column('dataset', 'nuccore_uids')
op.drop_column('dataset', 'nuccore_link')
op.drop_column('dataset', 'ncbi_taxon_id')
op.drop_column('dataset', 'metadata_publication_date')
op.drop_column('dataset', 'library_strategy')
op.drop_column('dataset', 'library_screening_strategy')
op.drop_column('dataset', 'library_reads_sequenced_maxrun')
op.drop_column('dataset', 'library_name')
op.drop_column('dataset', 'library_construction_protocol')
op.drop_column('dataset', 'library_construction_method')
op.drop_column('dataset', 'lat_lon')
op.drop_column('dataset', 'isol_growth_condt')
op.drop_column('dataset', 'instrument_model')
op.drop_column('dataset', 'host_disease')
op.drop_column('dataset', 'geo_loc_name')
op.drop_column('dataset', 'gc_percent_maxrun')
op.drop_column('dataset', 'finishing_strategy')
op.drop_column('dataset', 'expt_title')
op.drop_column('dataset', 'expt_link')
op.drop_column('dataset', 'expt_id')
op.drop_column('dataset', 'expt_design_description')
op.drop_column('dataset', 'experimental_factor')
op.drop_column('dataset', 'estimated_size')
op.drop_column('dataset', 'env_material')
op.drop_column('dataset', 'env_feature')
op.drop_column('dataset', 'env_biome')
op.drop_column('dataset', 'elevation')
op.drop_column('dataset', 'download_size_maxrun')
op.drop_column('dataset', 'dev_stage')
op.drop_column('dataset', 'depth')
op.drop_column('dataset', 'db_source_uid')
op.drop_column('dataset', 'db_source')
op.drop_column('dataset', 'collection_time')
op.drop_column('dataset', 'biosample_uid')
op.drop_column('dataset', 'biosample_package')
op.drop_column('dataset', 'biosample_models')
op.drop_column('dataset', 'biosample_id')
op.drop_column('dataset', 'bioproject_id')
op.drop_column('dataset', 'biomaterial_provider')
op.drop_column('dataset', 'baseT_count_maxrun')
op.drop_column('dataset', 'baseN_count_maxrun')
op.drop_column('dataset', 'baseG_count_maxrun')
op.drop_column('dataset', 'baseC_count_maxrun')
op.drop_column('dataset', 'baseA_count_maxrun')
op.drop_column('dataset', 'avg_read_length_maxrun')
op.drop_column('dataset', 'assembly')
op.drop_column('dataset', 'altitude')
op.drop_column('dataset', 'age')
# ### end Alembic commands ###
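# Typical way to apply or revert this revision with the standard Alembic CLI
# (not part of the autogenerated file):
#   alembic upgrade 62456de6631f   # apply this revision
#   alembic downgrade -1           # step back to the previous revision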
| [
"sqlalchemy.Float",
"sqlalchemy.DateTime",
"sqlalchemy.Text",
"sqlalchemy.dialects.mysql.INTEGER",
"alembic.op.drop_column",
"sqlalchemy.dialects.mysql.FLOAT",
"sqlalchemy.Integer",
"sqlalchemy.String",
"sqlalchemy.BIGINT"
] | [((7416, 7458), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""download_size"""'], {}), "('dataset', 'download_size')\n", (7430, 7458), False, 'from alembic import op\n'), ((7463, 7507), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""total_num_reads"""'], {}), "('dataset', 'total_num_reads')\n", (7477, 7507), False, 'from alembic import op\n'), ((7512, 7555), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""avg_percent_gc"""'], {}), "('dataset', 'avg_percent_gc')\n", (7526, 7555), False, 'from alembic import op\n'), ((7560, 7604), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""avg_read_length"""'], {}), "('dataset', 'avg_read_length')\n", (7574, 7604), False, 'from alembic import op\n'), ((7609, 7653), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""total_num_bases"""'], {}), "('dataset', 'total_num_bases')\n", (7623, 7653), False, 'from alembic import op\n'), ((8337, 8388), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""total_num_bases_maxrun"""'], {}), "('dataset', 'total_num_bases_maxrun')\n", (8351, 8388), False, 'from alembic import op\n'), ((8393, 8428), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""tissue"""'], {}), "('dataset', 'tissue')\n", (8407, 8428), False, 'from alembic import op\n'), ((8433, 8483), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""taxon_scientific_name"""'], {}), "('dataset', 'taxon_scientific_name')\n", (8447, 8483), False, 'from alembic import op\n'), ((8488, 8534), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""taxon_common_name"""'], {}), "('dataset', 'taxon_common_name')\n", (8502, 8534), False, 'from alembic import op\n'), ((8539, 8586), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""target_subfragment"""'], {}), "('dataset', 'target_subfragment')\n", (8553, 8586), False, 'from alembic import op\n'), ((8591, 8631), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""target_gene"""'], {}), "('dataset', 'target_gene')\n", (8605, 8631), False, 'from alembic import op\n'), ((8636, 8692), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""subspecific_genetic_lineage"""'], {}), "('dataset', 'subspecific_genetic_lineage')\n", (8650, 8692), False, 'from alembic import op\n'), ((8697, 8739), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""submission_id"""'], {}), "('dataset', 'submission_id')\n", (8711, 8739), False, 'from alembic import op\n'), ((8744, 8789), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""study_type_other"""'], {}), "('dataset', 'study_type_other')\n", (8758, 8789), False, 'from alembic import op\n'), ((8794, 8833), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""study_type"""'], {}), "('dataset', 'study_type')\n", (8808, 8833), False, 'from alembic import op\n'), ((8838, 8878), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""study_title"""'], {}), "('dataset', 'study_title')\n", (8852, 8878), False, 'from alembic import op\n'), ((8883, 8923), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""study_links"""'], {}), "('dataset', 'study_links')\n", (8897, 8923), False, 'from alembic import op\n'), ((8928, 8965), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""study_id"""'], {}), "('dataset', 'study_id')\n", (8942, 8965), False, 'from alembic import op\n'), ((8970, 9015), 'alembic.op.drop_column', 
'op.drop_column', (['"""dataset"""', '"""study_attributes"""'], {}), "('dataset', 'study_attributes')\n", (8984, 9015), False, 'from alembic import op\n'), ((9020, 9063), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""study_abstract"""'], {}), "('dataset', 'study_abstract')\n", (9034, 9063), False, 'from alembic import op\n'), ((9068, 9110), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""specific_host"""'], {}), "('dataset', 'specific_host')\n", (9082, 9110), False, 'from alembic import op\n'), ((9115, 9147), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""sex"""'], {}), "('dataset', 'sex')\n", (9129, 9147), False, 'from alembic import op\n'), ((9152, 9198), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""sequencing_method"""'], {}), "('dataset', 'sequencing_method')\n", (9166, 9198), False, 'from alembic import op\n'), ((9203, 9243), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""sample_type"""'], {}), "('dataset', 'sample_type')\n", (9217, 9243), False, 'from alembic import op\n'), ((9248, 9286), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""sample_id"""'], {}), "('dataset', 'sample_id')\n", (9262, 9286), False, 'from alembic import op\n'), ((9291, 9338), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""sample_description"""'], {}), "('dataset', 'sample_description')\n", (9305, 9338), False, 'from alembic import op\n'), ((9343, 9389), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""sample_attributes"""'], {}), "('dataset', 'sample_attributes')\n", (9357, 9389), False, 'from alembic import op\n'), ((9394, 9448), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""run_quality_counts_maxrun"""'], {}), "('dataset', 'run_quality_counts_maxrun')\n", (9408, 9448), False, 'from alembic import op\n'), ((9453, 9496), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""run_ids_maxrun"""'], {}), "('dataset', 'run_ids_maxrun')\n", (9467, 9496), False, 'from alembic import op\n'), ((9501, 9545), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""ref_biomaterial"""'], {}), "('dataset', 'ref_biomaterial')\n", (9515, 9545), False, 'from alembic import op\n'), ((9550, 9590), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""propagation"""'], {}), "('dataset', 'propagation')\n", (9564, 9590), False, 'from alembic import op\n'), ((9595, 9636), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""project_name"""'], {}), "('dataset', 'project_name')\n", (9609, 9636), False, 'from alembic import op\n'), ((9641, 9676), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""ploidy"""'], {}), "('dataset', 'ploidy')\n", (9655, 9676), False, 'from alembic import op\n'), ((9681, 9727), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""organization_name"""'], {}), "('dataset', 'organization_name')\n", (9695, 9727), False, 'from alembic import op\n'), ((9732, 9782), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""organization_contacts"""'], {}), "('dataset', 'organization_contacts')\n", (9746, 9782), False, 'from alembic import op\n'), ((9787, 9836), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""organization_address"""'], {}), "('dataset', 'organization_address')\n", (9801, 9836), False, 'from alembic import op\n'), ((9841, 9891), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', 
'"""num_runs_in_accession"""'], {}), "('dataset', 'num_runs_in_accession')\n", (9855, 9891), False, 'from alembic import op\n'), ((9896, 9938), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""num_replicons"""'], {}), "('dataset', 'num_replicons')\n", (9910, 9938), False, 'from alembic import op\n'), ((9943, 9984), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""nuccore_uids"""'], {}), "('dataset', 'nuccore_uids')\n", (9957, 9984), False, 'from alembic import op\n'), ((9989, 10030), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""nuccore_link"""'], {}), "('dataset', 'nuccore_link')\n", (10003, 10030), False, 'from alembic import op\n'), ((10035, 10077), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""ncbi_taxon_id"""'], {}), "('dataset', 'ncbi_taxon_id')\n", (10049, 10077), False, 'from alembic import op\n'), ((10082, 10136), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""metadata_publication_date"""'], {}), "('dataset', 'metadata_publication_date')\n", (10096, 10136), False, 'from alembic import op\n'), ((10141, 10186), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""library_strategy"""'], {}), "('dataset', 'library_strategy')\n", (10155, 10186), False, 'from alembic import op\n'), ((10191, 10246), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""library_screening_strategy"""'], {}), "('dataset', 'library_screening_strategy')\n", (10205, 10246), False, 'from alembic import op\n'), ((10251, 10310), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""library_reads_sequenced_maxrun"""'], {}), "('dataset', 'library_reads_sequenced_maxrun')\n", (10265, 10310), False, 'from alembic import op\n'), ((10315, 10356), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""library_name"""'], {}), "('dataset', 'library_name')\n", (10329, 10356), False, 'from alembic import op\n'), ((10361, 10419), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""library_construction_protocol"""'], {}), "('dataset', 'library_construction_protocol')\n", (10375, 10419), False, 'from alembic import op\n'), ((10424, 10480), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""library_construction_method"""'], {}), "('dataset', 'library_construction_method')\n", (10438, 10480), False, 'from alembic import op\n'), ((10485, 10521), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""lat_lon"""'], {}), "('dataset', 'lat_lon')\n", (10499, 10521), False, 'from alembic import op\n'), ((10526, 10572), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""isol_growth_condt"""'], {}), "('dataset', 'isol_growth_condt')\n", (10540, 10572), False, 'from alembic import op\n'), ((10577, 10622), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""instrument_model"""'], {}), "('dataset', 'instrument_model')\n", (10591, 10622), False, 'from alembic import op\n'), ((10627, 10668), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""host_disease"""'], {}), "('dataset', 'host_disease')\n", (10641, 10668), False, 'from alembic import op\n'), ((10673, 10714), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""geo_loc_name"""'], {}), "('dataset', 'geo_loc_name')\n", (10687, 10714), False, 'from alembic import op\n'), ((10719, 10765), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""gc_percent_maxrun"""'], {}), "('dataset', 'gc_percent_maxrun')\n", (10733, 10765), 
False, 'from alembic import op\n'), ((10770, 10817), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""finishing_strategy"""'], {}), "('dataset', 'finishing_strategy')\n", (10784, 10817), False, 'from alembic import op\n'), ((10822, 10861), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""expt_title"""'], {}), "('dataset', 'expt_title')\n", (10836, 10861), False, 'from alembic import op\n'), ((10866, 10904), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""expt_link"""'], {}), "('dataset', 'expt_link')\n", (10880, 10904), False, 'from alembic import op\n'), ((10909, 10945), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""expt_id"""'], {}), "('dataset', 'expt_id')\n", (10923, 10945), False, 'from alembic import op\n'), ((10950, 11002), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""expt_design_description"""'], {}), "('dataset', 'expt_design_description')\n", (10964, 11002), False, 'from alembic import op\n'), ((11007, 11055), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""experimental_factor"""'], {}), "('dataset', 'experimental_factor')\n", (11021, 11055), False, 'from alembic import op\n'), ((11060, 11103), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""estimated_size"""'], {}), "('dataset', 'estimated_size')\n", (11074, 11103), False, 'from alembic import op\n'), ((11108, 11149), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""env_material"""'], {}), "('dataset', 'env_material')\n", (11122, 11149), False, 'from alembic import op\n'), ((11154, 11194), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""env_feature"""'], {}), "('dataset', 'env_feature')\n", (11168, 11194), False, 'from alembic import op\n'), ((11199, 11237), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""env_biome"""'], {}), "('dataset', 'env_biome')\n", (11213, 11237), False, 'from alembic import op\n'), ((11242, 11280), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""elevation"""'], {}), "('dataset', 'elevation')\n", (11256, 11280), False, 'from alembic import op\n'), ((11285, 11334), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""download_size_maxrun"""'], {}), "('dataset', 'download_size_maxrun')\n", (11299, 11334), False, 'from alembic import op\n'), ((11339, 11377), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""dev_stage"""'], {}), "('dataset', 'dev_stage')\n", (11353, 11377), False, 'from alembic import op\n'), ((11382, 11416), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""depth"""'], {}), "('dataset', 'depth')\n", (11396, 11416), False, 'from alembic import op\n'), ((11421, 11463), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""db_source_uid"""'], {}), "('dataset', 'db_source_uid')\n", (11435, 11463), False, 'from alembic import op\n'), ((11468, 11506), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""db_source"""'], {}), "('dataset', 'db_source')\n", (11482, 11506), False, 'from alembic import op\n'), ((11511, 11555), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""collection_time"""'], {}), "('dataset', 'collection_time')\n", (11525, 11555), False, 'from alembic import op\n'), ((11560, 11602), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""biosample_uid"""'], {}), "('dataset', 'biosample_uid')\n", (11574, 11602), False, 'from alembic import op\n'), ((11607, 11653), 
'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""biosample_package"""'], {}), "('dataset', 'biosample_package')\n", (11621, 11653), False, 'from alembic import op\n'), ((11658, 11703), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""biosample_models"""'], {}), "('dataset', 'biosample_models')\n", (11672, 11703), False, 'from alembic import op\n'), ((11708, 11749), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""biosample_id"""'], {}), "('dataset', 'biosample_id')\n", (11722, 11749), False, 'from alembic import op\n'), ((11754, 11796), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""bioproject_id"""'], {}), "('dataset', 'bioproject_id')\n", (11768, 11796), False, 'from alembic import op\n'), ((11801, 11850), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""biomaterial_provider"""'], {}), "('dataset', 'biomaterial_provider')\n", (11815, 11850), False, 'from alembic import op\n'), ((11855, 11902), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""baseT_count_maxrun"""'], {}), "('dataset', 'baseT_count_maxrun')\n", (11869, 11902), False, 'from alembic import op\n'), ((11907, 11954), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""baseN_count_maxrun"""'], {}), "('dataset', 'baseN_count_maxrun')\n", (11921, 11954), False, 'from alembic import op\n'), ((11959, 12006), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""baseG_count_maxrun"""'], {}), "('dataset', 'baseG_count_maxrun')\n", (11973, 12006), False, 'from alembic import op\n'), ((12011, 12058), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""baseC_count_maxrun"""'], {}), "('dataset', 'baseC_count_maxrun')\n", (12025, 12058), False, 'from alembic import op\n'), ((12063, 12110), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""baseA_count_maxrun"""'], {}), "('dataset', 'baseA_count_maxrun')\n", (12077, 12110), False, 'from alembic import op\n'), ((12115, 12166), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""avg_read_length_maxrun"""'], {}), "('dataset', 'avg_read_length_maxrun')\n", (12129, 12166), False, 'from alembic import op\n'), ((12171, 12208), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""assembly"""'], {}), "('dataset', 'assembly')\n", (12185, 12208), False, 'from alembic import op\n'), ((12213, 12250), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""altitude"""'], {}), "('dataset', 'altitude')\n", (12227, 12250), False, 'from alembic import op\n'), ((12255, 12287), 'alembic.op.drop_column', 'op.drop_column', (['"""dataset"""', '"""age"""'], {}), "('dataset', 'age')\n", (12269, 12287), False, 'from alembic import op\n'), ((441, 450), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (448, 450), True, 'import sqlalchemy as sa\n'), ((519, 528), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (526, 528), True, 'import sqlalchemy as sa\n'), ((597, 606), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (604, 606), True, 'import sqlalchemy as sa\n'), ((689, 699), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (697, 699), True, 'import sqlalchemy as sa\n'), ((778, 789), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (787, 789), True, 'import sqlalchemy as sa\n'), ((868, 879), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (877, 879), True, 'import sqlalchemy as sa\n'), ((958, 969), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (967, 969), True, 'import sqlalchemy as sa\n'), ((1048, 
1059), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (1057, 1059), True, 'import sqlalchemy as sa\n'), ((1138, 1149), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (1147, 1149), True, 'import sqlalchemy as sa\n'), ((1230, 1239), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (1237, 1239), True, 'import sqlalchemy as sa\n'), ((1313, 1333), 'sqlalchemy.String', 'sa.String', ([], {'length': '(40)'}), '(length=40)\n', (1322, 1333), True, 'import sqlalchemy as sa\n'), ((1406, 1426), 'sqlalchemy.String', 'sa.String', ([], {'length': '(40)'}), '(length=40)\n', (1415, 1426), True, 'import sqlalchemy as sa\n'), ((1503, 1512), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (1510, 1512), True, 'import sqlalchemy as sa\n'), ((1590, 1599), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (1597, 1599), True, 'import sqlalchemy as sa\n'), ((1673, 1693), 'sqlalchemy.String', 'sa.String', ([], {'length': '(30)'}), '(length=30)\n', (1682, 1693), True, 'import sqlalchemy as sa\n'), ((1769, 1778), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (1776, 1778), True, 'import sqlalchemy as sa\n'), ((1848, 1868), 'sqlalchemy.String', 'sa.String', ([], {'length': '(20)'}), '(length=20)\n', (1857, 1868), True, 'import sqlalchemy as sa\n'), ((1942, 1962), 'sqlalchemy.String', 'sa.String', ([], {'length': '(50)'}), '(length=50)\n', (1951, 1962), True, 'import sqlalchemy as sa\n'), ((2028, 2037), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2035, 2037), True, 'import sqlalchemy as sa\n'), ((2107, 2116), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2114, 2116), True, 'import sqlalchemy as sa\n'), ((2197, 2208), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (2206, 2208), True, 'import sqlalchemy as sa\n'), ((2278, 2287), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2285, 2287), True, 'import sqlalchemy as sa\n'), ((2357, 2366), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2364, 2366), True, 'import sqlalchemy as sa\n'), ((2438, 2447), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2445, 2447), True, 'import sqlalchemy as sa\n'), ((2520, 2529), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2527, 2529), True, 'import sqlalchemy as sa\n'), ((2604, 2613), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2611, 2613), True, 'import sqlalchemy as sa\n'), ((2693, 2702), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2700, 2702), True, 'import sqlalchemy as sa\n'), ((2786, 2795), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2793, 2795), True, 'import sqlalchemy as sa\n'), ((2863, 2883), 'sqlalchemy.String', 'sa.String', ([], {'length': '(30)'}), '(length=30)\n', (2872, 2883), True, 'import sqlalchemy as sa\n'), ((2953, 2962), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2960, 2962), True, 'import sqlalchemy as sa\n'), ((3033, 3042), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3040, 3042), True, 'import sqlalchemy as sa\n'), ((3121, 3130), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3128, 3130), True, 'import sqlalchemy as sa\n'), ((3208, 3218), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (3216, 3218), True, 'import sqlalchemy as sa\n'), ((3291, 3300), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3298, 3300), True, 'import sqlalchemy as sa\n'), ((3373, 3382), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3380, 3382), True, 'import sqlalchemy as sa\n'), ((3459, 3468), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3466, 3468), True, 'import sqlalchemy as sa\n'), ((3546, 3555), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3553, 3555), True, 'import 
sqlalchemy as sa\n'), ((3623, 3632), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3630, 3632), True, 'import sqlalchemy as sa\n'), ((3720, 3729), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3727, 3729), True, 'import sqlalchemy as sa\n'), ((3819, 3828), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3826, 3828), True, 'import sqlalchemy as sa\n'), ((3901, 3910), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (3908, 3910), True, 'import sqlalchemy as sa\n'), ((4001, 4012), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (4010, 4012), True, 'import sqlalchemy as sa\n'), ((4099, 4108), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4106, 4108), True, 'import sqlalchemy as sa\n'), ((4185, 4194), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4192, 4194), True, 'import sqlalchemy as sa\n'), ((4280, 4293), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (4291, 4293), True, 'import sqlalchemy as sa\n'), ((4367, 4387), 'sqlalchemy.String', 'sa.String', ([], {'length': '(50)'}), '(length=50)\n', (4376, 4387), True, 'import sqlalchemy as sa\n'), ((4460, 4469), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4467, 4469), True, 'import sqlalchemy as sa\n'), ((4542, 4551), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4549, 4551), True, 'import sqlalchemy as sa\n'), ((4625, 4634), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4632, 4634), True, 'import sqlalchemy as sa\n'), ((4716, 4728), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4726, 4728), True, 'import sqlalchemy as sa\n'), ((4809, 4818), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4816, 4818), True, 'import sqlalchemy as sa\n'), ((4900, 4909), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4907, 4909), True, 'import sqlalchemy as sa\n'), ((4987, 4996), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (4994, 4996), True, 'import sqlalchemy as sa\n'), ((5063, 5072), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5070, 5072), True, 'import sqlalchemy as sa\n'), ((5145, 5154), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5152, 5154), True, 'import sqlalchemy as sa\n'), ((5226, 5235), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5233, 5235), True, 'import sqlalchemy as sa\n'), ((5311, 5320), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5318, 5320), True, 'import sqlalchemy as sa\n'), ((5395, 5415), 'sqlalchemy.String', 'sa.String', ([], {'length': '(30)'}), '(length=30)\n', (5404, 5415), True, 'import sqlalchemy as sa\n'), ((5501, 5510), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5508, 5510), True, 'import sqlalchemy as sa\n'), ((5588, 5597), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5595, 5597), True, 'import sqlalchemy as sa\n'), ((5676, 5685), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5683, 5685), True, 'import sqlalchemy as sa\n'), ((5755, 5775), 'sqlalchemy.String', 'sa.String', ([], {'length': '(30)'}), '(length=30)\n', (5764, 5775), True, 'import sqlalchemy as sa\n'), ((5847, 5856), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5854, 5856), True, 'import sqlalchemy as sa\n'), ((5934, 5943), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (5941, 5943), True, 'import sqlalchemy as sa\n'), ((6007, 6016), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6014, 6016), True, 'import sqlalchemy as sa\n'), ((6090, 6099), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6097, 6099), True, 'import sqlalchemy as sa\n'), ((6174, 6183), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6181, 6183), True, 'import sqlalchemy as sa\n'), ((6260, 6269), 'sqlalchemy.Text', 
'sa.Text', ([], {}), '()\n', (6267, 6269), True, 'import sqlalchemy as sa\n'), ((6338, 6358), 'sqlalchemy.String', 'sa.String', ([], {'length': '(30)'}), '(length=30)\n', (6347, 6358), True, 'import sqlalchemy as sa\n'), ((6430, 6439), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6437, 6439), True, 'import sqlalchemy as sa\n'), ((6511, 6520), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6518, 6520), True, 'import sqlalchemy as sa\n'), ((6591, 6600), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6598, 6600), True, 'import sqlalchemy as sa\n'), ((6677, 6686), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6684, 6686), True, 'import sqlalchemy as sa\n'), ((6760, 6780), 'sqlalchemy.String', 'sa.String', ([], {'length': '(30)'}), '(length=30)\n', (6769, 6780), True, 'import sqlalchemy as sa\n'), ((6868, 6877), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6875, 6877), True, 'import sqlalchemy as sa\n'), ((6949, 6958), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6956, 6958), True, 'import sqlalchemy as sa\n'), ((7037, 7046), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (7044, 7046), True, 'import sqlalchemy as sa\n'), ((7124, 7133), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (7131, 7133), True, 'import sqlalchemy as sa\n'), ((7215, 7224), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (7222, 7224), True, 'import sqlalchemy as sa\n'), ((7291, 7300), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (7298, 7300), True, 'import sqlalchemy as sa\n'), ((7383, 7394), 'sqlalchemy.BIGINT', 'sa.BIGINT', ([], {}), '()\n', (7392, 7394), True, 'import sqlalchemy as sa\n'), ((7832, 7863), 'sqlalchemy.dialects.mysql.INTEGER', 'mysql.INTEGER', ([], {'display_width': '(11)'}), '(display_width=11)\n', (7845, 7863), False, 'from sqlalchemy.dialects import mysql\n'), ((7960, 7973), 'sqlalchemy.dialects.mysql.FLOAT', 'mysql.FLOAT', ([], {}), '()\n', (7971, 7973), False, 'from sqlalchemy.dialects import mysql\n'), ((8048, 8061), 'sqlalchemy.dialects.mysql.FLOAT', 'mysql.FLOAT', ([], {}), '()\n', (8059, 8061), False, 'from sqlalchemy.dialects import mysql\n'), ((8137, 8168), 'sqlalchemy.dialects.mysql.INTEGER', 'mysql.INTEGER', ([], {'display_width': '(11)'}), '(display_width=11)\n', (8150, 8168), False, 'from sqlalchemy.dialects import mysql\n'), ((8263, 8294), 'sqlalchemy.dialects.mysql.INTEGER', 'mysql.INTEGER', ([], {'display_width': '(11)'}), '(display_width=11)\n', (8276, 8294), False, 'from sqlalchemy.dialects import mysql\n')] |
import os
import os.path as osp
import datetime
def get_dir(directory):
"""
Creates the given directory if it does not exist.
@param directory: The path to the directory.
@return: The path to the directory.
"""
if not os.path.exists(directory):
os.makedirs(directory)
return directory
def get_file_dir(file_path):
directory = os.path.dirname(file_path)
get_dir(directory)
return file_path
def get_date_str():
"""
@return: A string representing the current date/time that can be used as a directory name.
"""
return str(datetime.datetime.now()).replace(' ', '-').replace(':', '-').replace('.', '-')[:-7].replace('-', '')
def get_image_list(images):
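    # Collects absolute paths (rooted at the current working directory) for every
    # .png/.jpeg/.jpg file in `images`; if `images` is itself a file, it is
    # returned as a one-element list, and a missing path exits the program.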
imlist = []
try:
        imlist = [osp.join(osp.realpath('.'), images, img)
                  for img in os.listdir(images)
                  if os.path.splitext(img)[1] in ('.png', '.jpeg', '.jpg')]
except NotADirectoryError:
imlist = []
imlist.append(osp.join(osp.realpath('.'), images))
except FileNotFoundError:
print ("No file or directory with the name {}".format(images))
exit()
return imlist
def get_subdir_imagelist(directory):
image_list = []
for path, subdirs, files in os.walk(directory):
for name in files:
if os.path.splitext(name)[1] == '.jpg':
image_list.append(os.path.join(path, name))
return image_list
def get_stem(file_path):
basename = os.path.basename(file_path)
stem = os.path.splitext(basename)[0]
return stem
def get_extension(file_path):
basename = os.path.basename(file_path)
ext = basename.split('.')[-1]
return ext | [
"os.path.exists",
"os.listdir",
"os.makedirs",
"os.path.splitext",
"os.path.join",
"os.path.realpath",
"os.path.dirname",
"datetime.datetime.now",
"os.path.basename",
"os.walk"
] | [((368, 394), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (383, 394), False, 'import os\n'), ((1280, 1298), 'os.walk', 'os.walk', (['directory'], {}), '(directory)\n', (1287, 1298), False, 'import os\n'), ((1503, 1530), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (1519, 1530), False, 'import os\n'), ((1634, 1661), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (1650, 1661), False, 'import os\n'), ((243, 268), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (257, 268), False, 'import os\n'), ((278, 300), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (289, 300), False, 'import os\n'), ((1542, 1568), 'os.path.splitext', 'os.path.splitext', (['basename'], {}), '(basename)\n', (1558, 1568), False, 'import os\n'), ((769, 786), 'os.path.realpath', 'osp.realpath', (['"""."""'], {}), "('.')\n", (781, 786), True, 'import os.path as osp\n'), ((812, 830), 'os.listdir', 'os.listdir', (['images'], {}), '(images)\n', (822, 830), False, 'import os\n'), ((1027, 1044), 'os.path.realpath', 'osp.realpath', (['"""."""'], {}), "('.')\n", (1039, 1044), True, 'import os.path as osp\n'), ((1342, 1364), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (1358, 1364), False, 'import os\n'), ((1413, 1437), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (1425, 1437), False, 'import os\n'), ((834, 855), 'os.path.splitext', 'os.path.splitext', (['img'], {}), '(img)\n', (850, 855), False, 'import os\n'), ((872, 893), 'os.path.splitext', 'os.path.splitext', (['img'], {}), '(img)\n', (888, 893), False, 'import os\n'), ((910, 931), 'os.path.splitext', 'os.path.splitext', (['img'], {}), '(img)\n', (926, 931), False, 'import os\n'), ((586, 609), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (607, 609), False, 'import datetime\n')] |
import time
import numpy as np
import networkx as nx
import pandas as pd
import seaborn as sns
sns.set(color_codes=True)
from .utils import chunks
class NAqueryResult(object):
"""
    Query result of a NeuroArch query via Client.executeNAquery.
# Arguments
task (dict):
query task in NeuroArch JSON format
x_scale, y_scale, z_scale, r_scale (float):
factors to scale the morphology data (default: 1.0).
x_shift, y_shift, z_shift, r_shift (float):
factors to shift 3D coordinates of the morphology data and
default radius (default: 0.0)
# Attributes:
task (dict):
storage for the input task
format (string):
            format of the NA query result.
queryID (string):
ID of the NA query.
NLPquery (string):
If the NA query is initiated by a NLP query, the original NLPquery.
graph (networkx.MultiDiGraph):
a graph representing the data retrieved from NA database.
"""
def __init__(self, task, x_scale = 1.0, y_scale = 1.0, z_scale = 1.0,
r_scale = 1.0, x_shift = 0.0, y_shift = 0.0, z_shift = 0.0,
r_shift = 0.0):
self.task = task
self.format = task.get('format', 'morphology')
self.queryID = task.get('queryID', '')
self.NLPquery = task.get('NLPquery', None)
self.x_scale = x_scale
self.y_scale = y_scale
self.z_scale = z_scale
self.r_scale = r_scale
self.x_shift = x_shift
self.y_shift = y_shift
self.z_shift = z_shift
self.r_shift = r_shift
self.graph = nx.MultiDiGraph()
# self.initialize()
def initialize(self):
if self.format == 'no_result':
self.data = {}
return
if self.format == 'morphology':
self.data= {'nodes': {}, 'edges': []}
elif self.format == 'nx':
self.data = {'nodes': {}, 'edges': []}
self.locked = False
def receive_data(self, data):
if self.format == 'no_result':
self.data = {}
return
if self.format in ['morphology', 'nx']:
self._receive_data_from_nx_query(data)
elif self.format == 'df':
self._get_result_from_df()
elif self.format == 'get_data':
self._get_result_from_get_data()
elif self.format == 'nk':
self._get_result_from_nk()
else:
            raise ValueError('NAres format "{}" unrecognized'.format(self.format))
def finalize(self):
if self.format == 'no_result':
return
if self.format == 'morphology':
self._finalize_morphology()
elif self.format == 'nx':
self._finalize_nx()
self.data = {}
def send_morphology(self, Comm, threshold = None):
data = {}
for rid, v in self.get('MorphologyData').items():
morphology_data = v.copy()
node = self.fromData(rid)
if node is not None:
morphology_data.update(self.graph.nodes[node])
data[rid] = morphology_data
data[rid]['orid'] = node
if threshold is None:
threshold = self.task.get('threshold', 5)
if threshold == 'auto':
threshold = 20
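        # Send the morphology payload to the NLP widget in chunks of at most 'threshold' records, keeping each message small.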
for c in chunks(data, threshold):
a = {}
a["messageType"] = "Data"
a["widget"] = "NLP"
a["data"] = {"data": c, "queryID": self.queryID}
Comm(a)
def _receive_data_from_nx_query(self, data):
while self.locked:
time.sleep(1)
self.locked = True
try:
self.data['nodes'].update(data['nodes'])
self.data['edges'].extend(data['edges'])
self.locked = False
except:
self.locked = False
raise
def _finalize_morphology(self):
while self.locked:
time.sleep(1)
G = self.graph
G.add_nodes_from(list(self.data['nodes'].items()))
G.add_edges_from(self.data['edges'])
for rid, morphology_data in self.get('MorphologyData').items():
if "x" in morphology_data:
morphology_data["x"] = [x*self.x_scale+self.x_shift for x in morphology_data["x"]]
if "y" in morphology_data:
morphology_data["y"] = [y*self.y_scale+self.y_shift for y in morphology_data["y"]]
if "z" in morphology_data:
morphology_data["z"] = [z*self.z_scale+self.z_shift for z in morphology_data["z"]]
if "r" in morphology_data:
morphology_data["r"] = [r*self.r_scale+self.r_shift for r in morphology_data["r"]]
if "vertices" in morphology_data:
vertices = morphology_data["vertices"]
for j in range(len(vertices)//3):
vertices[j*3] = vertices[j*3]*self.x_scale + self.x_shift
vertices[j*3+1] = vertices[j*3+1]*self.y_scale + self.y_shift
vertices[j*3+2] = vertices[j*3+2]*self.z_scale + self.z_shift
def _finalize_nx(self):
while self.locked:
time.sleep(1)
G = self.graph
G.add_nodes_from(list(self.data['nodes'].items()))
G.add_edges_from(self.data['edges'])
@property
def neurons(self):
return self.get('Neuron')
@property
def synapses(self):
return self.get(['Synapse', 'InferredSynapse'])
def fromData(self, data_rid):
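        # Find the object node that owns the given data node by following its incoming 'HasData' edge.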
obj_rids = [rid for rid, _, v in self.graph.in_edges(data_rid, data=True) if v.get('class', None) == 'HasData']
if len(obj_rids) == 1:
return obj_rids[0]
elif len(obj_rids) == 0:
return None
else:
            raise ValueError('Data found to be owned by more than one node, which should not be possible.')
def getData(self, rid):
data_rids = [data_rid for _, data_rid, v in self.graph.out_edges(rid, data=True) if v.get('class', None) == 'HasData']
return data_rids
def get(self, cls):
"""
Get all data that belongs to the class cls
# Arguments:
cls (list or str):
A list of str of the classes to be retrieved
# Returns
            dict: A dict keyed by OrientDB record ID, with the node attributes as values.
"""
if isinstance(cls, str):
cls = [cls]
assert isinstance(cls, list)
return {rid: v for rid, v in self.graph.nodes(data=True) if v['class'] in cls}
class NeuroNLPResult(NAqueryResult):
"""
A buffer processing commands and mirroring data to be sent to the NeuroNLP window.
# Arguments:
enableResets (bool):
If False, will not reset the NeuroNLP window on 'show' queries.
default: True.
"""
def __init__(self, enableResets = True):
self.commands = []
self.processed_commands = []
self.enableResets = enableResets
self.graph = nx.MultiDiGraph()
self.uname_to_rid = {}
def receive_cmd(self, data):
if 'commands' in data:
self.commands.append(data['commands'])
def clear_cmd(self):
self.commands = []
def process_commands(self, Comm):
while len(self.commands):
command =self.commands.pop(0)
self.processed_commands.append(command)
if 'reset' in command and self.enableResets == False:
continue
a = {"data": {'commands': command},
"messageType": "Command",
"widget": "NLP"}
Comm(a)
if 'remove' in command:
to_remove = command['remove'][0]
objs_to_remove = list(set([self.fromData(m_rid) for m_rid in to_remove])-set([None]))
self.graph.remove_nodes_from(objs_to_remove+to_remove)
self._refresh_data_map()
if 'reset' in command:
self.reset()
def clear_history(self):
self.processed_commands = []
def reset(self):
self.graph.clear()
self._refresh_data_map()
def _refresh_data_map(self):
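        # Rebuild the uname -> record-ID lookup from object nodes, skipping MorphologyData entries.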
self.uname_to_rid = {v['uname']: rid for rid, v in self.graph.nodes(data=True)
if 'uname' in v and v.get('class', None) != 'MorphologyData'}
def process_data(self, na_query_result, Comm):
if na_query_result.format == 'morphology':
self.graph.update(na_query_result.graph)
self._refresh_data_map()
na_query_result.send_morphology(Comm)
#TODO
def getInfo(self, rid):
pass
#TODO
def getStats(self, rid = None, neuron_name = None):
pass
@property
def rids(self):
return list(self.uname_to_rid.values())
def __getitem__(self, key):
if key.startswith('#'):
node = self.graph.nodes[key]
if node.get('class', None) == 'MorphologyData':
obj_node = self.graph.nodes[self.fromData(key)].copy()
data_node = node.copy()
else:
data_nodes = [self.graph.nodes[n].copy() for n in self.getData(key)]
obj_node = node.copy()
for i, data_node in enumerate(data_nodes):
obj_node[data_node.get('class', 'Data{}'.format(i))] = data_node
return obj_node
else:
rid = self.uname_to_rid.get(key, None)
if rid is None:
raise KeyError('Node with uname {} is not in the NLP result'.format(key))
            # getData returns a list of data record IDs; attach each data node under its class name
            data_nodes = [self.graph.nodes[n].copy() for n in self.getData(rid)]
            obj_node = self.graph.nodes[rid].copy()
            for i, data_node in enumerate(data_nodes):
                obj_node[data_node.get('class', 'Data{}'.format(i))] = data_node
            return obj_node
def __setitem__(self, key, value):
if key.startswith('#'):
rid = key
else:
rid = self.uname_to_rid.get(key, None)
if rid is None:
raise KeyError('Node with uname {} is not in the NLP result'.format(key))
        self.graph.add_node(rid, **value)
class NeuronGraph(nx.DiGraph):
"""
Construct a graph of neurons where nodes are neurons and
    synapses are represented as edges. The weight of an edge equals
the number of synapses between the two connected neurons.
# Arguments
connectivity_query_result (NeuroNLPResult or networkx.(Multi)DiGraph):
query result from Client.getConnectivity() or equivalent query
"""
def __init__(self, connectivity_query_result):
super(NeuronGraph, self).__init__()
if isinstance(connectivity_query_result, NAqueryResult):
if connectivity_query_result.graph is None:
raise AttributeError('query result does not have a graph')
else:
graph = connectivity_query_result.graph
elif isinstance(connectivity_query_result, nx.Graph):
graph = connectivity_query_result
neurons = {n: v for n, v in graph.nodes(data = True) \
if v['class'] in ['Neuron']}
synapses = {n: v for n, v in graph.nodes(data = True) \
if v['class'] in ['Synapse', 'InferredSynapse']}
pre_to_synapse_edges = {post:pre for pre, post, prop in graph.edges(data = True) \
if prop.get('class', None) == 'SendsTo' and pre in neurons}
synapse_to_post_edges = {pre:post for pre, post, prop in graph.edges(data = True) \
if prop.get('class', None) == 'SendsTo' and post in neurons}
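        # Collapse each synapse node into a weighted (pre, post, N) connection between its pre- and post-synaptic neurons.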
connections = [(pre, synapse_to_post_edges[syn], synapses[syn]['N']) \
for syn, pre in pre_to_synapse_edges.items()]
self.add_nodes_from(list(neurons.items()))
self.add_weighted_edges_from(connections)
def names(self):
return sorted([self.nodes[n]['uname'] for n in self.nodes()])
def adjacency_matrix(self, uname_order = None, rid_order = None):
"""
Get adjacency matrix between Neurons.
# Arguments
uname_order (list):
A list of the uname of neurons to order the rows and columns of the adjacency matrix.
If None, use rid_order. If rid_order is None, will sort uname for order.
rid_order (list):
A list of the rids of neurons to order the rows and columns of the adjacency matrix.
If None, use uname_order. if uname_order is None, will sort uname for order.
# Returns
M (numpy.ndarray):
                The adjacency matrix of synapse counts between the neurons.
            uname_order (list):
A list of unames by which the rows and columns of M are ordered.
"""
if uname_order is None and rid_order is None:
order = sorted([(self.nodes[n]['uname'], n) for n in self.nodes()])
uname_order = [uname for uname, _ in order]
rid_order = [rid for _, rid in order]
elif uname_order is None:
# rid_order
uname_order = [self.nodes[n]['uname'] for n in rid_order]
else:
# uname_order
order_dict = {self.nodes[n]['uname']: n for n in self.nodes()}
rid_order = [order_dict[uname] for uname in uname_order]
M = nx.adj_matrix(self, nodelist = rid_order).todense()
return M, uname_order
class CircuitGraph(nx.MultiDiGraph):
def __init__(self, connectivity_query_result):
super(CircuitGraph, self).__init__()
if isinstance(connectivity_query_result, NAqueryResult):
if connectivity_query_result.graph is None:
raise AttributeError('query result does not have a graph')
else:
graph = connectivity_query_result.graph
elif isinstance(connectivity_query_result, nx.Graph):
graph = connectivity_query_result
self.add_nodes_from(list(graph.nodes(data = True)))
self.add_edges_from(graph.edges(data = True))
def copy(self):
return self.__class__(self)
| [
"networkx.adj_matrix",
"seaborn.set",
"networkx.MultiDiGraph",
"time.sleep"
] | [((97, 122), 'seaborn.set', 'sns.set', ([], {'color_codes': '(True)'}), '(color_codes=True)\n', (104, 122), True, 'import seaborn as sns\n'), ((1683, 1700), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (1698, 1700), True, 'import networkx as nx\n'), ((7070, 7087), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (7085, 7087), True, 'import networkx as nx\n'), ((3668, 3681), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3678, 3681), False, 'import time\n'), ((4003, 4016), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4013, 4016), False, 'import time\n'), ((5232, 5245), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (5242, 5245), False, 'import time\n'), ((13433, 13472), 'networkx.adj_matrix', 'nx.adj_matrix', (['self'], {'nodelist': 'rid_order'}), '(self, nodelist=rid_order)\n', (13446, 13472), True, 'import networkx as nx\n')] |
#
# CFNS - Rijkswaterstaat CIV, Delft © 2020 - 2021 <<EMAIL>>
#
# Copyright 2020 - 2021 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from base64 import b64encode
from ..forms import viewBase64AuthcodeForm
# returns the profile template with the Base64Authentication code form
@login_required(login_url='/login') #if not logged in redirect to /login
def profileView(request):
form = viewBase64AuthcodeForm(request.POST)
info_msg = None
info_type = None
if form.is_valid():
if request.user.check_password(request.POST['password']):
encodedUserAndPass = "{}:{}".format(request.user.username, request.POST['password']).encode("ascii")
userAndPass = b64encode(encodedUserAndPass).decode("ascii")
return render(request, 'profile.html', {'form': form, 'b64code': userAndPass})
else:
info_msg = 'The entered password does not match your current password...'
info_type = 'alert-danger'
return render(request, 'profile.html', {'form': viewBase64AuthcodeForm(), 'info_msg': info_msg, 'info_type': info_type})
| [
"django.shortcuts.render",
"base64.b64encode",
"django.contrib.auth.decorators.login_required"
] | [((1404, 1438), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (1418, 1438), False, 'from django.contrib.auth.decorators import login_required\n'), ((1886, 1957), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'form': form, 'b64code': userAndPass}"], {}), "(request, 'profile.html', {'form': form, 'b64code': userAndPass})\n", (1892, 1957), False, 'from django.shortcuts import render\n'), ((1821, 1850), 'base64.b64encode', 'b64encode', (['encodedUserAndPass'], {}), '(encodedUserAndPass)\n', (1830, 1850), False, 'from base64 import b64encode\n')] |
# Generated by Django 2.2.3 on 2019-08-27 17:35
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("main", "0018_provisional-types")]
operations = [
migrations.RemoveField(model_name="metadatatype", name="input_size"),
migrations.RemoveField(model_name="metadatatype", name="type_class"),
]
| [
"django.db.migrations.RemoveField"
] | [((208, 276), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""metadatatype"""', 'name': '"""input_size"""'}), "(model_name='metadatatype', name='input_size')\n", (230, 276), False, 'from django.db import migrations\n'), ((286, 354), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""metadatatype"""', 'name': '"""type_class"""'}), "(model_name='metadatatype', name='type_class')\n", (308, 354), False, 'from django.db import migrations\n')] |
#===========================================================================
#
# Copyright (c) 2014, California Institute of Technology.
# U.S. Government Sponsorship under NASA Contract NAS7-03001 is
# acknowledged. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#===========================================================================
"Unit test for the MplPatchStyle class."
__version__ = "$Revision: #1 $"
#===========================================================================
# Required imports. Do not modify these.
import unittest
#===========================================================================
# Place all imports after here.
#
import matplotlib as mpl
import matplotlib.patches
from mplStyle import MplPatchStyle
#
# Place all imports before here.
#===========================================================================
#===========================================================================
class TestMplPatchStyle( unittest.TestCase ):
"""Test the MplPatchStyle class."""
#-----------------------------------------------------------------------
def setUp( self ):
"""This method is called before any tests are run."""
pass
#-----------------------------------------------------------------------
def tearDown( self ):
"""This method is called after all tests are run."""
pass
#=======================================================================
# Add tests methods below.
# Any method whose name begins with 'test' will be run by the framework.
#-----------------------------------------------------------------------
def checkElement( self, testName, values, element ):
for property in values:
expected = values[ property ]
msg = "%s: Incorrect value for property: %s" % (testName, property)
getFunc = getattr( element, 'get_%s' % property )
self.assertEqual( expected, getFunc(), msg = msg )
#-----------------------------------------------------------------------
def testBasic( self ):
"""A basic test of MplPatchStyle."""
values = {
# Artist Properties
'alpha' : 0.95,
'clip_on' : True,
'snap' : True,
'visible' : False,
'zorder' : 5,
# Patch Properties
'antialiased' : True,
'facecolor' : (1.0, 0.0, 0.0, 0.95),
'fill' : True,
'edgecolor' : (1.0, 0.0, 0.0, 0.95),
'linestyle' : 'dashdot',
'linewidth' : 2.5,
}
element = mpl.patches.Patch()
style = MplPatchStyle(
# Artist Properties
alpha = values['alpha'],
clip = values['clip_on'],
snap = values['snap'],
visible = values['visible'],
zOrder = values['zorder'],
# Patch Properties
antialiased = values['antialiased'],
color = values['facecolor'],
edgeStyle = values['linestyle'],
edgeWidth = values['linewidth'],
filled = values['fill'],
)
style.apply( element )
self.checkElement( "Apply", values, element )
self.assertRaises( Exception, style.apply, 'invalid',
msg = "Failed to throw on invalid element." )
#-----------------------------------------------------------------------
| [
"mplStyle.MplPatchStyle",
"matplotlib.patches.Patch"
] | [((3993, 4012), 'matplotlib.patches.Patch', 'mpl.patches.Patch', ([], {}), '()\n', (4010, 4012), True, 'import matplotlib as mpl\n'), ((4028, 4322), 'mplStyle.MplPatchStyle', 'MplPatchStyle', ([], {'alpha': "values['alpha']", 'clip': "values['clip_on']", 'snap': "values['snap']", 'visible': "values['visible']", 'zOrder': "values['zorder']", 'antialiased': "values['antialiased']", 'color': "values['facecolor']", 'edgeStyle': "values['linestyle']", 'edgeWidth': "values['linewidth']", 'filled': "values['fill']"}), "(alpha=values['alpha'], clip=values['clip_on'], snap=values[\n 'snap'], visible=values['visible'], zOrder=values['zorder'],\n antialiased=values['antialiased'], color=values['facecolor'], edgeStyle\n =values['linestyle'], edgeWidth=values['linewidth'], filled=values['fill'])\n", (4041, 4322), False, 'from mplStyle import MplPatchStyle\n')] |
#!/usr/bin/env python
#coding=utf-8
"""..."""
__author__ = '<NAME> <<EMAIL>>'
__copyright__ = 'Copyright (c) 2016 <NAME>'
__license__ = 'New-style BSD'
import re
import csv
import codecs
import Levenshtein
from collections import defaultdict
def csv_dict_reader(fname):
with codecs.open(fname, 'r', encoding="utf8") as fp:
for dict_row in csv.DictReader(fp, dialect="excel"):
for k in dict_row:
if dict_row[k] is None:
continue
dict_row[k] = dict_row[k].strip()
yield dict_row
def nopage(k):
return k.split(":")[0]
def check1(refs):
# check 1 :: check competing refs
errors = 0
for r in sorted(refs):
if len(refs[r]) > 1:
print("%s\t%d" % (r.ljust(50), len(refs[r])))
for var in refs[r]:
#out = "\n".join(
# textwrap.wrap(
# var, initial_indent=" ", subsequent_indent=' '
# )
#)
print(" %s" % var)
errors += 1
print("")
print("")
print(len(refs))
print("ERRORS", errors)
def check2(refs, threshold=0.9):
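    # Flag likely duplicate references: pairs whose Levenshtein similarity exceeds 'threshold' and whose year tokens match.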
errors = 0
    is_year = re.compile(r"""\d{4}\w?""")  # a 4-digit year, optionally followed by a letter (e.g. 2016a)
def check_year(ref1, ref2):
return is_year.findall(ref1) == is_year.findall(ref2)
for ref in sorted(refs):
simil = [r for r in refs if r != ref]
# check distance
simil = [
r for r in simil if Levenshtein.ratio(ref, r) > threshold
]
# check years
simil = [r for r in simil if check_year(ref, r)]
# check years
if len(simil):
print(ref)
for s in simil:
print("\t%s" % s)
print("")
errors += 1
print("Errors: %d" % errors)
return
if __name__ == '__main__':
refs = defaultdict(set)
for r in csv_dict_reader('BinfordReferenceMapping_3Mar2016_utf8.csv'):
short = nopage(r['ShortRef_w_AND_wout_pp'])
refs[short].add(r['LongRef'])
for r in csv_dict_reader('ReferenceMapping_1Mar2016.csv'):
refs[r['ReferenceShort']].add(r['ReferenceComplete'])
#check1(refs)
check2(refs)
| [
"csv.DictReader",
"re.compile",
"Levenshtein.ratio",
"collections.defaultdict",
"codecs.open"
] | [((1233, 1262), 're.compile', 're.compile', (['"""\\\\d{4}[\\\\w{1}]?"""'], {}), "('\\\\d{4}[\\\\w{1}]?')\n", (1243, 1262), False, 'import re\n'), ((1913, 1929), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1924, 1929), False, 'from collections import defaultdict\n'), ((281, 321), 'codecs.open', 'codecs.open', (['fname', '"""r"""'], {'encoding': '"""utf8"""'}), "(fname, 'r', encoding='utf8')\n", (292, 321), False, 'import codecs\n'), ((353, 388), 'csv.DictReader', 'csv.DictReader', (['fp'], {'dialect': '"""excel"""'}), "(fp, dialect='excel')\n", (367, 388), False, 'import csv\n'), ((1516, 1541), 'Levenshtein.ratio', 'Levenshtein.ratio', (['ref', 'r'], {}), '(ref, r)\n', (1533, 1541), False, 'import Levenshtein\n')] |
import spacy
from spacy.symbols import nsubj, VERB
from spacy.tokens import Doc
import resources_pb2 as res
import protobuf_utils
from tqdm import tqdm, trange
from sagas.nlu.spacy_helper import chunks_df
from sagas.conf.conf import cf
def lines(filename):
with open(filename) as f:
lines = f.readlines()
return [line.split('\t') for line in lines]
def data_ja_en():
dataf = f"{cf.conf_dir}/ai/seq2seq/jpn-eng/jpn.txt"
pairs = lines(dataf)
return pairs
def put_entities(doc, props):
"""
props={}
put_entities(doc, props)
print(json.dumps(props, indent=2))
:param doc:
:param props:
:return:
"""
for ent in doc.ents:
# print(ent.text, ent.start_char, ent.end_char, ent.label_)
props[ent.label_]=ent.text
facet="%s|%s"%(ent.label_, 'loc')
props[facet]="%d %d"%(ent.start_char, ent.end_char)
def put_chunks(doc, props):
"""
props={}
put_chunks(doc, props)
print(json.dumps(props, indent=2))
:param doc:
:param props:
:return:
"""
toks = {'text': [], 'root_text': [], 'root_dep': [], 'head': []}
for chunk in doc.noun_chunks:
# print(chunk.text, chunk.root.text, chunk.root.dep_,
# chunk.root.head.text)
toks['text'].append(chunk.text)
toks['root_text'].append(chunk.root.text)
toks['root_dep'].append(chunk.root.dep_)
toks['head'].append(chunk.root.head.text)
props[chunk.root.dep_]=chunk.root.text
props[chunk.root.dep_+'|text']=chunk.text
props[chunk.root.dep_+'|head']=chunk.root.head.text
return toks
class SpacyViz(object):
def __init__(self, nlp=None):
from graphviz import Digraph
self.f = Digraph('deps', filename='deps.gv')
self.f.attr(rankdir='LR', size='6,4')
self.f.attr('node', shape='circle')
if nlp is not None:
self.nlp=nlp
else:
self.nlp = spacy.load("en_core_web_sm")
def print_dependencies(self, doc, segs, file=None):
for word in doc:
print("%s --(%s)--> %s" % (word.lemma_, word.dep_, word.head.lemma_))
self.f.edge(word.lemma_, word.head.lemma_, label=word.dep_)
def analyse(self, sents):
"""
SpacyViz().analyse('Revenue exceeded twelve billion dollars')
:param sents:
:return:
"""
segs = []
doc = self.nlp(sents)
for word in doc:
self.f.node(word.lemma_)
segs.append(word.lemma_)
self.print_dependencies(doc, segs)
return self.f
def analyse_chunks(self, sents):
"""
SpacyViz().analyse_chunks('Revenue exceeded twelve billion dollars')
:param sents:
:return:
"""
segs = []
doc = self.nlp(sents)
print(chunks_df(doc))
for chunk in doc.noun_chunks:
self.f.edge(chunk.root.text,
chunk.root.head.text,
label=chunk.root.dep_)
return self.f
"""
ref# procs-dgraph-spacy.ipynb
"""
class SpacyBuilder(object):
def __init__(self):
# from ipywidgets import IntProgress
# from IPython.display import display
self.nlp = spacy.load("en_core_web_sm")
self.pairs = data_ja_en()
max_count = len(self.pairs)
# self.f = IntProgress(min=0, max=max_count) # instantiate the bar
# display(self.f) # display the bar
# def step(self, val=1):
# self.f.value += val # signal to increment the progress bar
def procs(self, out_file):
"""
$ python -m sagas.graph.dgraph_spacy procs '~/pi/data/langs/jpn_eng_spacy.data'
:param out_file:
:return:
"""
import numpy as np
englist = []
for lang in self.pairs:
englist.append(lang[0])
x = np.array(englist)
lang_rs = np.unique(x)
verb_maps = {}
rs = []
# for pair in tqdm(self.pairs):
for lang in tqdm(lang_rs):
# doc = self.nlp(pair[0])
doc = self.nlp(str(lang))
# Finding a verb with a subject from below — good
# verbs = set()
verbs =[]
lemmas=[]
for possible_subject in doc:
if possible_subject.dep == nsubj and possible_subject.head.pos == VERB:
verbs.append(possible_subject.head.text)
lemmas.append(possible_subject.head.lemma_)
if len(verbs) > 0:
verb_maps[lang] = verbs
# self.step()
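            # Serialize the parsed Doc and store it in a protobuf record together with the extracted verbs and lemmas.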
data = doc.to_bytes()
lang = res.RsLang(entries=[lang], store=data, verbs=verbs, verbLemmas=lemmas)
rs.append(lang)
print(len(verb_maps))
# randomly print some data
print(self.pairs[2000], verb_maps[self.pairs[2000][0]])
print(self.pairs[3000], verb_maps[self.pairs[3000][0]])
# write to file
print('.. write to file')
# self.write_samples(False, './data/langs/jpn_eng_spacy.data')
langs = res.RsLangs(langs=rs)
protobuf_utils.write_proto_to(langs, out_file)
print('done.')
def parse(self, pair, rs):
doc = self.nlp(pair[0])
data = doc.to_bytes()
lang = res.RsLang(entries=pair, store=data)
rs.append(lang)
def write_samples(self, only_samples=True, out_file=f'{cf.conf_dir}/data/langs/samples_100.data'):
rs = []
if only_samples:
for i in range(2000, 2100):
self.parse(self.pairs[i], rs)
else:
for p in self.pairs:
self.parse(p, rs)
langs = res.RsLangs(langs=rs)
protobuf_utils.write_proto_to(langs, out_file)
def load_samples(self, input_file=f'{cf.conf_dir}/data/langs/samples_100.data'):
load_langs = res.RsLangs()
protobuf_utils.read_proto(load_langs, input_file)
print(len(load_langs.langs))
for lang in load_langs.langs:
doc = Doc(self.nlp.vocab).from_bytes(lang.store)
print(lang.entries[0], self.get_verbs(doc))
def get_verbs(self, doc):
verbs = set()
for possible_subject in doc:
if possible_subject.dep == nsubj and possible_subject.head.pos == VERB:
verbs.add(possible_subject.head)
return verbs
if __name__ == '__main__':
import fire
fire.Fire(SpacyBuilder)
| [
"numpy.unique",
"fire.Fire",
"spacy.load",
"resources_pb2.RsLang",
"tqdm.tqdm",
"resources_pb2.RsLangs",
"protobuf_utils.read_proto",
"spacy.tokens.Doc",
"sagas.nlu.spacy_helper.chunks_df",
"numpy.array",
"protobuf_utils.write_proto_to",
"graphviz.Digraph"
] | [((6446, 6469), 'fire.Fire', 'fire.Fire', (['SpacyBuilder'], {}), '(SpacyBuilder)\n', (6455, 6469), False, 'import fire\n'), ((1742, 1777), 'graphviz.Digraph', 'Digraph', (['"""deps"""'], {'filename': '"""deps.gv"""'}), "('deps', filename='deps.gv')\n", (1749, 1777), False, 'from graphviz import Digraph\n'), ((3252, 3280), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (3262, 3280), False, 'import spacy\n'), ((3887, 3904), 'numpy.array', 'np.array', (['englist'], {}), '(englist)\n', (3895, 3904), True, 'import numpy as np\n'), ((3923, 3935), 'numpy.unique', 'np.unique', (['x'], {}), '(x)\n', (3932, 3935), True, 'import numpy as np\n'), ((4036, 4049), 'tqdm.tqdm', 'tqdm', (['lang_rs'], {}), '(lang_rs)\n', (4040, 4049), False, 'from tqdm import tqdm, trange\n'), ((5105, 5126), 'resources_pb2.RsLangs', 'res.RsLangs', ([], {'langs': 'rs'}), '(langs=rs)\n', (5116, 5126), True, 'import resources_pb2 as res\n'), ((5135, 5181), 'protobuf_utils.write_proto_to', 'protobuf_utils.write_proto_to', (['langs', 'out_file'], {}), '(langs, out_file)\n', (5164, 5181), False, 'import protobuf_utils\n'), ((5314, 5350), 'resources_pb2.RsLang', 'res.RsLang', ([], {'entries': 'pair', 'store': 'data'}), '(entries=pair, store=data)\n', (5324, 5350), True, 'import resources_pb2 as res\n'), ((5704, 5725), 'resources_pb2.RsLangs', 'res.RsLangs', ([], {'langs': 'rs'}), '(langs=rs)\n', (5715, 5725), True, 'import resources_pb2 as res\n'), ((5734, 5780), 'protobuf_utils.write_proto_to', 'protobuf_utils.write_proto_to', (['langs', 'out_file'], {}), '(langs, out_file)\n', (5763, 5780), False, 'import protobuf_utils\n'), ((5888, 5901), 'resources_pb2.RsLangs', 'res.RsLangs', ([], {}), '()\n', (5899, 5901), True, 'import resources_pb2 as res\n'), ((5910, 5959), 'protobuf_utils.read_proto', 'protobuf_utils.read_proto', (['load_langs', 'input_file'], {}), '(load_langs, input_file)\n', (5935, 5959), False, 'import protobuf_utils\n'), ((1959, 1987), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (1969, 1987), False, 'import spacy\n'), ((2840, 2854), 'sagas.nlu.spacy_helper.chunks_df', 'chunks_df', (['doc'], {}), '(doc)\n', (2849, 2854), False, 'from sagas.nlu.spacy_helper import chunks_df\n'), ((4666, 4736), 'resources_pb2.RsLang', 'res.RsLang', ([], {'entries': '[lang]', 'store': 'data', 'verbs': 'verbs', 'verbLemmas': 'lemmas'}), '(entries=[lang], store=data, verbs=verbs, verbLemmas=lemmas)\n', (4676, 4736), True, 'import resources_pb2 as res\n'), ((6054, 6073), 'spacy.tokens.Doc', 'Doc', (['self.nlp.vocab'], {}), '(self.nlp.vocab)\n', (6057, 6073), False, 'from spacy.tokens import Doc\n')] |
from torchvision.transforms.functional import normalize
from basicsr.utils import img2tensor
import lpips
import numpy as np
from basicsr.metrics.metric_util import reorder_image, to_y_channel
from basicsr.utils.registry import METRIC_REGISTRY
import torch
@METRIC_REGISTRY.register()
def calculate_lpips(img, img2, crop_border, input_order='HWC', test_y_channel=False, **kwargs):
"""Calculate LPIPS.
Ref: https://github.com/xinntao/BasicSR/pull/367
Args:
img (ndarray): Images with range [0, 255].
img2 (ndarray): Images with range [0, 255].
crop_border (int): Cropped pixels in each edge of an image. These
            pixels are not involved in the LPIPS calculation.
input_order (str): Whether the input order is 'HWC' or 'CHW'.
Default: 'HWC'.
test_y_channel (bool): Test on Y channel of YCbCr. Default: False.
Returns:
float: LPIPS result.
"""
    assert img.shape == img2.shape, (f'Image shapes are different: {img.shape}, {img2.shape}.')
if input_order not in ['HWC', 'CHW']:
raise ValueError(f'Wrong input_order {input_order}. Supported input_orders are ' '"HWC" and "CHW"')
img = reorder_image(img, input_order=input_order)
img2 = reorder_image(img2, input_order=input_order)
img = img.astype(np.float64)
img2 = img2.astype(np.float64)
if crop_border != 0:
img = img[crop_border:-crop_border, crop_border:-crop_border, ...]
img2 = img2[crop_border:-crop_border, crop_border:-crop_border, ...]
if test_y_channel:
img = to_y_channel(img)
img2 = to_y_channel(img2)
# start calculating LPIPS metrics
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
loss_fn_vgg = lpips.LPIPS(net='vgg', verbose=False).to(DEVICE) # RGB, normalized to [-1,1]
mean = [0.5, 0.5, 0.5]
std = [0.5, 0.5, 0.5]
img_gt = img2 / 255.
img_restored = img / 255.
img_gt, img_restored = img2tensor([img_gt, img_restored], bgr2rgb=True, float32=True)
# norm to [-1, 1]
normalize(img_gt, mean, std, inplace=True)
normalize(img_restored, mean, std, inplace=True)
# calculate lpips
img_gt = img_gt.to(DEVICE)
img_restored = img_restored.to(DEVICE)
loss_fn_vgg.eval()
lpips_val = loss_fn_vgg(img_restored.unsqueeze(0), img_gt.unsqueeze(0))
return lpips_val.detach().cpu().numpy().mean()
| [
"lpips.LPIPS",
"basicsr.metrics.metric_util.to_y_channel",
"basicsr.utils.img2tensor",
"torch.cuda.is_available",
"basicsr.utils.registry.METRIC_REGISTRY.register",
"basicsr.metrics.metric_util.reorder_image",
"torchvision.transforms.functional.normalize"
] | [((260, 286), 'basicsr.utils.registry.METRIC_REGISTRY.register', 'METRIC_REGISTRY.register', ([], {}), '()\n', (284, 286), False, 'from basicsr.utils.registry import METRIC_REGISTRY\n'), ((1189, 1232), 'basicsr.metrics.metric_util.reorder_image', 'reorder_image', (['img'], {'input_order': 'input_order'}), '(img, input_order=input_order)\n', (1202, 1232), False, 'from basicsr.metrics.metric_util import reorder_image, to_y_channel\n'), ((1244, 1288), 'basicsr.metrics.metric_util.reorder_image', 'reorder_image', (['img2'], {'input_order': 'input_order'}), '(img2, input_order=input_order)\n', (1257, 1288), False, 'from basicsr.metrics.metric_util import reorder_image, to_y_channel\n'), ((1972, 2034), 'basicsr.utils.img2tensor', 'img2tensor', (['[img_gt, img_restored]'], {'bgr2rgb': '(True)', 'float32': '(True)'}), '([img_gt, img_restored], bgr2rgb=True, float32=True)\n', (1982, 2034), False, 'from basicsr.utils import img2tensor\n'), ((2061, 2103), 'torchvision.transforms.functional.normalize', 'normalize', (['img_gt', 'mean', 'std'], {'inplace': '(True)'}), '(img_gt, mean, std, inplace=True)\n', (2070, 2103), False, 'from torchvision.transforms.functional import normalize\n'), ((2108, 2156), 'torchvision.transforms.functional.normalize', 'normalize', (['img_restored', 'mean', 'std'], {'inplace': '(True)'}), '(img_restored, mean, std, inplace=True)\n', (2117, 2156), False, 'from torchvision.transforms.functional import normalize\n'), ((1573, 1590), 'basicsr.metrics.metric_util.to_y_channel', 'to_y_channel', (['img'], {}), '(img)\n', (1585, 1590), False, 'from basicsr.metrics.metric_util import reorder_image, to_y_channel\n'), ((1606, 1624), 'basicsr.metrics.metric_util.to_y_channel', 'to_y_channel', (['img2'], {}), '(img2)\n', (1618, 1624), False, 'from basicsr.metrics.metric_util import reorder_image, to_y_channel\n'), ((1700, 1725), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1723, 1725), False, 'import torch\n'), ((1756, 1793), 'lpips.LPIPS', 'lpips.LPIPS', ([], {'net': '"""vgg"""', 'verbose': '(False)'}), "(net='vgg', verbose=False)\n", (1767, 1793), False, 'import lpips\n')] |
from django.test import Client
from django.urls import reverse
import unittest
client = Client()
class TestResizeURLs(unittest.TestCase):
def test_index(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"django.test.Client"
] | [((89, 97), 'django.test.Client', 'Client', ([], {}), '()\n', (95, 97), False, 'from django.test import Client\n'), ((268, 283), 'unittest.main', 'unittest.main', ([], {}), '()\n', (281, 283), False, 'import unittest\n')] |
#!/usr/bin/env python3.6
#coding:utf-8
import argparse
import datetime
import MySQLdb
import json
class DateEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
        elif isinstance(obj, datetime.date):
return obj.strftime("%Y-%m-%d")
else:
return json.JSONEncoder.default(self, obj)
def load_data(args):
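    # Connect to MySQL, fetch the requested fields from the table, and return the rows as dicts (or None on error).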
db = MySQLdb.connect(host=args.host, port=args.port, user=args.user, password=args.password, db=args.db, charset=args.charset)
cursor = db.cursor(MySQLdb.cursors.DictCursor)
try:
sql = "select %s from %s" % (args.field, args.table)
cursor.execute(sql)
results = cursor.fetchall()
except MySQLdb.OperationalError as e:
print("error:", e)
return None
except MySQLdb.ProgrammingError as e:
print("error:", e)
return None
db.close()
return results
def main():
parser = argparse.ArgumentParser(description='Dump data from mysql to json')
parser.add_argument('--host', type=str, default="127.0.0.1", help='Connect to host')
parser.add_argument('--port', type=int, default=3306, help='Port number to use for connection')
parser.add_argument('--user', type=str, default="root", help='User for login')
parser.add_argument('--password', default="<PASSWORD>", type=str, help='Password to use when connecting to server')
parser.add_argument('--db', default="qos", type=str, help='Database to use')
parser.add_argument('--table', default="app_symbol", type=str, help='Table to use')
parser.add_argument('--field', type=str, default="*", help='Filed to select')
parser.add_argument('--charset', type=str, default="utf8", help='the connection character set will be changed to this character set')
args = parser.parse_args()
data = load_data(args)
if data:
json_data = json.dumps(data, ensure_ascii=False, cls=DateEncoder)
print(json_data)
if __name__ == '__main__':
main()
| [
"MySQLdb.connect",
"json.dumps",
"json.JSONEncoder.default",
"argparse.ArgumentParser"
] | [((462, 588), 'MySQLdb.connect', 'MySQLdb.connect', ([], {'host': 'args.host', 'port': 'args.port', 'user': 'args.user', 'password': 'args.password', 'db': 'args.db', 'charset': 'args.charset'}), '(host=args.host, port=args.port, user=args.user, password=\n args.password, db=args.db, charset=args.charset)\n', (477, 588), False, 'import MySQLdb\n'), ((1013, 1080), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Dump data from mysql to json"""'}), "(description='Dump data from mysql to json')\n", (1036, 1080), False, 'import argparse\n'), ((1953, 2006), 'json.dumps', 'json.dumps', (['data'], {'ensure_ascii': '(False)', 'cls': 'DateEncoder'}), '(data, ensure_ascii=False, cls=DateEncoder)\n', (1963, 2006), False, 'import json\n'), ((393, 428), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'obj'], {}), '(self, obj)\n', (417, 428), False, 'import json\n')] |
'''
Created on 30-Jan-2021
@author: <NAME>
'''
from support.razor_logger import logger
class ArgumentsManager(object):
'''
classdocs
'''
def __init__(self):
'''
Constructor
'''
from argparse import ArgumentParser
self.__args_parser = ArgumentParser()
self.__parsed_args = None
def load_arguments(self):
logger.info("Loading all the required arguments...")
self.__args_parser.add_argument('-f', '--file', type=str, help='Path of the razor YAML files containing the required actions to be performed.')
logger.info("Arguments added successfully in the parser...")
logger.info("Parsing command line arguments....")
self.__parsed_args = self.__args_parser.parse_args()
logger.info("Arguments parsed successfully!")
return self
def get_arg_value(self, arg_key):
if arg_key in self.__parsed_args:
return getattr(self.__parsed_args,arg_key)
else:
            err_msg = 'Unable to retrieve value from the parsed arguments using the following key: {}'.format(arg_key)
logger.error(err_msg)
raise KeyError(err_msg)
| [
"support.razor_logger.logger.error",
"support.razor_logger.logger.info",
"argparse.ArgumentParser"
] | [((299, 315), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (313, 315), False, 'from argparse import ArgumentParser\n'), ((403, 455), 'support.razor_logger.logger.info', 'logger.info', (['"""Loading all the required arguments..."""'], {}), "('Loading all the required arguments...')\n", (414, 455), False, 'from support.razor_logger import logger\n'), ((625, 685), 'support.razor_logger.logger.info', 'logger.info', (['"""Arguments added successfully in the parser..."""'], {}), "('Arguments added successfully in the parser...')\n", (636, 685), False, 'from support.razor_logger import logger\n'), ((694, 743), 'support.razor_logger.logger.info', 'logger.info', (['"""Parsing command line arguments...."""'], {}), "('Parsing command line arguments....')\n", (705, 743), False, 'from support.razor_logger import logger\n'), ((814, 859), 'support.razor_logger.logger.info', 'logger.info', (['"""Arguments parsed successfully!"""'], {}), "('Arguments parsed successfully!')\n", (825, 859), False, 'from support.razor_logger import logger\n'), ((1165, 1186), 'support.razor_logger.logger.error', 'logger.error', (['err_msg'], {}), '(err_msg)\n', (1177, 1186), False, 'from support.razor_logger import logger\n')] |
import json
from graphene_django.utils.testing import GraphQLTestCase
from .helpers import getClearResponse
class GraphQLTestCase(GraphQLTestCase):
def assertKanbonError(self, resp):
self.assertEqual(resp.status_code, 200)
content = json.loads(resp.content)
values = getClearResponse(response=content)
# Assert that there is no error or the error is empty.
self.assertEqual('error' in values and values['error'], None)
# Assert that the response is 'ok'
self.assertEqual(values.get('ok'), True)
| [
"json.loads"
] | [((257, 281), 'json.loads', 'json.loads', (['resp.content'], {}), '(resp.content)\n', (267, 281), False, 'import json\n')] |
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 6 22:41:15 2020
@author: <NAME>
"""
from keras.datasets import mnist
import matplotlib.pyplot as plt
# Scientific and vector computation for python
import numpy as np
# Optimization module in scipy
from scipy import optimize
from keras.utils import np_utils
def randInitializeWeights(L_in, L_out, epsilon_init=0.12):
"""Initializing weitghs for all neurons between given input layer 'L_in'
and output layer 'L_out' to some initila value different from each
other hence avoiding 'PROBLEM OF SYMMETRIC WEIGHTS'
NOTE:this fucntion take layer l as L_in and layer l+1 as layer L_out and
return a matrix of shape (L_out)*(L_in +1)
"""
W = np.zeros((L_out, 1 + L_in))
W = np.random.rand(L_out, 1 + L_in) * 2 * epsilon_init - epsilon_init
return W
def sigmoid(z):
"""Computes the sigmoid of z.
"""
g =1.0 / (1.0 + np.exp(-z))
return g
def sigmoidGradient(z):
"""Computes the gradient of sigmoid of z.
"""
g = np.zeros(z.shape)
g = sigmoid(z) * (1 - sigmoid(z))
return g
def nnCostFunction(nn_params,
input_layer_size,
hidden_layer_size,
num_labels,
X, y, lambda_=0.0):
# Reshape nn_params back into the parameters Theta1 and Theta2, the weight matrices
# for our 2 layer neural network
Theta1 = np.reshape(nn_params[:hidden_layer_size * (input_layer_size + 1)],
(hidden_layer_size, (input_layer_size + 1)))
Theta2 = np.reshape(nn_params[(hidden_layer_size * (input_layer_size + 1)):],
(num_labels, (hidden_layer_size + 1)))
# Setup some useful variables
m = y.size
colOFonesFora1=np.ones([m,1], dtype = int)
a1=np.c_[colOFonesFora1,X]
z2=a1.dot(Theta1.T)
# Calculate activations in the second layer.
a2=sigmoid(z2)
colOfonesFora2=np.ones([m,1], dtype = int)
a2=np.c_[colOfonesFora2,a2]
z3=a2.dot(Theta2.T)
# Calculate the activation of the third layer.
a3=sigmoid(z3)
    # a3 is an (m x 10) matrix of output-layer activations
    # flatten y into a 1-D vector of integer class labels
    y_matrix = y.reshape(-1)
    '''y holds one integer label per example; convert each label into a
    one-hot row of length num_labels (10) so it can be compared directly
    with the 10 output units of the network'''
y_matrix = np.eye(num_labels)[y_matrix]
    # preparing regularization terms
temp1 = Theta1
temp2 = Theta2
J = 0
Theta1_grad = np.zeros(Theta1.shape)
Theta2_grad = np.zeros(Theta2.shape)
reg_term = (lambda_ / (2 * m)) * (np.sum(np.square(temp1[:, 1:])) + np.sum(np.square(temp2[:, 1:])))
    # temp1[:, 1:] (and temp2[:, 1:]) select all of Theta apart from the first column of bias terms
J=(-1/ (m))*(np.sum((y_matrix*(np.log(a3)))+((1-y_matrix)*np.log(1-a3))))+reg_term
    # instead of using a loop, we compute the deltas in vectorized form
delta_3 = a3 - y_matrix
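    # Backpropagate the output error through Theta2, dropping the bias column, and scale by the sigmoid gradient of z2.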
delta_2 = delta_3.dot(Theta2)[:, 1:] * sigmoidGradient(a1.dot(Theta1.T))
Delta1 = delta_2.T.dot(a1)
Delta2 = delta_3.T.dot(a2)
Theta1_grad = (1 / m) * Delta1
Theta1_grad[:, 1:] = Theta1_grad[:, 1:] + (lambda_ / m) * Theta1[:, 1:]
Theta2_grad = (1 / m) * Delta2
Theta2_grad[:, 1:] = Theta2_grad[:, 1:] + (lambda_ / m) * Theta2[:, 1:]
# ================================================================
# Unroll gradients
# grad = np.concatenate([Theta1_grad.ravel(order=order), Theta2_grad.ravel(order=order)])
grad = np.concatenate([Theta1_grad.ravel(), Theta2_grad.ravel()])
return J, grad
def predict(Theta1, Theta2, X):
"""
Predict the label of an input given a trained neural network
Outputs the predicted label of X given the trained weights of a neural
network(Theta1, Theta2)
"""
# Useful values
m = X.shape[0]
num_labels = Theta2.shape[0]
# You need to return the following variables correctly
p = np.zeros(m)
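    # Forward propagate: prepend a bias column at each layer, then pick the class with the highest output activation.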
h1 = sigmoid(np.dot(np.concatenate([np.ones((m, 1)), X], axis=1), Theta1.T))
h2 = sigmoid(np.dot(np.concatenate([np.ones((m, 1)), h1], axis=1), Theta2.T))
p = np.argmax(h2, axis=1)
return p
# load (downloaded if needed) the MNIST dataset
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# plot 4 images as gray scale
plt.subplot(221)
plt.imshow(X_train[0], cmap=plt.get_cmap('gray'))
plt.subplot(222)
plt.imshow(X_train[1], cmap=plt.get_cmap('gray'))
plt.subplot(223)
plt.imshow(X_train[2], cmap=plt.get_cmap('gray'))
plt.subplot(224)
plt.imshow(X_train[3], cmap=plt.get_cmap('gray'))
# show the plot
plt.show()
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# flatten 28*28 images to a 784 vector for each image
'''The training dataset is structured as a 3-dimensional array of instance,
image width and image height. For a multi-layer perceptron model we must
reduce the images down into a vector of pixels. In this case the 28×28
sized images will be 784 pixel input values.
We can also reduce our memory requirements by forcing the precision of
the pixel values to be 32 bit, the default precision used by Keras anyway.'''
num_pixels = X_train.shape[1] * X_train.shape[2]#num_pixels=row*column
X_train = X_train.reshape((X_train.shape[0], num_pixels)).astype('float32')
X_test = X_test.reshape((X_test.shape[0], num_pixels)).astype('float32')
'''The pixel values are gray scale between 0 and 255. It is almost always
a good idea to perform some scaling of input values when using neural
network models. Because the scale is well known and well behaved, we
can very quickly normalize the pixel values to the range 0 and 1 by
dividing each value by the maximum of 255.'''
# normalize inputs from 0-255 to 0-1
X_train = X_train / 255
X_test = X_test / 255
# one hot encode outputs
#y_train = np_utils.to_categorical(y_train)
#y_test = np_utils.to_categorical(y_test)
num_classes = 10  # ten digit classes; one-hot encoding is handled inside nnCostFunction via np.eye
input_layer_size=num_pixels
hidden_layer_size=800
output_layer_size=10
initial_Theta1 = randInitializeWeights(input_layer_size, hidden_layer_size)
initial_Theta2 = randInitializeWeights(hidden_layer_size,output_layer_size)
# Unroll parameters
initial_nn_params = np.concatenate([initial_Theta1.ravel(), initial_Theta2.ravel()], axis=0)
lambda_ = 0.5
J, _ = nnCostFunction(initial_nn_params, input_layer_size, hidden_layer_size,
output_layer_size,X_train,y_train, lambda_)
options= {'maxiter': 400}
# You should also try different values of lambda
#lambda_ = 3
# Create "short hand" for the cost function to be minimized
costFunction = lambda p: nnCostFunction(p, input_layer_size,
hidden_layer_size,
output_layer_size,X_train,y_train, lambda_)
# Now, costFunction is a function that takes in only one argument
# (the neural network parameters)
res = optimize.minimize(costFunction,
initial_nn_params,
jac=True,
method='TNC',
options=options)
# get the solution of the optimization
nn_params = res.x
# Obtain Theta1 and Theta2 back from nn_params
Theta1 = np.reshape(nn_params[:hidden_layer_size * (input_layer_size + 1)],
(hidden_layer_size, (input_layer_size + 1)))
Theta2 = np.reshape(nn_params[(hidden_layer_size * (input_layer_size + 1)):],
(output_layer_size, (hidden_layer_size + 1)))
pred = predict(Theta1, Theta2, X_train)
print('Training Set Accuracy: %f' % (np.mean(pred == y_train) * 100))
pred = predict(Theta1, Theta2, X_test)
print('Test Set Accuracy: %f' % (np.mean(pred == y_test) * 100))
| [
"numpy.mean",
"numpy.eye",
"numpy.reshape",
"numpy.ones",
"keras.datasets.mnist.load_data",
"numpy.random.rand",
"scipy.optimize.minimize",
"numpy.log",
"numpy.argmax",
"numpy.square",
"numpy.exp",
"numpy.zeros",
"matplotlib.pyplot.get_cmap",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show"
] | [((4396, 4413), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (4411, 4413), False, 'from keras.datasets import mnist\n'), ((4446, 4462), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(221)'], {}), '(221)\n', (4457, 4462), True, 'import matplotlib.pyplot as plt\n'), ((4515, 4531), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(222)'], {}), '(222)\n', (4526, 4531), True, 'import matplotlib.pyplot as plt\n'), ((4584, 4600), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(223)'], {}), '(223)\n', (4595, 4600), True, 'import matplotlib.pyplot as plt\n'), ((4653, 4669), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(224)'], {}), '(224)\n', (4664, 4669), True, 'import matplotlib.pyplot as plt\n'), ((4739, 4749), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4747, 4749), True, 'import matplotlib.pyplot as plt\n'), ((4792, 4809), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (4807, 4809), False, 'from keras.datasets import mnist\n'), ((7082, 7177), 'scipy.optimize.minimize', 'optimize.minimize', (['costFunction', 'initial_nn_params'], {'jac': '(True)', 'method': '"""TNC"""', 'options': 'options'}), "(costFunction, initial_nn_params, jac=True, method='TNC',\n options=options)\n", (7099, 7177), False, 'from scipy import optimize\n'), ((7403, 7517), 'numpy.reshape', 'np.reshape', (['nn_params[:hidden_layer_size * (input_layer_size + 1)]', '(hidden_layer_size, input_layer_size + 1)'], {}), '(nn_params[:hidden_layer_size * (input_layer_size + 1)], (\n hidden_layer_size, input_layer_size + 1))\n', (7413, 7517), True, 'import numpy as np\n'), ((7548, 7663), 'numpy.reshape', 'np.reshape', (['nn_params[hidden_layer_size * (input_layer_size + 1):]', '(output_layer_size, hidden_layer_size + 1)'], {}), '(nn_params[hidden_layer_size * (input_layer_size + 1):], (\n output_layer_size, hidden_layer_size + 1))\n', (7558, 7663), True, 'import numpy as np\n'), ((754, 781), 'numpy.zeros', 'np.zeros', (['(L_out, 1 + L_in)'], {}), '((L_out, 1 + L_in))\n', (762, 781), True, 'import numpy as np\n'), ((1071, 1088), 'numpy.zeros', 'np.zeros', (['z.shape'], {}), '(z.shape)\n', (1079, 1088), True, 'import numpy as np\n'), ((1463, 1577), 'numpy.reshape', 'np.reshape', (['nn_params[:hidden_layer_size * (input_layer_size + 1)]', '(hidden_layer_size, input_layer_size + 1)'], {}), '(nn_params[:hidden_layer_size * (input_layer_size + 1)], (\n hidden_layer_size, input_layer_size + 1))\n', (1473, 1577), True, 'import numpy as np\n'), ((1616, 1724), 'numpy.reshape', 'np.reshape', (['nn_params[hidden_layer_size * (input_layer_size + 1):]', '(num_labels, hidden_layer_size + 1)'], {}), '(nn_params[hidden_layer_size * (input_layer_size + 1):], (\n num_labels, hidden_layer_size + 1))\n', (1626, 1724), True, 'import numpy as np\n'), ((1822, 1848), 'numpy.ones', 'np.ones', (['[m, 1]'], {'dtype': 'int'}), '([m, 1], dtype=int)\n', (1829, 1848), True, 'import numpy as np\n'), ((1997, 2023), 'numpy.ones', 'np.ones', (['[m, 1]'], {'dtype': 'int'}), '([m, 1], dtype=int)\n', (2004, 2023), True, 'import numpy as np\n'), ((2594, 2616), 'numpy.zeros', 'np.zeros', (['Theta1.shape'], {}), '(Theta1.shape)\n', (2602, 2616), True, 'import numpy as np\n'), ((2636, 2658), 'numpy.zeros', 'np.zeros', (['Theta2.shape'], {}), '(Theta2.shape)\n', (2644, 2658), True, 'import numpy as np\n'), ((4083, 4094), 'numpy.zeros', 'np.zeros', (['m'], {}), '(m)\n', (4091, 4094), True, 'import numpy as np\n'), ((4269, 4290), 'numpy.argmax', 'np.argmax', (['h2'], {'axis': '(1)'}), '(h2, axis=1)\n', (4278, 
4290), True, 'import numpy as np\n'), ((2456, 2474), 'numpy.eye', 'np.eye', (['num_labels'], {}), '(num_labels)\n', (2462, 2474), True, 'import numpy as np\n'), ((4492, 4512), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (4504, 4512), True, 'import matplotlib.pyplot as plt\n'), ((4561, 4581), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (4573, 4581), True, 'import matplotlib.pyplot as plt\n'), ((4630, 4650), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (4642, 4650), True, 'import matplotlib.pyplot as plt\n'), ((4699, 4719), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (4711, 4719), True, 'import matplotlib.pyplot as plt\n'), ((955, 965), 'numpy.exp', 'np.exp', (['(-z)'], {}), '(-z)\n', (961, 965), True, 'import numpy as np\n'), ((7765, 7789), 'numpy.mean', 'np.mean', (['(pred == y_train)'], {}), '(pred == y_train)\n', (7772, 7789), True, 'import numpy as np\n'), ((7872, 7895), 'numpy.mean', 'np.mean', (['(pred == y_test)'], {}), '(pred == y_test)\n', (7879, 7895), True, 'import numpy as np\n'), ((791, 822), 'numpy.random.rand', 'np.random.rand', (['L_out', '(1 + L_in)'], {}), '(L_out, 1 + L_in)\n', (805, 822), True, 'import numpy as np\n'), ((2705, 2728), 'numpy.square', 'np.square', (['temp1[:, 1:]'], {}), '(temp1[:, 1:])\n', (2714, 2728), True, 'import numpy as np\n'), ((2739, 2762), 'numpy.square', 'np.square', (['temp2[:, 1:]'], {}), '(temp2[:, 1:])\n', (2748, 2762), True, 'import numpy as np\n'), ((4136, 4151), 'numpy.ones', 'np.ones', (['(m, 1)'], {}), '((m, 1))\n', (4143, 4151), True, 'import numpy as np\n'), ((4218, 4233), 'numpy.ones', 'np.ones', (['(m, 1)'], {}), '((m, 1))\n', (4225, 4233), True, 'import numpy as np\n'), ((2885, 2895), 'numpy.log', 'np.log', (['a3'], {}), '(a3)\n', (2891, 2895), True, 'import numpy as np\n'), ((2912, 2926), 'numpy.log', 'np.log', (['(1 - a3)'], {}), '(1 - a3)\n', (2918, 2926), True, 'import numpy as np\n')] |
import os
from flask import Flask
def create_app(test_config=None):
# create and configure the app
app = Flask(__name__, instance_relative_config=True)
FLASK_ENV = os.environ.get('FLASK_ENV', None)
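    # Pick the secret key and MongoDB URI according to the deployment environment.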
if FLASK_ENV == 'production':
app.config.from_mapping(
SECRET_KEY='prod',
MONGO_URI=os.environ.get('MONGODB_URI')
)
else:
app.config.from_mapping(
SECRET_KEY='dev',
MONGO_URI="mongodb://mongo:27017/bolBandsDB"
)
# mongo = PyMongo(app)
if test_config is None:
# load the instance config, if it exists, when not testing
app.config.from_pyfile('config.py', silent=True)
else:
# load the test config if passed in
app.config.from_mapping(test_config)
# ensure the instance folder exists
try:
os.makedirs(app.instance_path)
except OSError:
pass
from . import api
app.register_blueprint(api.bp)
return app
| [
"os.environ.get",
"os.makedirs",
"flask.Flask"
] | [((116, 162), 'flask.Flask', 'Flask', (['__name__'], {'instance_relative_config': '(True)'}), '(__name__, instance_relative_config=True)\n', (121, 162), False, 'from flask import Flask\n'), ((180, 213), 'os.environ.get', 'os.environ.get', (['"""FLASK_ENV"""', 'None'], {}), "('FLASK_ENV', None)\n", (194, 213), False, 'import os\n'), ((853, 883), 'os.makedirs', 'os.makedirs', (['app.instance_path'], {}), '(app.instance_path)\n', (864, 883), False, 'import os\n'), ((335, 364), 'os.environ.get', 'os.environ.get', (['"""MONGODB_URI"""'], {}), "('MONGODB_URI')\n", (349, 364), False, 'import os\n')] |
import logging
import numpy as np
from easydict import EasyDict as edict
logger = logging.getLogger()
__C = edict()
cfg = __C
__C.ANN_NAME = 'mpii.json'
__C.DATA_DIR = 'data'
__C.CONFIG_NAME = ''
__C.CUDA = True
__C.WORKERS = 6
__C.SEED = -1
__C.DEBUG = False
# Training options
__C.TRAIN = edict()
__C.TRAIN.BATCH_SIZE = [24]
__C.TRAIN.MAX_EPOCH = 120
__C.TRAIN.DISCRIMINATOR_LR = 2e-4
__C.TRAIN.GENERATOR_LR = 2e-4
__C.TRAIN.NET_E = ''
__C.TRAIN.NET_G = ''
# Modal options
__C.GAN = edict()
def _merge_a_into_b(a, b):
"""Merge config dictionary a into config dictionary b, clobbering the
options in b whenever they are also specified in a.
"""
if type(a) is not edict:
return
for k, v in a.items():
# a must specify keys that are in b
if not k in b:
raise KeyError('{} is not a valid config key'.format(k))
# the types must match, too
old_type = type(b[k])
if old_type is not type(v):
if isinstance(b[k], np.ndarray):
v = np.array(v, dtype=b[k].dtype)
else:
raise ValueError(('Type mismatch ({} vs. {}) '
'for config key: {}').format(type(b[k]),
type(v), k))
# recursively merge dicts
if type(v) is edict:
try:
_merge_a_into_b(a[k], b[k])
except:
logger.info('Error under config key: {}'.format(k))
raise
else:
b[k] = v
def cfg_from_file(filename):
"""Load a config file and merge it into the default options."""
import yaml
with open(filename, 'r') as f:
yaml_cfg = edict(yaml.load(f, Loader=yaml.FullLoader))
_merge_a_into_b(yaml_cfg, __C)
| [
"logging.getLogger",
"easydict.EasyDict",
"numpy.array",
"yaml.load"
] | [((84, 103), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (101, 103), False, 'import logging\n'), ((111, 118), 'easydict.EasyDict', 'edict', ([], {}), '()\n', (116, 118), True, 'from easydict import EasyDict as edict\n'), ((296, 303), 'easydict.EasyDict', 'edict', ([], {}), '()\n', (301, 303), True, 'from easydict import EasyDict as edict\n'), ((491, 498), 'easydict.EasyDict', 'edict', ([], {}), '()\n', (496, 498), True, 'from easydict import EasyDict as edict\n'), ((1749, 1785), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.FullLoader'}), '(f, Loader=yaml.FullLoader)\n', (1758, 1785), False, 'import yaml\n'), ((1042, 1071), 'numpy.array', 'np.array', (['v'], {'dtype': 'b[k].dtype'}), '(v, dtype=b[k].dtype)\n', (1050, 1071), True, 'import numpy as np\n')] |
from typing import Any
from unittest import TestCase
from lf3py.lang.sequence import first, last
from lf3py.test.helper import data_provider
class TestSequence(TestCase):
@data_provider([
([1, 2, 3], 1),
(map(lambda a: a, [1, 2, 3]), 1),
(filter(lambda a: True, [1, 2, 3]), 1),
({'a': 1, 'b': 2, 'c': 3}.values(), 1),
({'a': 1, 'b': 2, 'c': 3}.keys(), 'a'),
])
def test_first(self, iter: Any, expected: Any):
self.assertEqual(expected, first(iter))
@data_provider([
([1, 2, 3], 3),
(map(lambda a: a, [1, 2, 3]), 3),
(filter(lambda a: True, [1, 2, 3]), 3),
({'a': 1, 'b': 2, 'c': 3}.values(), 3),
({'a': 1, 'b': 2, 'c': 3}.keys(), 'c'),
])
def test_last(self, iter: Any, expected: Any):
self.assertEqual(expected, last(iter))
| [
"lf3py.lang.sequence.last",
"lf3py.lang.sequence.first"
] | [((499, 510), 'lf3py.lang.sequence.first', 'first', (['iter'], {}), '(iter)\n', (504, 510), False, 'from lf3py.lang.sequence import first, last\n'), ((837, 847), 'lf3py.lang.sequence.last', 'last', (['iter'], {}), '(iter)\n', (841, 847), False, 'from lf3py.lang.sequence import first, last\n')] |
from torch.utils import data
import torch
from config import PROJECT_PATH
import numpy as np
class NpzDataset(data.Dataset):
def __init__(self, dataset):
file_path = "{}/attacked_images/{}/{}_images.npz".format(PROJECT_PATH, dataset, dataset)
file_data = np.load(file_path)
self.dataset = dataset
self.images = file_data["images"]
self.labels = file_data["labels"]
def __len__(self):
return len(self.labels)
def __getitem__(self, index):
image = self.images[index]
label = self.labels[index]
return torch.from_numpy(image),label
class NpzExtraDataset(NpzDataset):
def __init__(self, dataset):
super(NpzExtraDataset, self).__init__(dataset)
file_path = "{}/attacked_images/{}/{}_images_for_candidate.npz".format(PROJECT_PATH, dataset, dataset)
file_data = np.load(file_path)
self.dataset = dataset
self.images = file_data["images"]
self.labels = file_data["labels"]
| [
"numpy.load",
"torch.from_numpy"
] | [((277, 295), 'numpy.load', 'np.load', (['file_path'], {}), '(file_path)\n', (284, 295), True, 'import numpy as np\n'), ((873, 891), 'numpy.load', 'np.load', (['file_path'], {}), '(file_path)\n', (880, 891), True, 'import numpy as np\n'), ((587, 610), 'torch.from_numpy', 'torch.from_numpy', (['image'], {}), '(image)\n', (603, 610), False, 'import torch\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" 工程文件生成
so_ 模块方案
pj_ 模块工程
tp_ 测试工程
"""
import basebuilder
import os
import hashlib as md5
import web.template
render = web.template.render(os.path.join( os.path.split(os.path.realpath(__file__))[0], "template") ,
globals={'type':type,"hasattr":hasattr})
class Msvc2008SolutionBuilder(object):
def __init__(self):
self.props = {}
def GenerateUUIDByName(self, name, ns=None):
""" Generate UUID """
if ns != None:
name = ns + "." + name
if type(name) == unicode:
name = name.encode('utf-8')
s = md5.md5(md5.md5(name).hexdigest()).hexdigest()
return "-".join( (s[:8],s[8:12], s[12:16], s[16:20],s[20:]) ).upper()
def BuildProjectFolder(self):
""" 生成工程文件路径和项目文件路径 root/code/{so_folder} """
if not os.path.exists(self.props["pj_path"]):
os.makedirs(self.props["pj_path"])
if not os.path.exists(self.props["tp_path"]):
os.makedirs(self.props["tp_path"])
return
def BuildSolutionFile(self):
#print self.props.keys
ctx = render.msvc2008_sln_tmpl(self.props)
name = os.path.join(self.props["so_path"], "%s.sln" % self.props["so_name"])
f=open(name, "w")
f.write( str(ctx) )
f.close()
return name
def BuildSolution(self, pps):
self.props = pps
self.BuildProjectFolder()
return self.BuildSolutionFile()
| [
"os.path.exists",
"hashlib.md5",
"os.makedirs",
"os.path.join",
"os.path.realpath"
] | [((1181, 1250), 'os.path.join', 'os.path.join', (["self.props['so_path']", "('%s.sln' % self.props['so_name'])"], {}), "(self.props['so_path'], '%s.sln' % self.props['so_name'])\n", (1193, 1250), False, 'import os\n'), ((848, 885), 'os.path.exists', 'os.path.exists', (["self.props['pj_path']"], {}), "(self.props['pj_path'])\n", (862, 885), False, 'import os\n'), ((899, 933), 'os.makedirs', 'os.makedirs', (["self.props['pj_path']"], {}), "(self.props['pj_path'])\n", (910, 933), False, 'import os\n'), ((949, 986), 'os.path.exists', 'os.path.exists', (["self.props['tp_path']"], {}), "(self.props['tp_path'])\n", (963, 986), False, 'import os\n'), ((1000, 1034), 'os.makedirs', 'os.makedirs', (["self.props['tp_path']"], {}), "(self.props['tp_path'])\n", (1011, 1034), False, 'import os\n'), ((220, 246), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (236, 246), False, 'import os\n'), ((627, 640), 'hashlib.md5', 'md5.md5', (['name'], {}), '(name)\n', (634, 640), True, 'import hashlib as md5\n')] |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Metrics support via influxdb.
https://docs.influxdata.com/influxdb/v1.4/write_protocols/line_protocol_tutorial/
To install influx-db
curl -sL https://repos.influxdata.com/influxdb.key \
| sudo apt-key add - source /etc/lsb-release
source /etc/lsb-release
echo "deb https://repos.influxdata.com/${DISTRIB_ID,,} ${DISTRIB_CODENAME} stable" \
| sudo tee /etc/apt/sources.list.d/influxdb.list
sudo apt-get update && sudo apt-get install influxdb -y
sudo service influxdb start
To create a database:
curl -i -XPOST http://localhost:8086/query --data-urlencode \
"q=CREATE DATABASE SpinnakerBuildTool"
"""
import datetime
import logging
try:
from urllib2 import urlopen, Request
except ImportError:
from urllib.request import urlopen, Request
from buildtool import add_parser_argument
from buildtool.inmemory_metrics import InMemoryMetricsRegistry
EPOCH = datetime.datetime(1970, 1, 1)
SECONDS_PER_DAY = 24 * 60 * 60
NANOS_PER_SECOND = 1000000000
def to_timestamp(utc):
"""Convert UTC datetime into epoch timestamp in nanoseconds for influxdb."""
time_delta = utc - EPOCH
epoch_secs = time_delta.seconds + time_delta.days * SECONDS_PER_DAY
epoch_nanos = epoch_secs * NANOS_PER_SECOND + time_delta.microseconds * 1000
return epoch_nanos
class InfluxDbMetricsRegistry(InMemoryMetricsRegistry):
@staticmethod
def init_argument_parser(parser, defaults):
InMemoryMetricsRegistry.init_argument_parser(parser, defaults)
add_parser_argument(parser, 'influxdb_url', defaults,
'http://localhost:8086',
help='Server address to push metrics to.')
add_parser_argument(parser, 'influxdb_database', defaults,
'SpinnakerBuildTool',
help='Influxdb to push metrics to.')
add_parser_argument(
parser, 'influxdb_reiterate_gauge_secs', defaults, 60,
help='Reiterate gauge values for the specified period of seconds.'
' This is because when they get chunked into time blocks, the'
'values become lost, in particular settling back to 0.')
def __init__(self, *pos_args, **kwargs):
super(InfluxDbMetricsRegistry, self).__init__(*pos_args, **kwargs)
self.__export_func_map = {
'COUNTER': self.__export_counter_points,
'GAUGE': self.__export_gauge_points,
'TIMER': self.__export_timer_points,
}
self.__recent_gauges = set([])
def _do_flush_final_metrics(self):
"""Implements interface."""
self.flush_updated_metrics()
def _do_flush_updated_metrics(self, updated_metrics):
"""Implements interface.
We'll turn the metrics into events for when they changed
because influxDb doesnt really handle counters, rather it
just aggregates events. So we'll treat counter changes as events
with delta values from the prior counter.
"""
super(InfluxDbMetricsRegistry, self)._do_flush_updated_metrics(
updated_metrics)
payload = []
recent_gauges = self.__recent_gauges
self.__recent_gauges = set([])
for metric in updated_metrics:
name = metric.name
label_text = self.__to_label_text(metric)
ingest = self.__export_func_map[metric.family.family_type]
ingest(name, label_text, metric, payload)
remaining_gauges = recent_gauges - self.__recent_gauges
self.__reiterate_recent_gauges(remaining_gauges, payload)
if not payload:
logging.debug('No metrics updated.')
return
url = '{prefix}/write?db={db}'.format(
prefix=self.options.influxdb_url, db=self.options.influxdb_database)
payload_text = '\n'.join(payload)
request = Request(url, data=str.encode(payload_text))
request.get_method = lambda: 'POST'
try:
urlopen(request)
logging.debug('Updated %d metrics to %s', len(payload), url)
except IOError as ioex:
logging.error('Cannot write metrics to %s:\n%s', url, ioex)
def __to_label_text(self, metric):
return ','.join(['%s=%s' % (key, value)
for key, value in metric.labels.items()
if value != ''])
def __reiterate_recent_gauges(self, gauges, payload):
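    # Re-emit the most recent value of gauges seen lately so they do not appear to settle back to 0 between reporting chunks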
now = datetime.datetime.utcnow()
keep_if_newer_than = (
now - datetime.timedelta(0, self.options.influxdb_reiterate_gauge_secs))
for gauge in gauges:
current = gauge.timeseries[-1]
if gauge.value != 0 or current.utc > keep_if_newer_than:
# Gauge is still lingering in our reporting
self.__recent_gauges.add(gauge)
payload.append(
self.__to_payload_line('gauge', gauge.name,
self.__to_label_text(gauge),
current.value, now))
def __to_payload_line(self, type_name, name, labels, value, utc):
if labels.endswith(','):
# This can happen if we have a context but no labels for this occurance
labels = labels[:-1]
if labels:
series = '{name}__{type},{labels}'.format(
name=name, type=type_name, labels=labels)
else:
series = '{name}__{type}'.format(name=name, type=type_name)
return '{series} value={value} {time}'.format(
series=series, value=value, time=to_timestamp(utc))
def __export_counter_points(self, name, label_text, metric, payload):
prev_value = 0
for entry in metric.mark_as_delta():
delta_value = entry.value - prev_value
prev_value = entry.value
payload.append(
self.__to_payload_line('counter', name, label_text,
delta_value, entry.utc))
def __export_gauge_points(self, name, label_text, metric, payload):
self.__recent_gauges.add(metric)
for entry in metric.mark_as_delta():
payload.append(
self.__to_payload_line('gauge', name, label_text,
entry.value, entry.utc))
def __export_timer_points(self, name, label_text, metric, payload):
prev_count = 0
prev_total = 0
for entry in metric.mark_as_delta():
count = entry.value[0]
total_secs = entry.value[1]
delta_count = count - prev_count
delta_total = total_secs - prev_total
prev_count = count
prev_total = total_secs
payload.append(
self.__to_payload_line('count', name, label_text,
delta_count, entry.utc))
payload.append(
self.__to_payload_line('totalSecs', name, label_text,
delta_total, entry.utc))
avg_secs = delta_total / delta_count
payload.append(
self.__to_payload_line('AvgSecs', name, label_text,
avg_secs, entry.utc))
| [
"datetime.datetime",
"buildtool.inmemory_metrics.InMemoryMetricsRegistry.init_argument_parser",
"logging.debug",
"datetime.datetime.utcnow",
"buildtool.add_parser_argument",
"datetime.timedelta",
"logging.error",
"urllib.request.urlopen"
] | [((1493, 1522), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (1510, 1522), False, 'import datetime\n'), ((2008, 2070), 'buildtool.inmemory_metrics.InMemoryMetricsRegistry.init_argument_parser', 'InMemoryMetricsRegistry.init_argument_parser', (['parser', 'defaults'], {}), '(parser, defaults)\n', (2052, 2070), False, 'from buildtool.inmemory_metrics import InMemoryMetricsRegistry\n'), ((2075, 2200), 'buildtool.add_parser_argument', 'add_parser_argument', (['parser', '"""influxdb_url"""', 'defaults', '"""http://localhost:8086"""'], {'help': '"""Server address to push metrics to."""'}), "(parser, 'influxdb_url', defaults,\n 'http://localhost:8086', help='Server address to push metrics to.')\n", (2094, 2200), False, 'from buildtool import add_parser_argument\n'), ((2249, 2370), 'buildtool.add_parser_argument', 'add_parser_argument', (['parser', '"""influxdb_database"""', 'defaults', '"""SpinnakerBuildTool"""'], {'help': '"""Influxdb to push metrics to."""'}), "(parser, 'influxdb_database', defaults,\n 'SpinnakerBuildTool', help='Influxdb to push metrics to.')\n", (2268, 2370), False, 'from buildtool import add_parser_argument\n'), ((2419, 2688), 'buildtool.add_parser_argument', 'add_parser_argument', (['parser', '"""influxdb_reiterate_gauge_secs"""', 'defaults', '(60)'], {'help': '"""Reiterate gauge values for the specified period of seconds. This is because when they get chunked into time blocks, thevalues become lost, in particular settling back to 0."""'}), "(parser, 'influxdb_reiterate_gauge_secs', defaults, 60,\n help=\n 'Reiterate gauge values for the specified period of seconds. This is because when they get chunked into time blocks, thevalues become lost, in particular settling back to 0.'\n )\n", (2438, 2688), False, 'from buildtool import add_parser_argument\n'), ((4792, 4818), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4816, 4818), False, 'import datetime\n'), ((4044, 4080), 'logging.debug', 'logging.debug', (['"""No metrics updated."""'], {}), "('No metrics updated.')\n", (4057, 4080), False, 'import logging\n'), ((4366, 4382), 'urllib.request.urlopen', 'urlopen', (['request'], {}), '(request)\n', (4373, 4382), False, 'from urllib.request import urlopen, Request\n'), ((4860, 4925), 'datetime.timedelta', 'datetime.timedelta', (['(0)', 'self.options.influxdb_reiterate_gauge_secs'], {}), '(0, self.options.influxdb_reiterate_gauge_secs)\n', (4878, 4925), False, 'import datetime\n'), ((4484, 4546), 'logging.error', 'logging.error', (['"""Cannot write metrics to %s:\n%s"""', 'url', 'ioex'], {}), '("""Cannot write metrics to %s:\n%s""", url, ioex)\n', (4497, 4546), False, 'import logging\n')] |
import numpy as np
import cv2
import os
import re
import time
def head_movement(video_path):
face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
cap = cv2.VideoCapture(f"../datasets/videos/{video_path}")
cap.set(cv2.CAP_PROP_FPS, 10)
fps = cap.get(cv2.CAP_PROP_FPS)
print('fps: ', fps)
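    # Accumulators for frame-to-frame movement of the detected face over the sampling window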
total_x, total_y = 0.0, 0.0
prev_x, prev_y = 0.0, 0.0
count_x, count_y = 0, 0
filename = re.findall(r'\d+', video_path)[0]
path = f"./results/{filename}.txt"
# Define and open a file to save emotion statistics
if not os.path.exists(path):
open(path, 'w').close()
f = open(path, "a+")
start_time = time.time()
while( int(time.time() - start_time) < 10 ):
__, img = cap.read()
faces = face_cascade.detectMultiScale(img, 1.1, 20)
for (x,y,w,h) in faces:
cv2.rectangle(img, (x,y), (x+w,y+h), (0,255,0) ,2)
diff_x = abs(w/2 - prev_x)
            diff_y = abs(h/2 - prev_y)
total_x += diff_x
total_y += diff_y
count_x += 1
count_y += 1
print(diff_x, diff_y)
roi_gray = img[y:y+h, x:x+w]
roi_color = img[y:y+h, x:x+w]
prev_x = w/2
prev_y = h/2
cv2.imshow('Head Movement', img)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
print("Total X", total_x)
print("Total Y", total_y)
print("Count X", count_x)
print("Count Y", count_y)
f.write("%s\n" % (total_x))
f.write("%s\n" % (total_y))
f.write("%s\n" % (count_x))
f.write("%s\n" % (count_y))
cap.release()
cv2.destroyAllWindows()
f.close()
| [
"cv2.rectangle",
"os.path.exists",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.CascadeClassifier",
"re.findall",
"time.time"
] | [((122, 182), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_frontalface_default.xml"""'], {}), "('haarcascade_frontalface_default.xml')\n", (143, 182), False, 'import cv2\n'), ((196, 248), 'cv2.VideoCapture', 'cv2.VideoCapture', (['f"""../datasets/videos/{video_path}"""'], {}), "(f'../datasets/videos/{video_path}')\n", (212, 248), False, 'import cv2\n'), ((717, 728), 'time.time', 'time.time', ([], {}), '()\n', (726, 728), False, 'import time\n'), ((1728, 1751), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1749, 1751), False, 'import cv2\n'), ((463, 493), 're.findall', 're.findall', (['"""\\\\d+"""', 'video_path'], {}), "('\\\\d+', video_path)\n", (473, 493), False, 'import re\n'), ((609, 629), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (623, 629), False, 'import os\n'), ((1346, 1378), 'cv2.imshow', 'cv2.imshow', (['"""Head Movement"""', 'img'], {}), "('Head Movement', img)\n", (1356, 1378), False, 'import cv2\n'), ((920, 978), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x + w, y + h)', '(0, 255, 0)', '(2)'], {}), '(img, (x, y), (x + w, y + h), (0, 255, 0), 2)\n', (933, 978), False, 'import cv2\n'), ((745, 756), 'time.time', 'time.time', ([], {}), '()\n', (754, 756), False, 'import time\n'), ((1391, 1405), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1402, 1405), False, 'import cv2\n')] |
from django import template
register = template.Library()
@register.simple_tag
def define(val=None):
return val
def get_at_index(list, index):
return list[index]
@register.filter
def getItem(dict, key):
return dict.get(key)
| [
"django.template.Library"
] | [((39, 57), 'django.template.Library', 'template.Library', ([], {}), '()\n', (55, 57), False, 'from django import template\n')] |
import sys
import pygame
from pygame.locals import *
import itertools
pygame.init()
pygame.font.init()
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GREY = (192, 192, 192)
GREEN = (46, 139, 87)
RED = (220, 20, 60)
BLUE = (25, 25, 112)
BROWN = (244, 164, 96)
PURPLE = (178, 102, 255)
ORANGE = (255, 128, 0)
HEIGHT = 600
WIDTH = 800
FONT = pygame.font.SysFont(None, 60)
display = pygame.display.set_mode((WIDTH, HEIGHT))
display.fill(BLACK)
pygame.display.set_caption('Atari Breakout')
class Blocks:
"""Implements blocks as a collection instead of
as individual block objects """
def __init__(self):
self.width = 100
self.height = 20
self.blocks = self.make_blocks()
def make_blocks(self):
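        # Tile the upper portion of the screen with block-sized rects, row by row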
rects = []
rows = 5
rows_height = HEIGHT//rows
for i in range(0, rows_height, self.height):
for j in range(0, WIDTH, self.width):
rects.append(pygame.Rect(j, i, self.width, self.height))
print('rects', rects)
print('len', len(rects))
return rects
def draw_blocks(self):
colors = itertools.cycle([RED, GREEN, BLUE, PURPLE, ORANGE])
for i in self.blocks:
color = next(colors)
pygame.draw.rect(display, color, i)
return
# removes single block from blocks list when it is hit by ball
# ball being the ball object
def collided(self, ball_object):
for i in self.blocks:
if ball_object.collided(i):
self.blocks.remove(i)
return
class Paddle:
def __init__(self):
self.width = WIDTH // 4
self.height = 10
self.initial_x = (WIDTH//2) - (self.width//2)
self.initial_y = HEIGHT - 50
self.rect = pygame.Rect(self.initial_x, self.initial_y,
self.width, self.height)
print(self.rect)
def move(self, speed):
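        # Ignore the move if it would push the paddle past either edge of the screen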
if self.rect.right + speed > WIDTH or self.rect.left + speed < 0:
return
else:
self.rect = self.rect.move(speed, 0)
def draw(self):
pygame.draw.rect(display, GREY, self.rect)
return
class Ball:
"""Ball object that takes initial speed in x direction (speedx)
and initial speed in y direction(speedy)"""
def __init__(self, speedx, speedy):
self.x = WIDTH//2
self.y = HEIGHT - 60
self.radius = 10
self.speedx = speedx
self.speedy = speedy
def draw(self):
pygame.draw.circle(display, BROWN, (self.x, self.y), self.radius)
def move(self):
if self.x + self.radius + self.speedx >= WIDTH:
self.speedx = -self.speedx
elif self.x + self.speedx <= 0:
self.speedx = abs(self.speedx)
if self.y + self.radius + self.speedy >= HEIGHT:
self.speedy = -self.speedy
elif self.y + self.radius + self.speedy <= 0:
self.speedy = abs(self.speedy)
self.x += self.speedx
self.y += self.speedy
return
# checks if ball has collided with the given pygame rect object
# which may be rect of block or paddle
def collided(self, rect):
if rect.left <= self.x + self.radius and\
self.x - self.radius <= rect.right:
if rect.top < self.y + self.radius < rect.bottom:
self.speedy = -self.speedy
return True
else:
return False
def show_text(text):
text = str(text).encode("UTF-8")
display.fill(BLACK)
my_text = FONT.render(text, True, WHITE)
width, height = FONT.size(text)
display.blit(my_text, (WIDTH//2 - width//2, HEIGHT//2 - height//2))
return
if __name__ == '__main__':
running = True
paddle = Paddle()
blocks = Blocks()
ball = Ball(0, 0)
direction = 0
paused = False
clock = pygame.time.Clock()
while running:
if len(blocks.blocks) == 0:
show_text("GAME OVER")
pygame.display.flip()
pygame.time.wait(2000)
pygame.display.quit()
pygame.quit()
sys.exit(0)
for e in pygame.event.get():
if e.type == QUIT:
pygame.display.quit()
pygame.quit()
sys.exit(0)
elif e.type == KEYDOWN:
if e.key == K_RIGHT:
direction = 5
elif e.key == K_LEFT:
direction = -5
elif e.key == K_p:
paused = not paused
elif e.key == K_SPACE:
if ball.speedx == 0 and ball.speedy == 0:
ball.speedx = 5
ball.speedy = 5
continue
if not paused:
paddle.move(direction)
ball.move()
ball.collided(paddle.rect)
blocks.collided(ball)
display.fill(BLACK)
blocks.draw_blocks()
paddle.draw()
ball.draw()
else:
show_text("PAUSED")
pygame.display.flip()
clock.tick(60)
| [
"pygame.draw.circle",
"itertools.cycle",
"sys.exit",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.time.wait",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.time.Clock",
"pygame.Rect",
"pygame.display.quit",
"pygame.draw.rect",
"pygame.font.init",
"pygame.display.set_caption",
"pygame.font.SysFont"
] | [((71, 84), 'pygame.init', 'pygame.init', ([], {}), '()\n', (82, 84), False, 'import pygame\n'), ((85, 103), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (101, 103), False, 'import pygame\n'), ((336, 365), 'pygame.font.SysFont', 'pygame.font.SysFont', (['None', '(60)'], {}), '(None, 60)\n', (355, 365), False, 'import pygame\n'), ((377, 417), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(WIDTH, HEIGHT)'], {}), '((WIDTH, HEIGHT))\n', (400, 417), False, 'import pygame\n'), ((438, 482), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Atari Breakout"""'], {}), "('Atari Breakout')\n", (464, 482), False, 'import pygame\n'), ((3855, 3874), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (3872, 3874), False, 'import pygame\n'), ((1111, 1162), 'itertools.cycle', 'itertools.cycle', (['[RED, GREEN, BLUE, PURPLE, ORANGE]'], {}), '([RED, GREEN, BLUE, PURPLE, ORANGE])\n', (1126, 1162), False, 'import itertools\n'), ((1758, 1826), 'pygame.Rect', 'pygame.Rect', (['self.initial_x', 'self.initial_y', 'self.width', 'self.height'], {}), '(self.initial_x, self.initial_y, self.width, self.height)\n', (1769, 1826), False, 'import pygame\n'), ((2097, 2139), 'pygame.draw.rect', 'pygame.draw.rect', (['display', 'GREY', 'self.rect'], {}), '(display, GREY, self.rect)\n', (2113, 2139), False, 'import pygame\n'), ((2496, 2561), 'pygame.draw.circle', 'pygame.draw.circle', (['display', 'BROWN', '(self.x, self.y)', 'self.radius'], {}), '(display, BROWN, (self.x, self.y), self.radius)\n', (2514, 2561), False, 'import pygame\n'), ((4135, 4153), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (4151, 4153), False, 'import pygame\n'), ((5067, 5088), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (5086, 5088), False, 'import pygame\n'), ((1238, 1273), 'pygame.draw.rect', 'pygame.draw.rect', (['display', 'color', 'i'], {}), '(display, color, i)\n', (1254, 1273), False, 'import pygame\n'), ((3977, 3998), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3996, 3998), False, 'import pygame\n'), ((4011, 4033), 'pygame.time.wait', 'pygame.time.wait', (['(2000)'], {}), '(2000)\n', (4027, 4033), False, 'import pygame\n'), ((4046, 4067), 'pygame.display.quit', 'pygame.display.quit', ([], {}), '()\n', (4065, 4067), False, 'import pygame\n'), ((4080, 4093), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (4091, 4093), False, 'import pygame\n'), ((4106, 4117), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4114, 4117), False, 'import sys\n'), ((4202, 4223), 'pygame.display.quit', 'pygame.display.quit', ([], {}), '()\n', (4221, 4223), False, 'import pygame\n'), ((4240, 4253), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (4251, 4253), False, 'import pygame\n'), ((4270, 4281), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4278, 4281), False, 'import sys\n'), ((938, 980), 'pygame.Rect', 'pygame.Rect', (['j', 'i', 'self.width', 'self.height'], {}), '(j, i, self.width, self.height)\n', (949, 980), False, 'import pygame\n')] |
# Copyright (C) 2019 by eHealth Africa : http://www.eHealthAfrica.org
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from copy import copy
from datetime import datetime, timedelta
import inspect
import json
import random
from string import ascii_lowercase
from typing import Any, List, Mapping
import uuid
from spavro.io import validate as spavro_validate
from spavro.schema import parse as parse_schema
from aether.python.avro.schema import Node
from aether.python.avro.tools import (
AvroValidationException,
random_avro,
validate
)
from aether.python.utils import replace_nested
'''
Utility Functions
'''
def guid():
return str(uuid.uuid4())
def single_choice(choices):
# return one item from a list
return random.choice(choices)
def subset(choices, max=None):
# return a random subset from some choices
if not max:
max = len(choices)
size = random.randint(1, max)
return random.sample(choices, size)
def str_time_prop(start, end, fraction):
# get an iso string of a point a fraction
# between a start and end datetime
stime = datetime.fromisoformat(start)
etime = datetime.fromisoformat(end)
new_time = stime + fraction * (etime - stime)
return new_time.isoformat()
'''
Callable From the Generator
(only uses kwargs, and might know how to use Nodes)
'''
def random_date(start=None, end=None, min_prop=0, max_prop=1):
prop = min_prop + random.random() * (max_prop - min_prop)
return str_time_prop(start, end, prop)
def random_str(constant=None, max=32):
if constant:
return constant
return ''.join([
c for i in range(max) for c in random.choice(ascii_lowercase)
])
def random_numeric(min=0, max=1, cast=int):
return cast(random.random() * (max - min) + min)
def select1(choices=None, node=None):
if not choices:
choices = [i.get('value') for i in node.__lookup]
return single_choice(choices)
def select_many(choices=None, node=None, max=None, path=None):
if not choices:
choices = [i.get('value') for i in node.__lookup]
return subset(choices, max)
class SampleGenerator(object):
_base_name: str
_limits: Mapping[str, List[Any]]
_exclude: List[str]
extended_handlers: Mapping[str, tuple]
regular_handlers: Mapping[str, tuple]
named_handlers: Mapping[str, tuple]
value_overrides: Mapping[str, str]
def __init__(self, schema: Mapping[Any, Any] = None, node: Node = None, raw_schema: str = None):
if not any([schema, node, raw_schema]):
raise ValueError(
'Must include one of: schema (dict) node (Node) or raw_schema (JSON)')
if node:
schema = node
else:
if schema:
schema = Node(schema)
else:
schema = Node(json.loads(raw_schema))
self._base_name = schema.name
self.load_defaults()
self.schema = schema
self.spavro_schema = parse_schema(self.schema._source)
def load_defaults(self):
self._limits = {
'datetime': [
(datetime.now() - timedelta(days=365)).isoformat(),
datetime.now().isoformat()
],
'int': [0, 1000],
'float': [0.0, 1.0]
}
self._exclude = []
# handlers are defined in form: (fn, **default_kwargs)
self.extended_handlers = {
'string': (random_str, {'max': 10}),
'select': (select_many, {}),
'select1': (select1, {}),
'dateTime': (random_date, {
'start': self._limits['datetime'][0],
'end': self._limits['datetime'][1]
})
}
# If there is not an explicit handler, we will use the avro.tools.random_avro tool to fill.
self.regular_handlers = {
'string': (random_str, {'max': 10}),
'int': (random_numeric, {
'min': self._limits['int'][0],
'max': self._limits['int'][1]
}),
'float': (random_numeric, {
'min': self._limits['float'][0],
'max': self._limits['float'][1],
'cast': float
})
}
self.named_handlers = {
'id': (guid, None),
'_id': (guid, None)
}
self.value_overrides = {}
'''
Internal function / argument handling
'''
def get_kwargs(self, fn, kwargs, path, node):
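        # Pass 'path'/'node' and any per-field overrides only when the handler's signature accepts them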
kwargs = copy(kwargs)
sig = inspect.signature(fn)
allowed = [i for i in sig.parameters.keys()]
if 'path' in allowed:
kwargs['path'] = path
if 'node' in allowed:
kwargs['node'] = node
name = path[len(self._base_name) + 1::]
rules = self.value_overrides.get(name, [])
for arg in rules:
if arg in allowed:
kwargs[arg] = rules.get(arg)
return kwargs
def handle(self, fn, kwargs):
if kwargs:
return fn(**kwargs)
else:
return fn()
'''
Set Handlers
'''
def register_type_handler(self, _type, handler, extended=False):
if extended:
self.extended_handlers[_type] = handler
else:
self.regular_handlers[_type] = handler
def register_field_handler(self, path, handler):
self.named_handlers[path] = handler
def set_overrides(self, path, rules):
for k, v in rules.items():
self.set_override(path, k, v)
def set_override(self, path, _type, value):
rules = self.value_overrides.get(path, {})
rules[_type] = value
self.value_overrides[path] = rules
def set_exclusion(self, exclusion):
self._exclude.append(exclusion)
'''
Render the sample
'''
def from_schema(self, path):
if path in self.named_handlers:
return self.handle(*self.named_handlers[path])
path = f'{self.schema.name}.{path}'
node = self.schema.get_node(path)
try:
_type = getattr(node, '__extended_type')
if _type not in self.extended_handlers:
raise AttributeError(f'type: {_type} not handled')
fn, kwargs = self.extended_handlers[_type]
except AttributeError:
_type = node.avro_type
if isinstance(_type, list) and len(_type) > 1:
if 'null' in _type:
_type.remove('null')
_type = random.choice(_type)
elif isinstance(_type, list):
_type = _type[0]
try:
fn, kwargs = self.regular_handlers[_type]
except KeyError:
return random_avro(json.loads(node._source))
extended_kwargs = self.get_kwargs(fn, kwargs, path, node)
return self.handle(fn, extended_kwargs)
def make_sample(self):
out = {}
for node in self.schema.iter_children():
path = node[(len(self.schema.name) + 1)::]
if path in self._exclude:
continue
val = self.from_schema(path)
replace_nested(out, path.split('.'), val)
ok = spavro_validate(self.spavro_schema, out)
if not ok:
result = validate(self.spavro_schema, out)
if result.errors:
raise AvroValidationException(result.errors)
else:
raise AvroValidationException(
'Avro Validation failed, but detailed reporting found no errors.')
return out
| [
"aether.python.avro.schema.Node",
"random.sample",
"random.choice",
"json.loads",
"inspect.signature",
"aether.python.avro.tools.validate",
"uuid.uuid4",
"datetime.timedelta",
"datetime.datetime.now",
"spavro.schema.parse",
"random.random",
"aether.python.avro.tools.AvroValidationException",
"datetime.datetime.fromisoformat",
"spavro.io.validate",
"copy.copy",
"random.randint"
] | [((1358, 1380), 'random.choice', 'random.choice', (['choices'], {}), '(choices)\n', (1371, 1380), False, 'import random\n'), ((1515, 1537), 'random.randint', 'random.randint', (['(1)', 'max'], {}), '(1, max)\n', (1529, 1537), False, 'import random\n'), ((1549, 1577), 'random.sample', 'random.sample', (['choices', 'size'], {}), '(choices, size)\n', (1562, 1577), False, 'import random\n'), ((1718, 1747), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['start'], {}), '(start)\n', (1740, 1747), False, 'from datetime import datetime, timedelta\n'), ((1760, 1787), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['end'], {}), '(end)\n', (1782, 1787), False, 'from datetime import datetime, timedelta\n'), ((1269, 1281), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1279, 1281), False, 'import uuid\n'), ((3605, 3638), 'spavro.schema.parse', 'parse_schema', (['self.schema._source'], {}), '(self.schema._source)\n', (3617, 3638), True, 'from spavro.schema import parse as parse_schema\n'), ((5135, 5147), 'copy.copy', 'copy', (['kwargs'], {}), '(kwargs)\n', (5139, 5147), False, 'from copy import copy\n'), ((5162, 5183), 'inspect.signature', 'inspect.signature', (['fn'], {}), '(fn)\n', (5179, 5183), False, 'import inspect\n'), ((7852, 7892), 'spavro.io.validate', 'spavro_validate', (['self.spavro_schema', 'out'], {}), '(self.spavro_schema, out)\n', (7867, 7892), True, 'from spavro.io import validate as spavro_validate\n'), ((2055, 2070), 'random.random', 'random.random', ([], {}), '()\n', (2068, 2070), False, 'import random\n'), ((7933, 7966), 'aether.python.avro.tools.validate', 'validate', (['self.spavro_schema', 'out'], {}), '(self.spavro_schema, out)\n', (7941, 7966), False, 'from aether.python.avro.tools import AvroValidationException, random_avro, validate\n'), ((2280, 2310), 'random.choice', 'random.choice', (['ascii_lowercase'], {}), '(ascii_lowercase)\n', (2293, 2310), False, 'import random\n'), ((2380, 2395), 'random.random', 'random.random', ([], {}), '()\n', (2393, 2395), False, 'import random\n'), ((3395, 3407), 'aether.python.avro.schema.Node', 'Node', (['schema'], {}), '(schema)\n', (3399, 3407), False, 'from aether.python.avro.schema import Node\n'), ((8019, 8057), 'aether.python.avro.tools.AvroValidationException', 'AvroValidationException', (['result.errors'], {}), '(result.errors)\n', (8042, 8057), False, 'from aether.python.avro.tools import AvroValidationException, random_avro, validate\n'), ((8098, 8193), 'aether.python.avro.tools.AvroValidationException', 'AvroValidationException', (['"""Avro Validation failed, but detailed reporting found no errors."""'], {}), "(\n 'Avro Validation failed, but detailed reporting found no errors.')\n", (8121, 8193), False, 'from aether.python.avro.tools import AvroValidationException, random_avro, validate\n'), ((3456, 3478), 'json.loads', 'json.loads', (['raw_schema'], {}), '(raw_schema)\n', (3466, 3478), False, 'import json\n'), ((7157, 7177), 'random.choice', 'random.choice', (['_type'], {}), '(_type)\n', (7170, 7177), False, 'import random\n'), ((3804, 3818), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3816, 3818), False, 'from datetime import datetime, timedelta\n'), ((3737, 3751), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3749, 3751), False, 'from datetime import datetime, timedelta\n'), ((3754, 3773), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (3763, 3773), False, 'from datetime import datetime, timedelta\n'), ((7392, 7416), 'json.loads', 
'json.loads', (['node._source'], {}), '(node._source)\n', (7402, 7416), False, 'import json\n')] |
import unittest
from gsf.core.types import Time
from gsf.experiments.experiment_builders import DiscreteEventExperiment
from test.prototypes.prototype_4.assembly_line import AssemblyLine
class Prototype4Test(unittest.TestCase):
def test_validation(self):
assembly_line = AssemblyLine([1, 2])
experiment = DiscreteEventExperiment(assembly_line)
experiment.simulation_control.start(stop_time=Time(7))
experiment.simulation_control.wait()
print(experiment.simulation_report.generate_report())
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"test.prototypes.prototype_4.assembly_line.AssemblyLine",
"gsf.core.types.Time",
"gsf.experiments.experiment_builders.DiscreteEventExperiment"
] | [((571, 586), 'unittest.main', 'unittest.main', ([], {}), '()\n', (584, 586), False, 'import unittest\n'), ((286, 306), 'test.prototypes.prototype_4.assembly_line.AssemblyLine', 'AssemblyLine', (['[1, 2]'], {}), '([1, 2])\n', (298, 306), False, 'from test.prototypes.prototype_4.assembly_line import AssemblyLine\n'), ((328, 366), 'gsf.experiments.experiment_builders.DiscreteEventExperiment', 'DiscreteEventExperiment', (['assembly_line'], {}), '(assembly_line)\n', (351, 366), False, 'from gsf.experiments.experiment_builders import DiscreteEventExperiment\n'), ((422, 429), 'gsf.core.types.Time', 'Time', (['(7)'], {}), '(7)\n', (426, 429), False, 'from gsf.core.types import Time\n')] |
import cffi
import importlib
def compile(name, header, source):
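    # Build a CFFI extension module ("gen._<name>") from the given header and source files, then import it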
mod_name = "gen._" + name
ffi = cffi.FFI()
ffi.cdef(open(header).read())
ffi.set_source(
mod_name,
open(source).read(),
extra_compile_args=['-O3', '-march=native', '-ffast-math', '-std=c99'])
ffi.compile()
mod = importlib.import_module(mod_name)
lib = mod.lib
return ffi, lib
| [
"cffi.FFI",
"importlib.import_module"
] | [((106, 116), 'cffi.FFI', 'cffi.FFI', ([], {}), '()\n', (114, 116), False, 'import cffi\n'), ((327, 360), 'importlib.import_module', 'importlib.import_module', (['mod_name'], {}), '(mod_name)\n', (350, 360), False, 'import importlib\n')] |
import re
import attr
@attr.s(eq=True)
class BasicType(object):
name = attr.ib() # type: str
module = attr.ib() # type: str
is_array = attr.ib(default=False) # type: bool
is_iterator = attr.ib(default=False) # type: bool
REPLACEMENTS = {
'boolean': 'bool',
'java.lang.String': 'unicode',
'java.lang.Object': 'object',
'java.math.BigInteger': 'long',
'long': 'long',
'B': 'int', # byte
'Z': 'bool',
'C': 'int', # char
'S': 'int', # short
'I': 'int',
'J': 'long',
'F': 'float',
'D': 'float', # double
'void': 'None',
'short': 'int',
'byte': 'int',
'double': 'float',
# Below this line are replacements from parsing Java code
'char': 'int', # char
'java.lang.Boolean': 'bool',
'java.lang.Integer': 'int',
'java.lang.Long': 'long',
'java.lang.Byte': 'int',
'java.lang.Double': 'float',
'java.lang.Short': 'int',
'java.lang.Float': 'float',
}
@property
def qualified_name(self):
if self.is_builtin:
return self.name
return '{self.module}.{self.name}'.format(self=self)
@property
def proper_name(self):
name = self.REPLACEMENTS.get(self.qualified_name, self.qualified_name)
if self.is_array:
return 'List[{}]'.format(name)
elif self.is_iterator:
return 'Iterator[{}]'.format(name)
return name
@property
def requires(self):
requires = set()
if self.is_array:
requires.add(('typing', 'List'))
if self.is_iterator:
requires.add(('typing', 'Iterator'))
if '.' in self.proper_name and not self.is_builtin:
requires.add(self.module)
return requires
@property
def is_builtin(self):
return self.module == str.__module__
def is_overload_match(self, other):
if not isinstance(other, BasicType):
return False
if self == other:
return True
if self.proper_name == other.proper_name:
return True
if self.is_iterator and other.qualified_name == 'java.util.Iterator':
return True
return False
@staticmethod
def from_type(t):
# type: (type) -> BasicType
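        # A leading '[' on the module or class name marks an array type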
is_array = t.__module__.startswith('[') or t.__name__.startswith('[')
name = t.__name__.lstrip('[').rstrip(';').replace('$', '.')
module = t.__module__.lstrip('[L')
if module == 'java.util' and name == 'List':
is_array = True
name = 'object'
module = str.__module__
return BasicType(name=name, module=module, is_array=is_array)
@staticmethod
def from_java(definition):
# type: (str) -> BasicType
match = re.match(r'((?P<template>[\w.]+)<)?(?P<type>[\w.]+)(?P<array>\[\])?>?', definition)
if match is None:
raise ValueError('Invalid type definition: {}'.format(definition))
type_name = match.group('type')
is_array = False
is_iterator = False
if match.group('array'):
is_array = True
template = match.group('template')
if template:
if template == 'java.util.List':
is_array = True
elif template == 'java.util.Iterator':
is_iterator = True
elif template == 'java.util.ArrayList':
is_array = True
else:
type_name = template
module, _sep, name = type_name.rpartition('.')
if not module:
module = str.__module__
name = type_name
if name == 'T':
name = 'object'
basic_type = BasicType(name=str(name), module=str(module), is_array=is_array, is_iterator=is_iterator)
if basic_type.proper_name == '.void':
print(basic_type)
return basic_type
| [
"attr.s",
"re.match",
"attr.ib"
] | [((26, 41), 'attr.s', 'attr.s', ([], {'eq': '(True)'}), '(eq=True)\n', (32, 41), False, 'import attr\n'), ((78, 87), 'attr.ib', 'attr.ib', ([], {}), '()\n', (85, 87), False, 'import attr\n'), ((114, 123), 'attr.ib', 'attr.ib', ([], {}), '()\n', (121, 123), False, 'import attr\n'), ((152, 174), 'attr.ib', 'attr.ib', ([], {'default': '(False)'}), '(default=False)\n', (159, 174), False, 'import attr\n'), ((207, 229), 'attr.ib', 'attr.ib', ([], {'default': '(False)'}), '(default=False)\n', (214, 229), False, 'import attr\n'), ((2902, 2992), 're.match', 're.match', (['"""((?P<template>[\\\\w.]+)<)?(?P<type>[\\\\w.]+)(?P<array>\\\\[\\\\])?>?"""', 'definition'], {}), "('((?P<template>[\\\\w.]+)<)?(?P<type>[\\\\w.]+)(?P<array>\\\\[\\\\])?>?',\n definition)\n", (2910, 2992), False, 'import re\n')] |
import json
from typing import List
import pytest
from asgiref.sync import sync_to_async
import django
from django.test.client import RequestFactory
import strawberry
from strawberry.dataloader import DataLoader
from strawberry.django.views import AsyncGraphQLView
from .app.models import Example
pytestmark = [
pytest.mark.asyncio,
pytest.mark.skipif(
django.VERSION < (3, 1),
reason="Async views are only supported in Django >= 3.1",
),
]
def _prepare_db():
ids = []
for index in range(5):
ids.append(Example.objects.create(name=f"This is a demo async {index}").id)
return ids
@pytest.mark.django_db
async def test_fetch_data_from_db(mocker):
def _sync_batch_load(keys):
data = Example.objects.filter(id__in=keys)
return list(data)
prepare_db = sync_to_async(_prepare_db)
batch_load = sync_to_async(_sync_batch_load)
ids = await prepare_db()
async def idx(keys) -> List[Example]:
return await batch_load(keys)
mock_loader = mocker.Mock(side_effect=idx)
loader = DataLoader(load_fn=mock_loader)
@strawberry.type
class Query:
hello: str = "strawberry"
@strawberry.field
async def get_example(self, id: strawberry.ID) -> str:
example = await loader.load(id)
return example.name
schema = strawberry.Schema(query=Query)
query = f"""{{
a: getExample(id: "{ids[0]}")
b: getExample(id: "{ids[1]}")
c: getExample(id: "{ids[2]}")
d: getExample(id: "{ids[3]}")
e: getExample(id: "{ids[4]}")
}}"""
factory = RequestFactory()
request = factory.post(
"/graphql/", {"query": query}, content_type="application/json"
)
response = await AsyncGraphQLView.as_view(schema=schema)(request)
data = json.loads(response.content.decode())
assert not data.get("errors")
assert data["data"] == {
"a": "This is a demo async 0",
"b": "This is a demo async 1",
"c": "This is a demo async 2",
"d": "This is a demo async 3",
"e": "This is a demo async 4",
}
reset_db = sync_to_async(lambda: Example.objects.all().delete())
await reset_db()
mock_loader.assert_called_once_with([str(id_) for id_ in ids])
| [
"asgiref.sync.sync_to_async",
"strawberry.django.views.AsyncGraphQLView.as_view",
"strawberry.dataloader.DataLoader",
"pytest.mark.skipif",
"strawberry.Schema",
"django.test.client.RequestFactory"
] | [((348, 454), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(django.VERSION < (3, 1))'], {'reason': '"""Async views are only supported in Django >= 3.1"""'}), "(django.VERSION < (3, 1), reason=\n 'Async views are only supported in Django >= 3.1')\n", (366, 454), False, 'import pytest\n'), ((834, 860), 'asgiref.sync.sync_to_async', 'sync_to_async', (['_prepare_db'], {}), '(_prepare_db)\n', (847, 860), False, 'from asgiref.sync import sync_to_async\n'), ((878, 909), 'asgiref.sync.sync_to_async', 'sync_to_async', (['_sync_batch_load'], {}), '(_sync_batch_load)\n', (891, 909), False, 'from asgiref.sync import sync_to_async\n'), ((1083, 1114), 'strawberry.dataloader.DataLoader', 'DataLoader', ([], {'load_fn': 'mock_loader'}), '(load_fn=mock_loader)\n', (1093, 1114), False, 'from strawberry.dataloader import DataLoader\n'), ((1369, 1399), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (1386, 1399), False, 'import strawberry\n'), ((1635, 1651), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (1649, 1651), False, 'from django.test.client import RequestFactory\n'), ((1779, 1818), 'strawberry.django.views.AsyncGraphQLView.as_view', 'AsyncGraphQLView.as_view', ([], {'schema': 'schema'}), '(schema=schema)\n', (1803, 1818), False, 'from strawberry.django.views import AsyncGraphQLView\n')] |
"""Implementation of an Artificial Neural Network."""
import math
import numpy as np
from .base import Estimator
from .util import validate_data, normalize
def sigmoid(x, derivative=False):
"""Sigmoid activation function."""
if derivative:
return x * (1 - x)
return 1 / (1 + math.e ** (-x))
class NeuralNet(Estimator):
"""An artificial neural network classifier with a single hidden layer."""
def __init__(self, hidden_size=20, learning_rate=0.5, activation=sigmoid,
seed=0, training_iterations=1000):
"""Instantiate a NeauralNet.
Parameters
----------
hidden_size : int
Number of neurons in the hidden layer.
learning_rate : float
Learning rate for batch gradient descent.
activation : callable
Activation function.
seed : int
Random state seed.
training_iterations : int
Number of iterations to perform in fit().
"""
        self._hidden_size = hidden_size
self._learning_rate = learning_rate
self._activation = activation
self._seed = seed
self._training_iterations = training_iterations
self._synapses = None
self._hidden_layer = None
self._classes = None
def fit(self, X, y=None):
"""Fit the estimator on a feature matrix.
Parameters
----------
X : array-like {n_samples, n_features}
A feature matrix.
y : array-like {n_samples, }
A target vector.
"""
validate_data(X, y)
X = self._add_bias_feature(normalize(X))
self._classes = np.unique(y)
np.random.seed(self._seed)
n_samples, n_features = X.shape
self._synapses = 2 * np.random.random((n_features, )) - 1
for _ in range(self._training_iterations):
hidden_layer_delta = self._forward_propogation(X, y)
self._backward_propogation(X, hidden_layer_delta)
self._training_predictions = self._activation(np.dot(X, self._synapses))
return self
@staticmethod
def _add_bias_feature(X):
bias = np.ones((X.shape[0], 1))
return np.hstack([X, bias])
def _forward_propogation(self, X, y):
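        # Compute the layer activations, then return the prediction error scaled by the activation derivative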
hidden_layer = self._activation(np.dot(X, self._synapses))
error = y - hidden_layer
return error * self._activation(hidden_layer, True)
def _backward_propogation(self, X, hidden_layer_delta):
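        # Batch gradient-descent step: project the delta back onto the inputs and adjust the synapse weights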
self._synapses += self._learning_rate * np.dot(X.T, hidden_layer_delta)
def predict(self, X):
"""Predict labels for the feature matrix based on the training data.
Parameters
----------
X : array-like {n_samples, n_features}
A feature matrix.
Return
------
Predicted targets.
"""
validate_data(X)
X = self._add_bias_feature(normalize(X))
if self._synapses is None:
raise ValueError('NeuralNet has not been fit.')
return self._activation(np.dot(X, self._synapses))
| [
"numpy.ones",
"numpy.unique",
"numpy.hstack",
"numpy.random.random",
"numpy.dot",
"numpy.random.seed"
] | [((1671, 1683), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (1680, 1683), True, 'import numpy as np\n'), ((1692, 1718), 'numpy.random.seed', 'np.random.seed', (['self._seed'], {}), '(self._seed)\n', (1706, 1718), True, 'import numpy as np\n'), ((2172, 2196), 'numpy.ones', 'np.ones', (['(X.shape[0], 1)'], {}), '((X.shape[0], 1))\n', (2179, 2196), True, 'import numpy as np\n'), ((2212, 2232), 'numpy.hstack', 'np.hstack', (['[X, bias]'], {}), '([X, bias])\n', (2221, 2232), True, 'import numpy as np\n'), ((2060, 2085), 'numpy.dot', 'np.dot', (['X', 'self._synapses'], {}), '(X, self._synapses)\n', (2066, 2085), True, 'import numpy as np\n'), ((2316, 2341), 'numpy.dot', 'np.dot', (['X', 'self._synapses'], {}), '(X, self._synapses)\n', (2322, 2341), True, 'import numpy as np\n'), ((2545, 2576), 'numpy.dot', 'np.dot', (['X.T', 'hidden_layer_delta'], {}), '(X.T, hidden_layer_delta)\n', (2551, 2576), True, 'import numpy as np\n'), ((3070, 3095), 'numpy.dot', 'np.dot', (['X', 'self._synapses'], {}), '(X, self._synapses)\n', (3076, 3095), True, 'import numpy as np\n'), ((1789, 1820), 'numpy.random.random', 'np.random.random', (['(n_features,)'], {}), '((n_features,))\n', (1805, 1820), True, 'import numpy as np\n')] |
# Copyright 2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from ._ufunc.comparison import logical_and
from ._ufunc.floating import isinf, signbit
from .array import convert_to_cunumeric_ndarray, ndarray
from .module import full
def isneginf(x, out=None):
"""
Test element-wise for negative infinity, return result as bool array.
Parameters
----------
x : array_like
The input array.
out : array_like, optional
A location into which the result is stored. If provided, it must have a
shape that the input broadcasts to. If not provided or None, a
freshly-allocated boolean array is returned.
Returns
-------
out : ndarray
A boolean array with the same dimensions as the input.
If second argument is not supplied then a numpy boolean array is
returned with values True where the corresponding element of the
input is negative infinity and values False where the element of
the input is not negative infinity.
If a second argument is supplied the result is stored there. If the
type of that array is a numeric type the result is represented as
zeros and ones, if the type is boolean then as False and True. The
return value `out` is then a reference to that array.
See Also
--------
numpy.isneginf
Availability
--------
Multiple GPUs, Multiple CPUs
"""
x = convert_to_cunumeric_ndarray(x)
if out is not None:
out = convert_to_cunumeric_ndarray(out, share=True)
rhs1 = isinf(x)
rhs2 = signbit(x)
return logical_and(rhs1, rhs2, out=out)
def isposinf(x, out=None):
"""
Test element-wise for positive infinity, return result as bool array.
Parameters
----------
x : array_like
The input array.
out : array_like, optional
A location into which the result is stored. If provided, it must have a
shape that the input broadcasts to. If not provided or None, a
freshly-allocated boolean array is returned.
Returns
-------
out : ndarray
A boolean array with the same dimensions as the input.
If second argument is not supplied then a boolean array is returned
with values True where the corresponding element of the input is
positive infinity and values False where the element of the input is
not positive infinity.
If a second argument is supplied the result is stored there. If the
type of that array is a numeric type the result is represented as zeros
and ones, if the type is boolean then as False and True.
The return value `out` is then a reference to that array.
See Also
--------
numpy.isposinf
Availability
--------
Multiple GPUs, Multiple CPUs
"""
x = convert_to_cunumeric_ndarray(x)
if out is not None:
out = convert_to_cunumeric_ndarray(out, share=True)
rhs1 = isinf(x)
rhs2 = ~signbit(x)
return logical_and(rhs1, rhs2, out=out)
def iscomplex(x):
"""
Returns a bool array, where True if input element is complex.
What is tested is whether the input has a non-zero imaginary part, not if
the input type is complex.
Parameters
----------
x : array_like
Input array.
Returns
-------
out : ndarray[bool]
Output array.
See Also
--------
numpy.iscomplex
Availability
--------
Multiple GPUs, Multiple CPUs
"""
x = convert_to_cunumeric_ndarray(x)
if x.dtype.kind != "c":
return full(x.shape, False, dtype=bool)
else:
return x.imag != 0
def iscomplexobj(x):
"""
Check for a complex type or an array of complex numbers.
The type of the input is checked, not the value. Even if the input
has an imaginary part equal to zero, `iscomplexobj` evaluates to True.
Parameters
----------
x : any
The input can be of any type and shape.
Returns
-------
iscomplexobj : bool
The return value, True if `x` is of a complex type or has at least
one complex element.
See Also
--------
numpy.iscomplexobj
Availability
--------
Single CPU
"""
if isinstance(x, ndarray):
return x.dtype.kind == "c"
else:
return np.iscomplexobj(x)
def isreal(x):
"""
Returns a bool array, where True if input element is real.
If element has complex type with zero complex part, the return value
for that element is True.
Parameters
----------
x : array_like
Input array.
Returns
-------
out : ndarray, bool
Boolean array of same shape as `x`.
See Also
--------
numpy.isreal
Availability
--------
Multiple GPUs, Multiple CPUs
"""
x = convert_to_cunumeric_ndarray(x)
if x.dtype.kind != "c":
return full(x.shape, True, dtype=bool)
else:
return x.imag == 0
def isrealobj(x):
"""
Return True if x is a not complex type or an array of complex numbers.
The type of the input is checked, not the value. So even if the input
has an imaginary part equal to zero, `isrealobj` evaluates to False
if the data type is complex.
Parameters
----------
x : any
The input can be of any type and shape.
Returns
-------
y : bool
The return value, False if `x` is of a complex type.
See Also
--------
numpy.isrealobj
Availability
--------
Single CPU
"""
return not iscomplexobj(x)
def isscalar(x):
"""
Returns True if the type of `element` is a scalar type.
Parameters
----------
element : any
Input argument, can be of any type and shape.
Returns
-------
val : bool
True if `element` is a scalar type, False if it is not.
See Also
--------
numpy.isscalar
Notes
-----
This function falls back to NumPy for all object types but cuNumeric's
ndarray, which always returns `False`.
Availability
--------
Single CPU
"""
# Since the input can be any value, we can't just convert it to cunumeric
# ndarray. Instead we check if the input is cunumeric ndarray and, if not,
# fall back to Numpy
if isinstance(x, ndarray):
return False
else:
return np.isscalar(x)
| [
"numpy.iscomplexobj",
"numpy.isscalar"
] | [((4884, 4902), 'numpy.iscomplexobj', 'np.iscomplexobj', (['x'], {}), '(x)\n', (4899, 4902), True, 'import numpy as np\n'), ((6930, 6944), 'numpy.isscalar', 'np.isscalar', (['x'], {}), '(x)\n', (6941, 6944), True, 'import numpy as np\n')] |
from __future__ import print_function
import datetime
import json
import logging
from decimal import Decimal
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models, transaction
from django.utils import timezone
from papertrail import signals
from papertrail.fields import JSONField
LOG = logging.getLogger(__name__)
def coerce_to_queryset(instance_or_queryset):
if isinstance(instance_or_queryset, models.Model):
instance = instance_or_queryset
return instance.__class__.objects.filter(pk=instance.pk)
else:
return instance_or_queryset
def related_to_q(obj, relation_name=None):
'''
Create a Q object expressing an event relation with an optional name.
This is useful as a building block for Entry.objects.related_to(), and
can be used to provide better query control for more complex queries
without the boilerplate of directly querying an Entry's related objects.
Example 1: OR query
Entry.objects.filter(related_to(user1) | related_to(user2)).distinct()
Example 2: building block to Entry.objects.related_to()
The following are equivalent:
Entry.objects.related_to(user=user1, group=group1)
Entry.objects.filter(related_to_q(user1, 'user'))
.filter(related_to_q(group1, 'group'))
'''
related_qs = coerce_to_queryset(obj)
content_type = ContentType.objects.get_for_model(related_qs.model)
filters = {
'targets__related_content_type': content_type,
'targets__related_id__in': related_qs,
}
if relation_name:
filters.update({'targets__relation_name': relation_name})
return models.Q(**filters)
class RelatedObjectsQuerySetMixin(object):
def _get_object_ids_in_papertrail(self, qs, relation_name):
'''
A helper method that receives an arbitrary queryset `qs` and a relation name
`relation_name` and returns a list of object ids that are part of that queryset
and ARE referenced by the `self` entries queryset.
relation_name needs to point to the same type of objects the queryset this method
receives is pointing to.
For example, if qs has User objects whose ids are 1, 2, 3 and `self` has only
one papertrail entry and that entry has targets={'user': user2}, calling
_get_object_ids_in_papertrail(qs, 'user') would return only the id of user 2
(because user1 and user3 do not have papertrail entries pointing at them)
'''
# Filter ourselves to only entries relating to the queryset the user
# is passing us
entries_related_to_qs = self.related_to(**{relation_name: qs})
# Get the RelatedObject model (this would typically be EntryRelatedObject)
targets_model = self.model().targets_model
# Query related objects to get IDs of related objects that satisfy the following conditions:
related_entries = (
targets_model.objects
# 1) They point to entries that are related to the queryset we received
.filter(**{targets_model.owner_field_name + '__in': entries_related_to_qs})
# 2) Their relation_name matches the one the user is querying for
.filter(relation_name=relation_name)
)
# Return
return related_entries.values_list('related_id', flat=True)
def objects_not_represented(self, qs, relation_name):
return qs.exclude(id__in=self._get_object_ids_in_papertrail(qs, relation_name))
def objects_represented(self, qs, relation_name):
return qs.filter(id__in=self._get_object_ids_in_papertrail(qs, relation_name))
def related_to(self, *relations, **named_relations):
'''
Filter entries based on objects they pertain to, either generically or
by a specific relation type. If multiple relations are specified, the
filter combines them with AND semantics.
Examples:
Tracking a simple 'follow' event where one user follows another,
which is logged as:
> user1 = User.objects.get(...)
> user2 = User.objects.get(...)
> log('user-followed', 'User followed another user', targets={'follower': user1, 'following': user2})
First, a simple query for all events for user1, regardless of
the type of relationship:
> Entry.objects.related_to(user1)
Next, to query for events involving both user1 and user2.
> Entry.objects.related_to(user1, user2)
Finally, to query for specific relationships, such as user1
following user2:
> Entry.objects.related_to(follower=user1, following=user2)
'''
entry_qs = self
all_relations = [(None, r) for r in relations] + list(named_relations.items())
for name, relation in all_relations:
entry_qs = entry_qs.filter(related_to_q(relation, name))
return entry_qs.distinct()
class EntryQuerySet(models.query.QuerySet, RelatedObjectsQuerySetMixin):
def all_types(self):
return self.order_by('type').values_list('type', flat=True).distinct()
class ModelWithRelatedObjectsMixin(object):
def get(self, target, default=None):
try:
return self[target]
except KeyError:
return default
def set(self, target_name, val, replace=True):
'''
Sets the updated target value, optionally replacing the existing target
by that name. If `replace` is False, raises an error if the target
already exists. `val` is generally a Django model instance, but can
also be a tuple of (content_type, id) to reference an object as the
contenttypes app does (this also allows references to deleted objects).
'''
target = self.get(target_name)
if target and not replace:
raise ValueError('Target {} already exists for this event'.format(target_name))
attributes = {
self.targets_model.owner_field_name: self,
'relation_name': target_name,
}
target = target or self.targets_model(**attributes)
if type(val) == tuple:
content_type, object_id = val
target.related_content_type = content_type
target.related_id = object_id
target.save()
elif val:
target.related_object = val
target.save()
return target
@property
def targets_map(self):
return dict((t.relation_name, t.related_object) for t in self.targets.all())
def update(self, targets_map):
for target, val in (targets_map or {}).items():
self[target] = val
def __getitem__(self, target_name):
# Optimization, in case we pre-fetched targets
if hasattr(self, '_prefetched_objects_cache') and 'targets' in self._prefetched_objects_cache:
for target in self._prefetched_objects_cache['targets']:
if target.relation_name == target_name:
return target.related_object
raise KeyError
try:
target = self.targets.get(relation_name=target_name)
return target.related_object
except self.targets_model.DoesNotExist:
raise KeyError
def __setitem__(self, target, val):
return self.set(target, val)
def __contains__(self, target_name):
return self.targets.filter(relation_name=target_name).exists()
class Entry(models.Model, ModelWithRelatedObjectsMixin):
timestamp = models.DateTimeField(db_index=True)
type = models.CharField(max_length=50, db_index=True)
message = models.TextField()
data = JSONField(null=True)
# Field for storing a custom 'key' for looking up specific events from
# external sources. This can be used to quickly and precisely look up
# events that you can derive natural keys for, but aren't easily queryable
# using Entry's other fields.
external_key = models.CharField(max_length=255, null=True, db_index=True)
objects = EntryQuerySet.as_manager()
def __unicode__(self):
return u'{} - {}'.format(self.type, self.message)
class Meta:
ordering = ['-timestamp']
get_latest_by = 'timestamp'
verbose_name_plural = 'entries'
@property
def targets_model(self):
return EntryRelatedObject
# If you want to subclass this, make sure that the foreign key
# related name is called targets
class RelatedObject(models.Model):
relation_name = models.CharField(max_length=100, db_index=True)
related_content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
related_id = models.PositiveIntegerField(db_index=True)
related_object = GenericForeignKey('related_content_type', 'related_id')
class Meta:
abstract = True
@property
def safe_related_object(self):
try:
return self.related_object
# This will happen if the model class of this object is not available, for example if the app providing it has been removed from INSTALLED_APPS
except AttributeError:
return None
class EntryRelatedObject(RelatedObject):
entry = models.ForeignKey('Entry', related_name='targets', on_delete=models.CASCADE)
owner_field_name = 'entry'
def replace_object_in_papertrail(old_obj, new_obj, entry_qs=None):
entry_qs = entry_qs or Entry.objects.all()
old_obj_type = ContentType.objects.get_for_model(old_obj.__class__)
new_obj_type = ContentType.objects.get_for_model(new_obj.__class__)
related_qs = (EntryRelatedObject.objects.filter(
entry__in=entry_qs,
related_content_type=old_obj_type,
related_id=old_obj.pk
))
related_qs.update(related_content_type=new_obj_type,
related_id=new_obj.pk)
def json_default(o):
"""
    A `default` method for allowing objects with dates/decimals to be encoded into JSON.
    Usage: json.dumps(obj, default=json_default)
"""
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime('%Y-%m-%dT%H:%M:%S')
if isinstance(o, datetime.date):
return o.strftime('%Y-%m-%d')
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime('%H:%M:%S')
def json_serializeable(obj):
"""
    Attempts to return a copy of `obj` that is JSON serializable
"""
return json.loads(json.dumps(obj, default=json_default))
def log(event_type, message, data=None, timestamp=None, targets=None, external_key=None, data_adapter=json_serializeable):
data_adapter = data_adapter or (lambda obj: obj)
timestamp = timestamp or timezone.now()
with transaction.atomic():
# Enforce uniqueness on event_type/external_id if an external id is
# provided
if external_key:
entry, created = Entry.objects.get_or_create(
type=event_type,
external_key=external_key,
defaults={
'message': message,
'data': data,
'timestamp': timestamp,
}
)
if not created:
return
else:
entry = Entry.objects.create(
type=event_type,
message=message,
data=data_adapter(data),
timestamp=timestamp,
)
entry.update(targets)
if getattr(settings, 'PAPERTRAIL_SHOW', False):
WARNING = u'\033[95m'
ENDC = u'\033[0m'
print(WARNING + u'papertrail ' + ENDC + event_type + u" " + message)
signals.event_logged.send_robust(sender=entry)
return entry
# Expose aliases for common filter functions
related_to = Entry.objects.related_to
objects_not_represented = Entry.objects.objects_not_represented
objects_represented = Entry.objects.objects_represented
filter = Entry.objects.filter
exclude = Entry.objects.exclude
all_types = Entry.objects.all_types
| [
"logging.getLogger",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.transaction.atomic",
"json.dumps",
"papertrail.signals.event_logged.send_robust",
"django.utils.timezone.now",
"papertrail.fields.JSONField",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.contrib.contenttypes.fields.GenericForeignKey",
"django.db.models.Q",
"django.db.models.CharField"
] | [((423, 450), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (440, 450), False, 'import logging\n'), ((1509, 1560), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['related_qs.model'], {}), '(related_qs.model)\n', (1542, 1560), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1784, 1803), 'django.db.models.Q', 'models.Q', ([], {}), '(**filters)\n', (1792, 1803), False, 'from django.db import models, transaction\n'), ((7723, 7758), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (7743, 7758), False, 'from django.db import models, transaction\n'), ((7770, 7816), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'db_index': '(True)'}), '(max_length=50, db_index=True)\n', (7786, 7816), False, 'from django.db import models, transaction\n'), ((7831, 7849), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (7847, 7849), False, 'from django.db import models, transaction\n'), ((7861, 7881), 'papertrail.fields.JSONField', 'JSONField', ([], {'null': '(True)'}), '(null=True)\n', (7870, 7881), False, 'from papertrail.fields import JSONField\n'), ((8165, 8223), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'db_index': '(True)'}), '(max_length=255, null=True, db_index=True)\n', (8181, 8223), False, 'from django.db import models, transaction\n'), ((8711, 8758), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'db_index': '(True)'}), '(max_length=100, db_index=True)\n', (8727, 8758), False, 'from django.db import models, transaction\n'), ((8786, 8842), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ContentType'], {'on_delete': 'models.CASCADE'}), '(ContentType, on_delete=models.CASCADE)\n', (8803, 8842), False, 'from django.db import models, transaction\n'), ((8860, 8902), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (8887, 8902), False, 'from django.db import models, transaction\n'), ((8924, 8979), 'django.contrib.contenttypes.fields.GenericForeignKey', 'GenericForeignKey', (['"""related_content_type"""', '"""related_id"""'], {}), "('related_content_type', 'related_id')\n", (8941, 8979), False, 'from django.contrib.contenttypes.fields import GenericForeignKey\n'), ((9385, 9461), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Entry"""'], {'related_name': '"""targets"""', 'on_delete': 'models.CASCADE'}), "('Entry', related_name='targets', on_delete=models.CASCADE)\n", (9402, 9461), False, 'from django.db import models, transaction\n'), ((9628, 9680), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['old_obj.__class__'], {}), '(old_obj.__class__)\n', (9661, 9680), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((9700, 9752), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['new_obj.__class__'], {}), '(new_obj.__class__)\n', (9733, 9752), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((10820, 10857), 'json.dumps', 'json.dumps', (['obj'], {'default': 'json_default'}), '(obj, default=json_default)\n', (10830, 10857), False, 'import json\n'), ((11067, 11081), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (11079, 11081), 
False, 'from django.utils import timezone\n'), ((11092, 11112), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (11110, 11112), False, 'from django.db import models, transaction\n'), ((12071, 12117), 'papertrail.signals.event_logged.send_robust', 'signals.event_logged.send_robust', ([], {'sender': 'entry'}), '(sender=entry)\n', (12103, 12117), False, 'from papertrail import signals\n')] |
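The docstrings above already give query-side examples; the sketch below ties them to the log() entry point defined at the end of the module. The import path and the User lookups are assumptions for illustration, so adjust them to wherever Entry and log live in your installation.

# Usage sketch: record an event with named relations, then query it back.
from django.contrib.auth.models import User
from papertrail.models import Entry, log   # assumed import path

follower = User.objects.get(username='alice')   # illustrative lookups
following = User.objects.get(username='bob')

log('user-followed', 'User followed another user',
    targets={'follower': follower, 'following': following})

Entry.objects.related_to(follower)                                 # any relation to alice
Entry.objects.related_to(follower=follower, following=following)   # alice follows bob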
"""Tests for `pokemaseter2.io`."""
from pokemaster2.db import io, tables
def test_load_unsafe(test_db, test_csv_dir):
"""Load db with `unsafe=True`."""
io.load(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir, safe=False)
assert 0 == test_db.synchronous
assert "memory" == test_db.journal_mode
def test_load_drop_table(test_db, test_csv_dir, test_pokemon_csv):
"""Drop tables."""
# Load the pokemon table first
io.load(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir)
bulbasaur_1 = tables.Pokemon.select().where(tables.Pokemon.identifier == "bulbasaur").first()
assert "bulbasaur" == bulbasaur_1.identifier
# Drop the original pokemon table and load another table
content = """id,identifier,species_id,height,weight,base_experience,order,is_default
1,bulbasaur_2,1,7,69,64,1,1
"""
pokemon_csv = test_csv_dir / "pokemon.csv"
pokemon_csv.write_text(content)
io.load(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir, drop_tables=True)
bulbasaur_2 = tables.Pokemon.select().where(tables.Pokemon.identifier == "bulbasaur_2").first()
assert "bulbasaur_2" == bulbasaur_2.identifier
def test_load_pokemon(test_db, test_pokemon_csv, test_csv_dir):
"""Load Pokemon."""
io.load(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir, drop_tables=True)
bulbasaur = tables.Pokemon.select().where(tables.Pokemon.identifier == "bulbasaur").first()
assert 1 == bulbasaur.id
| [
"pokemaster2.db.io.load",
"pokemaster2.db.tables.Pokemon.select"
] | [((162, 237), 'pokemaster2.db.io.load', 'io.load', (['test_db'], {'models': '[tables.Pokemon]', 'csv_dir': 'test_csv_dir', 'safe': '(False)'}), '(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir, safe=False)\n', (169, 237), False, 'from pokemaster2.db import io, tables\n'), ((449, 512), 'pokemaster2.db.io.load', 'io.load', (['test_db'], {'models': '[tables.Pokemon]', 'csv_dir': 'test_csv_dir'}), '(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir)\n', (456, 512), False, 'from pokemaster2.db import io, tables\n'), ((930, 1016), 'pokemaster2.db.io.load', 'io.load', (['test_db'], {'models': '[tables.Pokemon]', 'csv_dir': 'test_csv_dir', 'drop_tables': '(True)'}), '(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir, drop_tables\n =True)\n', (937, 1016), False, 'from pokemaster2.db import io, tables\n'), ((1257, 1343), 'pokemaster2.db.io.load', 'io.load', (['test_db'], {'models': '[tables.Pokemon]', 'csv_dir': 'test_csv_dir', 'drop_tables': '(True)'}), '(test_db, models=[tables.Pokemon], csv_dir=test_csv_dir, drop_tables\n =True)\n', (1264, 1343), False, 'from pokemaster2.db import io, tables\n'), ((531, 554), 'pokemaster2.db.tables.Pokemon.select', 'tables.Pokemon.select', ([], {}), '()\n', (552, 554), False, 'from pokemaster2.db import io, tables\n'), ((1030, 1053), 'pokemaster2.db.tables.Pokemon.select', 'tables.Pokemon.select', ([], {}), '()\n', (1051, 1053), False, 'from pokemaster2.db import io, tables\n'), ((1355, 1378), 'pokemaster2.db.tables.Pokemon.select', 'tables.Pokemon.select', ([], {}), '()\n', (1376, 1378), False, 'from pokemaster2.db import io, tables\n')] |
import utils
import jemail
__all__ = ["api_noticias", "api_docx", "api_email"]
def api_noticias(data_json):
keys = ("noticias", "titulo", "cabecalho", "tempo")
if utils.json_request_valido(keys, data_json):
titulo, markdown, erro = utils.gerar_jornal(data_json)
if not erro:
return {
"status": 200,
"message": "OK",
"erro": None,
"texto_markdown": markdown,
"titulo": titulo,
}
    # if anything went wrong (invalid JSON or an error from gerar_jornal)
    return {
        "status": 400,
        "message": "problema no post, verifique seu dados",
        "erro": erro or "JSON inválido",
"texto_markdown": None,
"titulo": None,
}
def api_docx(data_json):
keys = ("titulo", "texto_markdown")
if utils.json_request_valido(keys, data_json):
nome_arquivo, erro = utils.gerar_docx(data_json)
if not erro:
return {
"status": 201,
"message": "Criado",
"erro": None,
"nome_arquivo": nome_arquivo,
}
    # if anything went wrong (invalid JSON or an error from gerar_docx)
    return {
        "status": 400,
        "message": "Ocorreu um problema ",
        "erro": erro or "Erro ao gerar DOCX",
"nome_arquivo": nome_arquivo,
}
def api_email(data_json):
keys = ("emailTo", "emailFrom", "senha", "arquivo")
    ok = utils.json_request_valido(keys, data_json)
    erro = None  # default so the fallback response below never references an undefined name
if ok:
to = data_json["emailTo"]
ffrom = data_json["emailFrom"]
senha = data_json["senha"]
file_name = data_json["arquivo"]
subject = "[JORNAL-APP] - "
erro = jemail.enviar_email(
ffrom, to, subject, senha, file_name
)
if not erro:
return {
"status": 200,
"message": "Email Enviado!",
"erro": None,
}
    # if something went wrong
return {
"status": 400,
"message": "Erro ao enviar email",
"erro": erro or "Falhou!",
}
| [
"jemail.enviar_email",
"utils.gerar_docx",
"utils.gerar_jornal",
"utils.json_request_valido"
] | [((175, 217), 'utils.json_request_valido', 'utils.json_request_valido', (['keys', 'data_json'], {}), '(keys, data_json)\n', (200, 217), False, 'import utils\n'), ((818, 860), 'utils.json_request_valido', 'utils.json_request_valido', (['keys', 'data_json'], {}), '(keys, data_json)\n', (843, 860), False, 'import utils\n'), ((1389, 1431), 'utils.json_request_valido', 'utils.json_request_valido', (['keys', 'data_json'], {}), '(keys, data_json)\n', (1414, 1431), False, 'import utils\n'), ((252, 281), 'utils.gerar_jornal', 'utils.gerar_jornal', (['data_json'], {}), '(data_json)\n', (270, 281), False, 'import utils\n'), ((891, 918), 'utils.gerar_docx', 'utils.gerar_docx', (['data_json'], {}), '(data_json)\n', (907, 918), False, 'import utils\n'), ((1644, 1701), 'jemail.enviar_email', 'jemail.enviar_email', (['ffrom', 'to', 'subject', 'senha', 'file_name'], {}), '(ffrom, to, subject, senha, file_name)\n', (1663, 1701), False, 'import jemail\n')] |
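The three endpoints only document their inputs through the keys tuples, so a request sketch may help. The field values below are invented, and the exact shape of "noticias" depends on utils.gerar_jornal, which is not shown here.

# Request sketch for api_noticias / api_docx (illustrative values only).
exemplo = {
    "titulo": "Jornal da Turma",
    "cabecalho": "Edicao de sexta-feira",
    "tempo": "Ensolarado",
    "noticias": ["Aula de campo confirmada"],   # shape assumed; gerar_jornal defines it
}
resposta = api_noticias(exemplo)
# success -> {"status": 200, "message": "OK", "erro": None,
#             "texto_markdown": "...", "titulo": "Jornal da Turma"}
if resposta["status"] == 200:
    api_docx({"titulo": resposta["titulo"],
              "texto_markdown": resposta["texto_markdown"]})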
import unittest
from Sastrawi.Dictionary.ArrayDictionary import ArrayDictionary
from Sastrawi.Stemmer.Stemmer import Stemmer
from Sastrawi.Stemmer.StemmerFactory import StemmerFactory
class Test_StemmerTest(unittest.TestCase):
def setUp(self):
stemmerFactory = StemmerFactory()
self.stemmer = stemmerFactory.create_stemmer()
return super(Test_StemmerTest, self).setUp()
def get_test_data(self):
data = []
data.append(['kebijakan', 'bijak'])
#//data.append(['karyawan', 'karya'])
#//data.append(['karyawati', 'karya'])
data.append(['kinerja', 'kerja'])
data.append(['mengandung', 'kandung'])
data.append(['memakan', 'makan'])
data.append(['asean', 'asean'])
data.append(['pemandu', 'pandu'])
data.append(['mengurangi', 'kurang'])
data.append(['pemerintah', 'perintah'])
data.append(['mengabulkan', 'kabul'])
data.append(['mengupas', 'kupas'])
data.append(['keterpurukan', 'puruk'])
data.append(['ditemukan', 'temu'])
data.append(['mengerti', 'erti'])
data.append(['kebon', 'kebon'])
data.append(['terdepan', 'depan'])
data.append(['mengikis', 'kikis'])
data.append(['kedudukan', 'duduk'])
data.append(['menekan', 'tekan'])
data.append(['perusakan', 'rusa']) # overstemming, it's better than perusa
data.append(['ditemui', 'temu'])
data.append(['di', 'di'])
data.append(['mengalahkan', 'kalah'])
data.append(['melewati', 'lewat'])
data.append(['bernafas', 'nafas'])
data.append(['meniru-niru', 'tiru'])
data.append(['memanggil-manggil', 'panggil'])
data.append(['menyebut-nyebut', 'sebut'])
data.append(['menganga', 'nganga'])
data.append(['besaran', 'besar'])
data.append(['terhenyak', 'henyak'])
data.append(['mengokohkan', 'kokoh'])
data.append(['melainkan', 'lain'])
data.append(['kuasa-Mu', 'kuasa'])
data.append(['malaikat-malaikat-Nya', 'malaikat'])
data.append(['nikmat-Ku', 'nikmat'])
return data
def try_stem(self, word, stem):
self.assertEquals(stem, self.stemmer.stem(word))
def test_stem(self):
data = self.get_test_data()
for d in data:
self.try_stem(d[0], d[1])
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"Sastrawi.Stemmer.StemmerFactory.StemmerFactory"
] | [((2401, 2416), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2414, 2416), False, 'import unittest\n'), ((274, 290), 'Sastrawi.Stemmer.StemmerFactory.StemmerFactory', 'StemmerFactory', ([], {}), '()\n', (288, 290), False, 'from Sastrawi.Stemmer.StemmerFactory import StemmerFactory\n')] |
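The stemmer built in setUp() can also be exercised directly outside the test harness; the expected outputs below come from the test data itself.

# Usage sketch, mirroring setUp() above.
from Sastrawi.Stemmer.StemmerFactory import StemmerFactory

stemmer = StemmerFactory().create_stemmer()
for word in ('kebijakan', 'mengandung', 'menyebut-nyebut'):
    print(word, '->', stemmer.stem(word))
# kebijakan -> bijak
# mengandung -> kandung
# menyebut-nyebut -> sebut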
# -*- coding: utf-8 -*-
from pathlib import Path
from typing import Any
import yaml
from airflow.utils.module_loading import import_string
from data_detective_airflow import constants
from data_detective_airflow.dag_generator.dags.tdag import TDag
class YamlDag(TDag):
"""DAG created based on the description in the yaml file
:param dag_dir: Directory with TDag.META_FILE
:param config: Optional and decomposed meta.yaml file
"""
def __init__(self, dag_dir: str, config: dict[str, Any]):
self.config = config
if not self.config:
with open(f'{dag_dir}/{TDag.META_FILE}', encoding='utf-8') as file:
self.config = yaml.safe_load(file)
super().__init__(
dag_dir=dag_dir,
dag_id=Path(dag_dir).name,
factory=self.config['factory'],
start_date=constants.DEFAULT_START_DATE,
schedule_interval=self.config.get('schedule_interval'),
description=self.config.get('description', ''),
default_args=self.config['default_args'],
template_searchpath=dag_dir,
tags=self.config.get('tags'),
)
self.__fill_dag()
def __fill_dag(self):
"""Add a task to a DAG according to the description in yaml file"""
tasks = self.config.get('tasks')
if tasks:
for task in tasks:
self.attach_task(task)
def attach_task(self, task: dict) -> None:
"""Add task to DAG
:param task: Dictionary with task attributes
"""
excluded_params = ('type',)
task_filtered = {k: v for k, v in task.items() if k not in excluded_params}
# Processing callable parameters by converting from a string to a function
callables = filter(lambda k: k.endswith('_callable'), task_filtered.copy().keys())
for param in callables:
task_filtered[param.replace('_callable', '_lambda_val')] = task_filtered[param]
task_filtered[param] = self.get_callable_by_def(task_filtered[param])
# Create a task
task_filtered['dag'] = self
task_type = import_string(task['type'])
task_type(**task_filtered)
| [
"airflow.utils.module_loading.import_string",
"yaml.safe_load",
"pathlib.Path"
] | [((2151, 2178), 'airflow.utils.module_loading.import_string', 'import_string', (["task['type']"], {}), "(task['type'])\n", (2164, 2178), False, 'from airflow.utils.module_loading import import_string\n'), ((681, 701), 'yaml.safe_load', 'yaml.safe_load', (['file'], {}), '(file)\n', (695, 701), False, 'import yaml\n'), ((777, 790), 'pathlib.Path', 'Path', (['dag_dir'], {}), '(dag_dir)\n', (781, 790), False, 'from pathlib import Path\n')] |
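YamlDag documents its configuration only implicitly, through the keys it reads from meta.yaml. The dict below shows that shape passed via the config argument; every value, the operator path, and the dag_dir are illustrative assumptions, and anything callable-related goes through get_callable_by_def(), which is inherited from TDag and not shown here.

# Config sketch: the structure __init__ and attach_task() expect (values are assumed).
example_config = {
    "factory": "example_factory",                 # assumed factory name
    "schedule_interval": "@daily",
    "description": "Example generated DAG",
    "default_args": {"owner": "airflow", "retries": 1},
    "tags": ["generated"],
    "tasks": [
        {
            "type": "airflow.operators.bash.BashOperator",  # resolved with import_string()
            "task_id": "hello",
            "bash_command": "echo hello",
        },
    ],
}
dag = YamlDag(dag_dir="/dags/example", config=example_config)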
from django.contrib import admin
from .models import DiasVisita, Horario, Imovei, Cidade, Imagem, Visitas, Imagens_nossas
@admin.register(Imovei)
class ImoveiAdmin(admin.ModelAdmin):
    list_display = ('rua', 'valor', 'quartos', 'tamanho', 'cidade', 'tipo')  # fields shown in the model's list view
    list_editable = ('valor', 'tipo')  # fields that can be edited directly in the list
    list_filter = ('cidade', 'tipo')  # filters applied to the model's data
# these calls register the tables in the Django admin so we can change them freely from the admin
admin.site.register(DiasVisita)
admin.site.register(Horario)
admin.site.register(Imagem)
admin.site.register(Imagens_nossas)
admin.site.register(Cidade)
admin.site.register(Visitas)
| [
"django.contrib.admin.register",
"django.contrib.admin.site.register"
] | [((126, 148), 'django.contrib.admin.register', 'admin.register', (['Imovei'], {}), '(Imovei)\n', (140, 148), False, 'from django.contrib import admin\n'), ((544, 575), 'django.contrib.admin.site.register', 'admin.site.register', (['DiasVisita'], {}), '(DiasVisita)\n', (563, 575), False, 'from django.contrib import admin\n'), ((576, 604), 'django.contrib.admin.site.register', 'admin.site.register', (['Horario'], {}), '(Horario)\n', (595, 604), False, 'from django.contrib import admin\n'), ((605, 632), 'django.contrib.admin.site.register', 'admin.site.register', (['Imagem'], {}), '(Imagem)\n', (624, 632), False, 'from django.contrib import admin\n'), ((633, 668), 'django.contrib.admin.site.register', 'admin.site.register', (['Imagens_nossas'], {}), '(Imagens_nossas)\n', (652, 668), False, 'from django.contrib import admin\n'), ((669, 696), 'django.contrib.admin.site.register', 'admin.site.register', (['Cidade'], {}), '(Cidade)\n', (688, 696), False, 'from django.contrib import admin\n'), ((697, 725), 'django.contrib.admin.site.register', 'admin.site.register', (['Visitas'], {}), '(Visitas)\n', (716, 725), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 21 14:19:26 2021
@author: mohamed_akel
"""
# package that collects several modules for working with URLs
from urllib.request import urlopen
from urllib.error import HTTPError
from urllib.error import URLError
# library to pull web data.
from bs4 import BeautifulSoup
# list all the names.
def getNames(url):
""" get authors names function.
Input:
url: string contains address of the page
output:
names: a list contains author names.
"""
try:
html = urlopen(url)
except HTTPError as e:
return None
except URLError as e:
print("Server could not be found")
return None
try:
bs = BeautifulSoup(html.read(),'html.parser')
names = bs.find_all('span',{'class':'green'})
except AttributeError as e:
return None
return names
# get all levels of heading in the page.
def getAllHeadings(url):
""" get all levels of heading in the page function.
Input:
url: string contains URL of desired page.
Output:
headings:list of headings of each level.
"""
try:
html = urlopen(url)
except HTTPError as e:
return None
except URLError as e:
return None
try:
bs = BeautifulSoup(html.read(),'html.parser')
headings = bs.find_all(['h1','h2','h3','h4','h5','h6'])
except AttributeError as e:
return None
return headings
# URL for simple html web page
url = "http://www.pythonscraping.com/pages/warandpeace.html"
namesList = getNames(url)
print("List Content")
print(namesList)
# iterate through names
for name in namesList:
# seprate the text from the tag
print(name.get_text())
def getAllText(url):
    ''' Get every text written by each author.
    Input:
        url: a string containing the source page URL
    Output:
        text_list: a list containing all the text
    '''
| [
"urllib.request.urlopen"
] | [((554, 566), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (561, 566), False, 'from urllib.request import urlopen\n'), ((1176, 1188), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (1183, 1188), False, 'from urllib.request import urlopen\n')] |
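getAllText() above is declared with a docstring but no body. A possible completion, following the same error-handling pattern as getNames() and getAllHeadings(), is sketched below; the tag and class passed to find_all() are assumptions about the page being scraped, not taken from the original.

# Possible completion of getAllText() (sketch only; the selector is assumed).
def get_all_text(url):
    try:
        html = urlopen(url)
    except (HTTPError, URLError):
        return None
    try:
        bs = BeautifulSoup(html.read(), 'html.parser')
        text_list = [tag.get_text() for tag in bs.find_all('span', {'class': 'green'})]
    except AttributeError:
        return None
    return text_list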
import os
from flask import Flask, request, Response
import filetype
from tqdm import tqdm
from googletrans import Translator
import io
from base64 import encodebytes
from PIL import Image
import pymongo
import base64
from validation_model import validator
from classification_model import brahmi_classifier
from segmentation_module.segmentation import image_segmentation
from word_finder_module.possible_word_finder import searchForWords
from utils.util import make_response
app = Flask(__name__)
input_data = "input_data"
segmented_letters = "segmentation_module/segmented_letters"
# function to validate given image
# return True if given image is a brahmi inscription
# return False if given image is not a brahmi inscription
@app.route("/api/validatePlate", methods=["POST"])
def validatePlate():
try:
data = request.get_json()['image']
with open("input_data/plate.png", "wb") as fh:
fh.write(base64.b64decode(data))
# get prediction from validation model
flag = validator.validateImage()
if(flag == True):
os.remove("input_data/plate.png")
response = make_response('True', False, 200)
return Response(response=response, status=200, mimetype='application/json')
else:
os.remove("input_data/plate.png")
response = make_response('False', False, 403)
return Response(response=response, status=403, mimetype='application/json')
except:
response = make_response('Something went wrong', False, 404)
return Response(response=response, status=404, mimetype='application/json')
# function to get segmented letters with their meaning of given plate
# argument image type - base64
@app.route("/api/getLetters", methods=["POST"])
def translateLetters():
try:
data = request.get_json()['image']
with open("input_data/plate.png", "wb") as fh:
fh.write(base64.b64decode(data))
flag = image_segmentation()
# true if given image segmented correctly
if (flag == True):
result = {}
classify_letters = brahmi_classifier.classify_letters()
result['letter'] = classify_letters
test_path = os.path.join(segmented_letters)
segmented_images = []
for img in tqdm(os.listdir(test_path)):
if filetype.is_image(os.path.join(test_path, img)):
image_path = os.path.join(test_path, img)
pil_img = Image.open(image_path, mode='r')
byte_arr = io.BytesIO()
pil_img.save(byte_arr, format='PNG')
encoded_img = encodebytes(byte_arr.getvalue()).decode('ascii')
segmented_images.append(encoded_img)
os.remove(os.path.join(test_path, img))
result['images'] = segmented_images
response = make_response(result, True, 200)
os.remove("input_data/plate.png")
return Response(response=response, status=200, mimetype='application/json')
else:
test_path = os.path.join(segmented_letters)
for img in tqdm(os.listdir(test_path)):
if filetype.is_image(os.path.join(test_path, img)):
os.remove(os.path.join(test_path, img))
os.remove("input_data/plate.png")
response = make_response("Too much noise in image", True, 200)
return Response(response=response, status=200, mimetype='application/json')
except:
response = make_response('Something went wrong', False, 404)
return Response(response=response, status=404, mimetype='application/json')
# function to get possible words of plate
# argument letters of plate
@app.route('/api/getPossibleWords', methods=['POST'])
def getPossibleWords():
try:
data = request.get_json()['letters']
# url to mongoDB
myclient = pymongo.MongoClient("mongodb+srv://brahmilator_db:<EMAIL>[email protected]/brahmilator_db?retryWrites=true&w=majority")
mydb = myclient["brahmilator_database"]
column = mydb["words"]
words = searchForWords(column, data)
# true if possible word or words are found
if len(words) > 0:
possible_words = []
for key, value in words.items():
possible_words.append(key)
result = {}
result["possible_words"] = possible_words
result["possible_words_with_meaning"] = words
response = make_response(result, True, 200)
return Response(response=response, status=200, mimetype='application/json')
else:
response = make_response("Possible match not found", True, 404)
return Response(response=response, status=200, mimetype='application/json')
except:
response = make_response('Something went wrong', False, 404)
return Response(response=response, status=404, mimetype='application/json')
# function to translate into native language
# arguments words, current src language, destination language
@app.route("/api/translate", methods=["POST"])
def translate():
try:
data = request.get_json()['possible_words_with_meaning']
src_lan = request.get_json()['src_lan']
dest_lan = request.get_json()['dest_lan']
translator = Translator()
output = {}
for key, value in data.items():
temp = []
for word in value:
translate = translator.translate(word, src=src_lan, dest=dest_lan)
temp.append(translate.text)
output[key] = temp
result = {}
result['possible_words_with_meaning'] = output
result['src_lan'] = dest_lan
response = make_response(result, False, 200)
return Response(response=response, status=200, mimetype='application/json')
except:
response = make_response('Something went wrong', False, 404)
return Response(response=response, status=404, mimetype='application/json')
if __name__ == '__main__':
app.run('0.0.0.0') | [
"os.listdir",
"segmentation_module.segmentation.image_segmentation",
"PIL.Image.open",
"word_finder_module.possible_word_finder.searchForWords",
"flask.Flask",
"os.path.join",
"googletrans.Translator",
"base64.b64decode",
"validation_model.validator.validateImage",
"os.remove",
"io.BytesIO",
"flask.request.get_json",
"flask.Response",
"classification_model.brahmi_classifier.classify_letters",
"pymongo.MongoClient",
"utils.util.make_response"
] | [((485, 500), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (490, 500), False, 'from flask import Flask, request, Response\n'), ((1023, 1048), 'validation_model.validator.validateImage', 'validator.validateImage', ([], {}), '()\n', (1046, 1048), False, 'from validation_model import validator\n'), ((1983, 2003), 'segmentation_module.segmentation.image_segmentation', 'image_segmentation', ([], {}), '()\n', (2001, 2003), False, 'from segmentation_module.segmentation import image_segmentation\n'), ((3975, 4121), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb+srv://brahmilator_db:<EMAIL>[email protected]/brahmilator_db?retryWrites=true&w=majority"""'], {}), "(\n 'mongodb+srv://brahmilator_db:<EMAIL>[email protected]/brahmilator_db?retryWrites=true&w=majority'\n )\n", (3994, 4121), False, 'import pymongo\n'), ((4208, 4236), 'word_finder_module.possible_word_finder.searchForWords', 'searchForWords', (['column', 'data'], {}), '(column, data)\n', (4222, 4236), False, 'from word_finder_module.possible_word_finder import searchForWords\n'), ((5427, 5439), 'googletrans.Translator', 'Translator', ([], {}), '()\n', (5437, 5439), False, 'from googletrans import Translator\n'), ((5845, 5878), 'utils.util.make_response', 'make_response', (['result', '(False)', '(200)'], {}), '(result, False, 200)\n', (5858, 5878), False, 'from utils.util import make_response\n'), ((5894, 5962), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(200)', 'mimetype': '"""application/json"""'}), "(response=response, status=200, mimetype='application/json')\n", (5902, 5962), False, 'from flask import Flask, request, Response\n'), ((831, 849), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (847, 849), False, 'from flask import Flask, request, Response\n'), ((1088, 1121), 'os.remove', 'os.remove', (['"""input_data/plate.png"""'], {}), "('input_data/plate.png')\n", (1097, 1121), False, 'import os\n'), ((1145, 1178), 'utils.util.make_response', 'make_response', (['"""True"""', '(False)', '(200)'], {}), "('True', False, 200)\n", (1158, 1178), False, 'from utils.util import make_response\n'), ((1198, 1266), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(200)', 'mimetype': '"""application/json"""'}), "(response=response, status=200, mimetype='application/json')\n", (1206, 1266), False, 'from flask import Flask, request, Response\n'), ((1293, 1326), 'os.remove', 'os.remove', (['"""input_data/plate.png"""'], {}), "('input_data/plate.png')\n", (1302, 1326), False, 'import os\n'), ((1350, 1384), 'utils.util.make_response', 'make_response', (['"""False"""', '(False)', '(403)'], {}), "('False', False, 403)\n", (1363, 1384), False, 'from utils.util import make_response\n'), ((1404, 1472), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(403)', 'mimetype': '"""application/json"""'}), "(response=response, status=403, mimetype='application/json')\n", (1412, 1472), False, 'from flask import Flask, request, Response\n'), ((1505, 1554), 'utils.util.make_response', 'make_response', (['"""Something went wrong"""', '(False)', '(404)'], {}), "('Something went wrong', False, 404)\n", (1518, 1554), False, 'from utils.util import make_response\n'), ((1570, 1638), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(404)', 'mimetype': '"""application/json"""'}), "(response=response, status=404, mimetype='application/json')\n", (1578, 1638), False, 'from flask import Flask, request, Response\n'), ((1838, 1856), 
'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1854, 1856), False, 'from flask import Flask, request, Response\n'), ((2137, 2173), 'classification_model.brahmi_classifier.classify_letters', 'brahmi_classifier.classify_letters', ([], {}), '()\n', (2171, 2173), False, 'from classification_model import brahmi_classifier\n'), ((2247, 2278), 'os.path.join', 'os.path.join', (['segmented_letters'], {}), '(segmented_letters)\n', (2259, 2278), False, 'import os\n'), ((2933, 2965), 'utils.util.make_response', 'make_response', (['result', '(True)', '(200)'], {}), '(result, True, 200)\n', (2946, 2965), False, 'from utils.util import make_response\n'), ((2978, 3011), 'os.remove', 'os.remove', (['"""input_data/plate.png"""'], {}), "('input_data/plate.png')\n", (2987, 3011), False, 'import os\n'), ((3031, 3099), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(200)', 'mimetype': '"""application/json"""'}), "(response=response, status=200, mimetype='application/json')\n", (3039, 3099), False, 'from flask import Flask, request, Response\n'), ((3138, 3169), 'os.path.join', 'os.path.join', (['segmented_letters'], {}), '(segmented_letters)\n', (3150, 3169), False, 'import os\n'), ((3364, 3397), 'os.remove', 'os.remove', (['"""input_data/plate.png"""'], {}), "('input_data/plate.png')\n", (3373, 3397), False, 'import os\n'), ((3421, 3472), 'utils.util.make_response', 'make_response', (['"""Too much noise in image"""', '(True)', '(200)'], {}), "('Too much noise in image', True, 200)\n", (3434, 3472), False, 'from utils.util import make_response\n'), ((3492, 3560), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(200)', 'mimetype': '"""application/json"""'}), "(response=response, status=200, mimetype='application/json')\n", (3500, 3560), False, 'from flask import Flask, request, Response\n'), ((3592, 3641), 'utils.util.make_response', 'make_response', (['"""Something went wrong"""', '(False)', '(404)'], {}), "('Something went wrong', False, 404)\n", (3605, 3641), False, 'from utils.util import make_response\n'), ((3657, 3725), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(404)', 'mimetype': '"""application/json"""'}), "(response=response, status=404, mimetype='application/json')\n", (3665, 3725), False, 'from flask import Flask, request, Response\n'), ((3900, 3918), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (3916, 3918), False, 'from flask import Flask, request, Response\n'), ((4597, 4629), 'utils.util.make_response', 'make_response', (['result', '(True)', '(200)'], {}), '(result, True, 200)\n', (4610, 4629), False, 'from utils.util import make_response\n'), ((4649, 4717), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(200)', 'mimetype': '"""application/json"""'}), "(response=response, status=200, mimetype='application/json')\n", (4657, 4717), False, 'from flask import Flask, request, Response\n'), ((4755, 4807), 'utils.util.make_response', 'make_response', (['"""Possible match not found"""', '(True)', '(404)'], {}), "('Possible match not found', True, 404)\n", (4768, 4807), False, 'from utils.util import make_response\n'), ((4827, 4895), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(200)', 'mimetype': '"""application/json"""'}), "(response=response, status=200, mimetype='application/json')\n", (4835, 4895), False, 'from flask import Flask, request, Response\n'), ((4927, 4976), 'utils.util.make_response', 'make_response', (['"""Something went wrong"""', 
'(False)', '(404)'], {}), "('Something went wrong', False, 404)\n", (4940, 4976), False, 'from utils.util import make_response\n'), ((4992, 5060), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(404)', 'mimetype': '"""application/json"""'}), "(response=response, status=404, mimetype='application/json')\n", (5000, 5060), False, 'from flask import Flask, request, Response\n'), ((5258, 5276), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5274, 5276), False, 'from flask import Flask, request, Response\n'), ((5326, 5344), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5342, 5344), False, 'from flask import Flask, request, Response\n'), ((5375, 5393), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5391, 5393), False, 'from flask import Flask, request, Response\n'), ((5994, 6043), 'utils.util.make_response', 'make_response', (['"""Something went wrong"""', '(False)', '(404)'], {}), "('Something went wrong', False, 404)\n", (6007, 6043), False, 'from utils.util import make_response\n'), ((6059, 6127), 'flask.Response', 'Response', ([], {'response': 'response', 'status': '(404)', 'mimetype': '"""application/json"""'}), "(response=response, status=404, mimetype='application/json')\n", (6067, 6127), False, 'from flask import Flask, request, Response\n'), ((936, 958), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (952, 958), False, 'import base64\n'), ((1943, 1965), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (1959, 1965), False, 'import base64\n'), ((2342, 2363), 'os.listdir', 'os.listdir', (['test_path'], {}), '(test_path)\n', (2352, 2363), False, 'import os\n'), ((3199, 3220), 'os.listdir', 'os.listdir', (['test_path'], {}), '(test_path)\n', (3209, 3220), False, 'import os\n'), ((2403, 2431), 'os.path.join', 'os.path.join', (['test_path', 'img'], {}), '(test_path, img)\n', (2415, 2431), False, 'import os\n'), ((2467, 2495), 'os.path.join', 'os.path.join', (['test_path', 'img'], {}), '(test_path, img)\n', (2479, 2495), False, 'import os\n'), ((2526, 2558), 'PIL.Image.open', 'Image.open', (['image_path'], {'mode': '"""r"""'}), "(image_path, mode='r')\n", (2536, 2558), False, 'from PIL import Image\n'), ((2590, 2602), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2600, 2602), False, 'import io\n'), ((3260, 3288), 'os.path.join', 'os.path.join', (['test_path', 'img'], {}), '(test_path, img)\n', (3272, 3288), False, 'import os\n'), ((2830, 2858), 'os.path.join', 'os.path.join', (['test_path', 'img'], {}), '(test_path, img)\n', (2842, 2858), False, 'import os\n'), ((3321, 3349), 'os.path.join', 'os.path.join', (['test_path', 'img'], {}), '(test_path, img)\n', (3333, 3349), False, 'import os\n')] |
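From the client side, each endpoint expects a JSON body matching what it reads out of request.get_json(). The sketch below posts a base64-encoded image to /api/validatePlate; the host, port (Flask's default 5000), and file name are assumptions.

# Client sketch: POST a base64 image to the validation endpoint.
import base64
import requests

with open("plate.jpg", "rb") as f:
    payload = {"image": base64.b64encode(f.read()).decode("utf-8")}

resp = requests.post("http://localhost:5000/api/validatePlate", json=payload)
print(resp.status_code, resp.json())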
"""
Copyright (C) 2019 <NAME>, ETH Zurich
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import print_function
import keras
import tensorflow as tf
import keras.backend as K
from keras.losses import kullback_leibler_divergence
def categorical_loss(x1, x2):
return -tf.reduce_sum(x1 * tf.log(x2), axis=len(x2.get_shape()) - 1)
def absolute_error_loss(x1, x2):
return K.abs(x1 - x2)
def calculate_delta_errors(y_true, attention_weights, auxiliary_outputs, all_but_one_auxiliary_outputs,
loss_function):
error_with_all_features = loss_function(y_true, auxiliary_outputs)
delta_errors = []
for all_but_one_auxiliary_output in all_but_one_auxiliary_outputs:
error_without_one_feature = loss_function(y_true, all_but_one_auxiliary_output)
# The error without the feature is an indicator as to how potent the left-out feature is as a predictor.
delta_error = tf.maximum(error_without_one_feature - error_with_all_features, K.epsilon())
delta_errors.append(delta_error)
delta_errors = tf.stack(delta_errors, axis=-1)
shape = K.int_shape(delta_errors)
if len(shape) > 2:
delta_errors = K.squeeze(delta_errors, axis=-2)
delta_errors /= (K.sum(delta_errors, axis=-1, keepdims=True))
# Ensure correct format.
delta_errors = tf.clip_by_value(delta_errors, K.epsilon(), 1.0)
attention_weights = tf.clip_by_value(attention_weights, K.epsilon(), 1.0)
if len(attention_weights.shape) == 3:
attention_weights = tf.squeeze(attention_weights, axis=-1)
# NOTE: Without stop_gradient back-propagation would attempt to optimise the error_variance
# instead of/in addition to the distance between attention weights and Granger causality index,
# which is not desired.
delta_errors = tf.stop_gradient(delta_errors)
return delta_errors, attention_weights
def granger_causal_loss(y_true, y_pred, attention_weights, auxiliary_outputs, all_but_one_auxiliary_outputs,
loss_function):
delta_errors, attention_weights = calculate_delta_errors(y_true,
attention_weights,
auxiliary_outputs,
all_but_one_auxiliary_outputs,
loss_function)
return K.mean(kullback_leibler_divergence(delta_errors, attention_weights))
def repeat_output_loss(y_true, y_pred, outputs, main_loss):
main_loss_fn = keras.losses.get(main_loss)
all_outputs = tf.stack(outputs)
y_true = tf.ones([len(outputs), 1, 1]) * y_true
return K.sum(main_loss_fn(y_true, all_outputs), axis=0)
| [
"keras.losses.kullback_leibler_divergence",
"keras.losses.get",
"keras.backend.sum",
"keras.backend.squeeze",
"tensorflow.stop_gradient",
"tensorflow.squeeze",
"keras.backend.epsilon",
"tensorflow.log",
"keras.backend.abs",
"keras.backend.int_shape",
"tensorflow.stack"
] | [((1378, 1392), 'keras.backend.abs', 'K.abs', (['(x1 - x2)'], {}), '(x1 - x2)\n', (1383, 1392), True, 'import keras.backend as K\n'), ((2067, 2098), 'tensorflow.stack', 'tf.stack', (['delta_errors'], {'axis': '(-1)'}), '(delta_errors, axis=-1)\n', (2075, 2098), True, 'import tensorflow as tf\n'), ((2112, 2137), 'keras.backend.int_shape', 'K.int_shape', (['delta_errors'], {}), '(delta_errors)\n', (2123, 2137), True, 'import keras.backend as K\n'), ((2238, 2281), 'keras.backend.sum', 'K.sum', (['delta_errors'], {'axis': '(-1)', 'keepdims': '(True)'}), '(delta_errors, axis=-1, keepdims=True)\n', (2243, 2281), True, 'import keras.backend as K\n'), ((2813, 2843), 'tensorflow.stop_gradient', 'tf.stop_gradient', (['delta_errors'], {}), '(delta_errors)\n', (2829, 2843), True, 'import tensorflow as tf\n'), ((3596, 3623), 'keras.losses.get', 'keras.losses.get', (['main_loss'], {}), '(main_loss)\n', (3612, 3623), False, 'import keras\n'), ((3642, 3659), 'tensorflow.stack', 'tf.stack', (['outputs'], {}), '(outputs)\n', (3650, 3659), True, 'import tensorflow as tf\n'), ((2184, 2216), 'keras.backend.squeeze', 'K.squeeze', (['delta_errors'], {'axis': '(-2)'}), '(delta_errors, axis=-2)\n', (2193, 2216), True, 'import keras.backend as K\n'), ((2363, 2374), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (2372, 2374), True, 'import keras.backend as K\n'), ((2441, 2452), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (2450, 2452), True, 'import keras.backend as K\n'), ((2530, 2568), 'tensorflow.squeeze', 'tf.squeeze', (['attention_weights'], {'axis': '(-1)'}), '(attention_weights, axis=-1)\n', (2540, 2568), True, 'import tensorflow as tf\n'), ((3453, 3513), 'keras.losses.kullback_leibler_divergence', 'kullback_leibler_divergence', (['delta_errors', 'attention_weights'], {}), '(delta_errors, attention_weights)\n', (3480, 3513), False, 'from keras.losses import kullback_leibler_divergence\n'), ((1994, 2005), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (2003, 2005), True, 'import keras.backend as K\n'), ((1290, 1300), 'tensorflow.log', 'tf.log', (['x2'], {}), '(x2)\n', (1296, 1300), True, 'import tensorflow as tf\n')] |
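The key step above is that calculate_delta_errors() turns the per-feature increase in error into a normalised distribution, which the attention weights are then pulled towards through a KL divergence. A small NumPy sketch of that arithmetic, with invented numbers and outside the TensorFlow graph, may make it concrete.

# NumPy sketch of the delta-error normalisation (illustrative numbers only).
import numpy as np

error_with_all_features = 0.10
errors_without_one_feature = np.array([0.50, 0.12, 0.30])   # error when feature i is left out

delta = np.maximum(errors_without_one_feature - error_with_all_features, 1e-7)
delta /= delta.sum()                 # normalised importance per feature
print(delta)                         # approx [0.645, 0.032, 0.323]

attention = np.array([0.6, 0.1, 0.3])          # attention weights produced by the model
print(np.sum(delta * np.log(delta / attention)))   # KL(delta || attention), the training signal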
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=invalid-name, too-few-public-methods, bad-continuation
"""Test cases for the kokkos module"""
from __future__ import unicode_literals
from __future__ import print_function
import logging # pylint: disable=unused-import
import unittest
from helpers import centos, centos8, docker, ubuntu
from hpccm.building_blocks.kokkos import kokkos
class Test_kokkos(unittest.TestCase):
def setUp(self):
"""Disable logging output messages"""
logging.disable(logging.ERROR)
@ubuntu
@docker
def test_defaults_ubuntu(self):
"""Default kokkos building block"""
k = kokkos()
self.assertEqual(str(k),
r'''# Kokkos version 3.2.00
RUN apt-get update -y && \
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
gzip \
libhwloc-dev \
make \
tar \
wget && \
rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/tmp && wget -q -nc --no-check-certificate -P /var/tmp https://github.com/kokkos/kokkos/archive/3.2.00.tar.gz && \
mkdir -p /var/tmp && tar -x -f /var/tmp/3.2.00.tar.gz -C /var/tmp -z && \
mkdir -p /var/tmp/kokkos-3.2.00/build && cd /var/tmp/kokkos-3.2.00/build && cmake -DCMAKE_INSTALL_PREFIX=/usr/local/kokkos -DCMAKE_BUILD_TYPE=RELEASE -DKokkos_ARCH_VOLTA70=ON -DKokkos_ENABLE_CUDA=ON -DCMAKE_CXX_COMPILER=$(pwd)/../bin/nvcc_wrapper -DKokkos_ENABLE_HWLOC=ON /var/tmp/kokkos-3.2.00 && \
cmake --build /var/tmp/kokkos-3.2.00/build --target all -- -j$(nproc) && \
cmake --build /var/tmp/kokkos-3.2.00/build --target install -- -j$(nproc) && \
rm -rf /var/tmp/kokkos-3.2.00 /var/tmp/3.2.00.tar.gz
ENV PATH=/usr/local/kokkos/bin:$PATH''')
@centos
@docker
def test_defaults_centos(self):
"""Default kokkos building block"""
k = kokkos()
self.assertEqual(str(k),
r'''# Kokkos version 3.2.00
RUN yum install -y \
gzip \
hwloc-devel \
make \
tar \
wget && \
rm -rf /var/cache/yum/*
RUN mkdir -p /var/tmp && wget -q -nc --no-check-certificate -P /var/tmp https://github.com/kokkos/kokkos/archive/3.2.00.tar.gz && \
mkdir -p /var/tmp && tar -x -f /var/tmp/3.2.00.tar.gz -C /var/tmp -z && \
mkdir -p /var/tmp/kokkos-3.2.00/build && cd /var/tmp/kokkos-3.2.00/build && cmake -DCMAKE_INSTALL_PREFIX=/usr/local/kokkos -DCMAKE_BUILD_TYPE=RELEASE -DKokkos_ARCH_VOLTA70=ON -DKokkos_ENABLE_CUDA=ON -DCMAKE_CXX_COMPILER=$(pwd)/../bin/nvcc_wrapper -DKokkos_ENABLE_HWLOC=ON /var/tmp/kokkos-3.2.00 && \
cmake --build /var/tmp/kokkos-3.2.00/build --target all -- -j$(nproc) && \
cmake --build /var/tmp/kokkos-3.2.00/build --target install -- -j$(nproc) && \
rm -rf /var/tmp/kokkos-3.2.00 /var/tmp/3.2.00.tar.gz
ENV PATH=/usr/local/kokkos/bin:$PATH''')
@centos8
@docker
def test_defaults_centos8(self):
"""Default kokkos building block"""
k = kokkos()
self.assertEqual(str(k),
r'''# Kokkos version 3.2.00
RUN yum install -y dnf-utils && \
yum-config-manager --set-enabled powertools && \
yum install -y \
gzip \
hwloc-devel \
make \
tar \
wget && \
rm -rf /var/cache/yum/*
RUN mkdir -p /var/tmp && wget -q -nc --no-check-certificate -P /var/tmp https://github.com/kokkos/kokkos/archive/3.2.00.tar.gz && \
mkdir -p /var/tmp && tar -x -f /var/tmp/3.2.00.tar.gz -C /var/tmp -z && \
mkdir -p /var/tmp/kokkos-3.2.00/build && cd /var/tmp/kokkos-3.2.00/build && cmake -DCMAKE_INSTALL_PREFIX=/usr/local/kokkos -DCMAKE_BUILD_TYPE=RELEASE -DKokkos_ARCH_VOLTA70=ON -DKokkos_ENABLE_CUDA=ON -DCMAKE_CXX_COMPILER=$(pwd)/../bin/nvcc_wrapper -DKokkos_ENABLE_HWLOC=ON /var/tmp/kokkos-3.2.00 && \
cmake --build /var/tmp/kokkos-3.2.00/build --target all -- -j$(nproc) && \
cmake --build /var/tmp/kokkos-3.2.00/build --target install -- -j$(nproc) && \
rm -rf /var/tmp/kokkos-3.2.00 /var/tmp/3.2.00.tar.gz
ENV PATH=/usr/local/kokkos/bin:$PATH''')
@ubuntu
@docker
def test_check_and_repository(self):
"""Check and repository options"""
k = kokkos(check=True, repository=True, version='3.1.01')
self.assertEqual(str(k),
r'''# Kokkos version 3.1.01
RUN apt-get update -y && \
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
ca-certificates \
git \
libhwloc-dev \
make && \
rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/tmp && cd /var/tmp && git clone --depth=1 https://github.com/kokkos/kokkos.git kokkos && cd - && \
mkdir -p /var/tmp/kokkos/build && cd /var/tmp/kokkos/build && cmake -DCMAKE_INSTALL_PREFIX=/usr/local/kokkos -DCMAKE_BUILD_TYPE=RELEASE -DKokkos_ARCH_VOLTA70=ON -DKokkos_ENABLE_TESTS=ON -DKokkos_ENABLE_CUDA=ON -DCMAKE_CXX_COMPILER=$(pwd)/../bin/nvcc_wrapper -DKokkos_ENABLE_HWLOC=ON /var/tmp/kokkos && \
cmake --build /var/tmp/kokkos/build --target all -- -j$(nproc) && \
cmake --build /var/tmp/kokkos/build --target install -- -j$(nproc) && \
rm -rf /var/tmp/kokkos
ENV PATH=/usr/local/kokkos/bin:$PATH''')
@ubuntu
@docker
def test_runtime(self):
"""Runtime"""
k = kokkos()
r = k.runtime()
self.assertEqual(r,
r'''# Kokkos
COPY --from=0 /usr/local/kokkos /usr/local/kokkos
ENV PATH=/usr/local/kokkos/bin:$PATH''')
| [
"hpccm.building_blocks.kokkos.kokkos",
"logging.disable"
] | [((1083, 1113), 'logging.disable', 'logging.disable', (['logging.ERROR'], {}), '(logging.ERROR)\n', (1098, 1113), False, 'import logging\n'), ((1231, 1239), 'hpccm.building_blocks.kokkos.kokkos', 'kokkos', ([], {}), '()\n', (1237, 1239), False, 'from hpccm.building_blocks.kokkos import kokkos\n'), ((2416, 2424), 'hpccm.building_blocks.kokkos.kokkos', 'kokkos', ([], {}), '()\n', (2422, 2424), False, 'from hpccm.building_blocks.kokkos import kokkos\n'), ((3512, 3520), 'hpccm.building_blocks.kokkos.kokkos', 'kokkos', ([], {}), '()\n', (3518, 3520), False, 'from hpccm.building_blocks.kokkos import kokkos\n'), ((4697, 4750), 'hpccm.building_blocks.kokkos.kokkos', 'kokkos', ([], {'check': '(True)', 'repository': '(True)', 'version': '"""3.1.01"""'}), "(check=True, repository=True, version='3.1.01')\n", (4703, 4750), False, 'from hpccm.building_blocks.kokkos import kokkos\n'), ((5760, 5768), 'hpccm.building_blocks.kokkos.kokkos', 'kokkos', ([], {}), '()\n', (5766, 5768), False, 'from hpccm.building_blocks.kokkos import kokkos\n')] |
#!/usr/bin/python
#
# Random utility functions
#
# @author <NAME>
# @email <EMAIL>
#
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import sys
from asn1crypto import core as asn
from base64 import (
b64decode,
b64encode
)
from textwrap import wrap
def to_pem(bitstr, header=str(), footer=str()):
return '\n'.join([header] + wrap(b64encode(bitstr).decode('utf-8'), width=64) + [footer])
def from_pem(pem_bytes):
b64_encoded = str()
record = False
for line in pem_bytes.decode('utf-8').split('\n'):
if record and 'END' not in line:
b64_encoded += line.strip()
elif 'BEGIN' in line:
record = True
elif 'END' in line:
record = False
assert not record and len(b64_encoded) > 0, 'Invalid certificate file'
return b64decode(b64_encoded)
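# Round-trip sketch (illustrative, not part of the original module): to_pem() wraps the
# base64 payload between the given marker lines, and from_pem() strips the BEGIN/END
# marker lines and base64-decodes the rest, so for non-empty data and typical payloads
# from_pem(to_pem(raw, '-----BEGIN X-----', '-----END X-----').encode('utf-8')) == raw.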
def prettify(name, asn1obj, space=4, depth=0, file=sys.stdout):
padding = ' '*space*depth
# Parse the object if it hasn't been
if isinstance(asn1obj, (asn.ParsableOctetString, asn.ParsableOctetBitString)):
asn1obj = asn1obj.parsed
# Set the name
if len(name) > 0:
name = str(name).rstrip('=') + '='
else:
name = type(asn1obj).__name__ + '='
# Print based on object type/structure
if isinstance(asn1obj, asn.Choice):
prettify(name, asn1obj.chosen, space=space, depth=depth, file=file)
elif isinstance(asn1obj, (asn.Sequence, asn.Set)):
print(padding + name + '{', file=file)
for k in asn1obj:
prettify(k, asn1obj[k], space=space, depth=(depth + 1), file=file)
print(padding + '}', file=file)
elif isinstance(asn1obj, (asn.SequenceOf, asn.SetOf)):
print(padding + name + '[', file=file)
for item in asn1obj:
prettify('', item, space=space, depth=(depth + 1), file=file)
print(padding + ']', file=file)
elif isinstance(asn1obj, asn.ObjectIdentifier):
if asn1obj.dotted in asn1obj._map:
print(padding + name + asn1obj._map[asn1obj.dotted], file=file)
return padding + name + asn1obj.dotted
elif isinstance(asn1obj, (asn.OctetBitString, asn.OctetString)):
print(padding + name + asn1obj.native.hex(), file=file)
elif isinstance(asn1obj, (asn.Null, asn.Void)):
print(padding + name + type(asn1obj).__name__, file=file)
else:
print(padding + name + str(asn1obj.native), file=file)
| [
"base64.b64encode",
"base64.b64decode"
] | [((850, 872), 'base64.b64decode', 'b64decode', (['b64_encoded'], {}), '(b64_encoded)\n', (859, 872), False, 'from base64 import b64decode, b64encode\n'), ((390, 407), 'base64.b64encode', 'b64encode', (['bitstr'], {}), '(bitstr)\n', (399, 407), False, 'from base64 import b64decode, b64encode\n')] |
from collections import namedtuple
Point = namedtuple("Point", ["x", "y"])
class Rect(namedtuple("Rect", ["left", "top", "right", "bottom"])):
"""
Describes a rectangle given a left, top, right, and bottom value.
"""
@property
def width(self) -> int:
return self.right - self.left
@property
def height(self) -> int:
return self.bottom - self.top
@property
def topleft(self) -> Point:
return Point(self.left, self.top)
@property
def bottomright(self) -> Point:
return Point(self.right, self.bottom)
@property
def size(self) -> Point:
return Point(self.width, self.height)
def __str__(self):
return "(%d, %d) (%d, %d)" % (self.left, self.top, self.right, self.bottom)
| [
"collections.namedtuple"
] | [((44, 75), 'collections.namedtuple', 'namedtuple', (['"""Point"""', "['x', 'y']"], {}), "('Point', ['x', 'y'])\n", (54, 75), False, 'from collections import namedtuple\n'), ((89, 143), 'collections.namedtuple', 'namedtuple', (['"""Rect"""', "['left', 'top', 'right', 'bottom']"], {}), "('Rect', ['left', 'top', 'right', 'bottom'])\n", (99, 143), False, 'from collections import namedtuple\n')] |
from pkg_resources import (
DistributionNotFound as _DistributionNotFound,
get_distribution as _get_distribution,
)
__project__ = "sappy"
try:
__version__ = _get_distribution(__project__).version
except _DistributionNotFound:
__version__ = "(local)"
VERSION = "{0} v{1}".format(__project__, __version__)
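# e.g. VERSION == "sappy v1.2.3" for an installed distribution, or "sappy v(local)"
# when running from a source checkout that is not installed (version value illustrative).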
| [
"pkg_resources.get_distribution"
] | [((173, 203), 'pkg_resources.get_distribution', '_get_distribution', (['__project__'], {}), '(__project__)\n', (190, 203), True, 'from pkg_resources import DistributionNotFound as _DistributionNotFound, get_distribution as _get_distribution\n')] |
# Enter your code here. Read input from STDIN. Print output to STDOUT
import re
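# The checks below accept a string only if it has exactly 10 alphanumeric characters,
# at least 2 uppercase letters, at least 3 digits, and no character that repeats anywhere
# (the backreference r'.*(.).*\1+.*' matches any repetition and marks the input invalid).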
for i in range(int(input())):
N = input().strip()
if N.isalnum() and len(N) == 10:
if bool(re.search(r'(.*[A-Z]){2,}',N)) and bool(re.search(r'(.*[0-9]){3,}',N)):
if re.search(r'.*(.).*\1+.*',N):
print ('Invalid')
else:
print ('Valid')
else:
print ('Invalid')
else:
print ('Invalid')
| [
"re.search"
] | [((275, 304), 're.search', 're.search', (['""".*(.).*\\\\1+.*"""', 'N'], {}), "('.*(.).*\\\\1+.*', N)\n", (284, 304), False, 'import re\n'), ((188, 217), 're.search', 're.search', (['"""(.*[A-Z]){2,}"""', 'N'], {}), "('(.*[A-Z]){2,}', N)\n", (197, 217), False, 'import re\n'), ((228, 257), 're.search', 're.search', (['"""(.*[0-9]){3,}"""', 'N'], {}), "('(.*[0-9]){3,}', N)\n", (237, 257), False, 'import re\n')] |
from unittest import TestCase
from usbcore import *
class TestRxClockDataRecovery(TestCase):
def test_basic_recovery(self):
"""
This test covers basic clock and data recovery.
"""
def get_output():
"""
Record data output when line_state_valid is asserted.
"""
valid = yield dut.line_state_valid
if valid == 1:
dj = yield dut.line_state_dj
dk = yield dut.line_state_dk
se0 = yield dut.line_state_se0
se1 = yield dut.line_state_se1
out = "%d%d%d%d" % (dj, dk, se0, se1)
return {
"1000" : "j",
"0100" : "k",
"0010" : "0",
"0001" : "1",
}[out]
else:
return ""
def stim(glitch=-1):
out_seq = ""
clock = 0
for bit in seq + "0":
for i in range(4):
if clock != glitch:
yield usbp_raw.eq({'j':1,'k':0,'0':0,'1':1}[bit])
yield usbn_raw.eq({'j':0,'k':1,'0':0,'1':1}[bit])
yield
clock += 1
out_seq += yield from get_output()
self.assertEqual(out_seq, "0" + seq)
test_sequences = [
"j",
"k",
"0",
"1",
"jk01",
"jjjkj0j1kjkkk0k10j0k00011j1k1011"
]
for seq in test_sequences:
with self.subTest(seq=seq):
usbp_raw = Signal()
usbn_raw = Signal()
dut = RxClockDataRecovery(usbp_raw, usbn_raw)
run_simulation(dut, stim(), vcd_name="vcd/test_basic_recovery_%s.vcd" % seq)
long_test_sequences = [
"jjjkj0j1kjkkk0k10j0k00011j1k1011",
"kkkkk0k0kjjjk0kkkkjjjkjkjkjjj0kj"
]
for seq in long_test_sequences:
for glitch in range(0, 32, 8):
with self.subTest(seq=seq, glitch=glitch):
usbp_raw = Signal()
usbn_raw = Signal()
dut = RxClockDataRecovery(usbp_raw, usbn_raw)
run_simulation(dut, stim(glitch), vcd_name="vcd/test_basic_recovery_%s_%d.vcd" % (seq, glitch))
class TestRxNRZIDecoder(TestCase):
def test_nrzi(self):
def send(valid, value):
valid += "_"
value += "_"
output = ""
for i in range(len(valid)):
yield i_valid.eq(valid[i] == '-')
yield i_dj.eq(value[i] == 'j')
yield i_dk.eq(value[i] == 'k')
yield i_se0.eq(value[i] == '_')
yield
o_valid = yield dut.o_valid
if o_valid:
data = yield dut.o_data
se0 = yield dut.o_se0
out = "%d%d" % (data, se0)
output += {
"10" : "1",
"00" : "0",
"01" : "_",
"11" : "_"
}[out]
return output
test_vectors = [
dict(
# USB2 Spec, 7.1.8
valid = "-----------------",
value = "jkkkjjkkjkjjkjjjk",
output = "10110101000100110"
),
dict(
# USB2 Spec, 7.1.9.1
valid = "--------------------",
value = "jkjkjkjkkkkkkkjjjjkk",
output = "10000000111111011101"
),
dict(
# USB2 Spec, 7.1.9.1 (added pipeline stalls)
valid = "------___--------------",
value = "jkjkjkkkkjkkkkkkkjjjjkk",
output = "10000000111111011101"
),
dict(
# USB2 Spec, 7.1.9.1 (added pipeline stalls 2)
valid = "-------___-------------",
value = "jkjkjkjjjjkkkkkkkjjjjkk",
output = "10000000111111011101"
),
dict(
# USB2 Spec, 7.1.9.1 (added pipeline stalls 3)
valid = "-------___-------------",
value = "jkjkjkjkkkkkkkkkkjjjjkk",
output = "10000000111111011101"
),
dict(
# USB2 Spec, 7.1.9.1 (added pipeline stalls, se0 glitch)
valid = "-------___-------------",
value = "jkjkjkj__kkkkkkkkjjjjkk",
output = "10000000111111011101"
),
dict(
# Captured setup packet
valid = "------------------------------------",
value = "jkjkjkjkkkjjjkkjkjkjkjkjkjkjkkjkj__j",
output = "100000001101101000000000000001000__1"
),
dict(
# Captured setup packet (pipeline stalls)
valid = "-___----___--------___-___-___-___----------------___-___---",
value = "jjjjkjkjjkkkjkkkjjjjjkkkkkkkkkjjjjkjkjkjkjkjkjkkjkkkkj_____j",
output = "100000001101101000000000000001000__1"
)
]
def stim(valid, value, output):
actual_output = yield from send(valid, value)
self.assertEqual(actual_output, output)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_valid = Signal()
i_dj = Signal()
i_dk = Signal()
i_se0 = Signal()
dut = RxNRZIDecoder(i_valid, i_dj, i_dk, i_se0)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_nrzi_%d.vcd" % i)
i += 1
class TestRxBitstuffRemover(TestCase):
def test_bitstuff(self):
def send(valid, value):
valid += "_"
value += "_"
output = ""
for i in range(len(valid)):
yield i_valid.eq(valid[i] == '-')
yield i_data.eq(value[i] == '1')
yield i_se0.eq(value[i] == '_')
yield
o_valid = yield dut.o_valid
bitstuff_error = yield dut.o_bitstuff_error
if o_valid or bitstuff_error:
data = yield dut.o_data
se0 = yield dut.o_se0
out = "%d%d%d" % (data, se0, bitstuff_error)
output += {
"100" : "1",
"101" : "e",
"000" : "0",
"010" : "_",
"110" : "_"
}[out]
return output
test_vectors = [
dict(
# Basic bitstuff scenario
valid = "-------",
value = "1111110",
output = "111111"
),
dict(
# Basic bitstuff scenario (valid gap)
valid = "---___----",
value = "111___1110",
output = "111111"
),
dict(
# Basic bitstuff scenario (valid gap)
valid = "---___----",
value = "1111111110",
output = "111111"
),
dict(
# Basic bitstuff scenario (valid gap)
valid = "---___----",
value = "1110001110",
output = "111111"
),
dict(
# Basic bitstuff scenario (valid gap)
valid = "---___-____---",
value = "11100010000110",
output = "111111"
),
dict(
# Basic bitstuff error
valid = "-------",
value = "1111111",
output = "111111e"
),
dict(
# Multiple bitstuff scenario
valid = "---------------------",
value = "111111011111101111110",
output = "111111111111111111"
),
dict(
# Mixed bitstuff error
valid = "---------------------------------",
value = "111111111111101111110111111111111",
output = "111111e111111111111111111e11111"
),
dict(
# Idle, Packet, Idle
valid = "-------------------------------",
value = "111110000000111111011101__11111",
output = "11111000000011111111101__11111"
),
dict(
# Idle, Packet, Idle, Packet, Idle
valid = "--------------------------------------------------------------",
value = "111110000000111111011101__11111111110000000111111011101__11111",
output = "11111000000011111111101__111111e111000000011111111101__11111"
),
dict(
# Captured setup packet (no bitstuff)
valid = "------------------------------------",
value = "100000001101101000000000000001000__1",
output = "100000001101101000000000000001000__1"
)
]
def stim(valid, value, output):
actual_output = yield from send(valid, value)
self.assertEqual(actual_output, output)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_valid = Signal()
i_data = Signal()
i_se0 = Signal()
dut = RxBitstuffRemover(i_valid, i_data, i_se0)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_bitstuff_%d.vcd" % i)
i += 1
class TestRxPacketDetect(TestCase):
def test_packet_detect(self):
test_vectors = [
dict(
# SE0, Idle
valid = "------------------------------",
value = "______________1111111111111111",
output_1 = " ",
output_2 = "_______________________________"
),
dict(
# Idle, Packet, Idle
valid = "------------------------------",
value = "11111000000011111111101__11111",
output_1 = " S E ",
output_2 = "_____________-----------_______"
),
dict(
# Idle, Packet, Idle (pipeline stall)
valid = "-------------___-----------------",
value = "11111000000011111111111101__11111",
output_1 = " S E ",
output_2 = "_____________--------------_______"
),
dict(
# Idle, Packet, Idle (pipeline stalls)
valid = "-----___---___-----___-----------------",
value = "11111111000___000011111111111101__11111",
output_1 = " S E ",
output_2 = "___________________--------------_______"
),
dict(
# Idle, Packet, Idle, Packet, Idle
valid = "------------------------------------------------------------",
value = "11111000000011111111101__1111111111000000011111111101__11111",
output_1 = " S E S E ",
output_2 = "_____________-----------___________________-----------_______"
),
dict(
# Idle, Short Sync Packet, Idle
valid = "----------------------------",
value = "111110000011111111101__11111",
output_1 = " S E ",
output_2 = "___________-----------_______"
),
dict(
# Idle Glitch
valid = "------------------------------",
value = "11111111110011111111_1111__111",
output_1 = " ",
output_2 = "_______________________________"
),
]
def send(valid, value):
valid += "_"
value += "_"
output_1 = ""
output_2 = ""
for i in range(len(valid)):
yield i_valid.eq(valid[i] == '-')
yield i_data.eq(value[i] == '1')
yield i_se0.eq(value[i] == '_')
yield
pkt_start = yield dut.o_pkt_start
pkt_end = yield dut.o_pkt_end
out = "%d%d" % (pkt_start, pkt_end)
output_1 += {
"10" : "S",
"01" : "E",
"00" : " ",
}[out]
pkt_active = yield dut.o_pkt_active
out = "%d" % (pkt_active)
output_2 += {
"1" : "-",
"0" : "_",
}[out]
return output_1, output_2
def stim(valid, value, output_1, output_2):
actual_output_1, actual_output_2 = yield from send(valid, value)
self.assertEqual(actual_output_1, output_1)
self.assertEqual(actual_output_2, output_2)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_valid = Signal()
i_data = Signal()
i_se0 = Signal()
dut = RxPacketDetect(i_valid, i_data, i_se0)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_packet_det_%d.vcd" % i)
i += 1
class TestRxShifter(TestCase):
def test_shifter(self):
test_vectors = [
dict(
# basic shift in
width = 8,
reset = "-______________",
valid = "_--------------",
value = "001110100101010",
full = "_________------",
output = [0x2E]
),
dict(
# basic shift in (short pipeline stall)
width = 8,
reset = "-_______________",
valid = "_----_----------",
value = "0011100100101010",
full = "__________------",
output = [0x2E]
),
dict(
# basic shift in (long pipeline stall)
width = 8,
reset = "-_________________",
valid = "_----___----------",
value = "001110000100101010",
full = "____________------",
output = [0x2E]
),
dict(
# basic shift in (multiple long pipeline stall)
width = 8,
reset = "-__________________________",
valid = "_-___---___-___--___-------",
value = "000001110000111101110101010",
full = "_____________________------",
output = [0x2E]
),
dict(
# multiple resets
width = 8,
reset = "-______________-______________",
valid = "_--------------_--------------",
value = "010111000001101001110100101010",
full = "_________-------________------",
output = [0b00011101, 0x2E]
),
dict(
# multiple resets (tight timing)
width = 8,
reset = "-________-______________",
valid = "_-----------------------",
value = "000101001111000010011101",
full = "_________-________------",
output = [0b10010100, 0b01000011]
),
]
def send(reset, valid, value):
full = ""
output = []
for i in range(len(valid)):
yield i_reset.eq(reset[i] == '-')
yield i_valid.eq(valid[i] == '-')
yield i_data.eq(value[i] == '1')
yield
o_full = yield dut.o_full
put = yield dut.o_put
if put:
last_output = yield dut.o_output
output.append(last_output)
out = "%d" % (o_full)
full += {
"1" : "-",
"0" : "_",
}[out]
return full, output
def stim(width, reset, valid, value, full, output):
actual_full, actual_output = yield from send(reset, valid, value)
self.assertEqual(actual_full, full)
self.assertEqual(actual_output, output)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_valid = Signal()
i_data = Signal()
i_reset = Signal()
dut = RxShifter(vector["width"], i_valid, i_data, i_reset)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_shifter_%d.vcd" % i)
i += 1
class TestRxCrcChecker(TestCase):
def test_shifter(self):
def send(reset, valid, value):
crc_good = ""
for i in range(len(valid)):
yield i_reset.eq(reset[i] == '-')
yield i_valid.eq(valid[i] == '-')
yield i_data.eq(value[i] == '1')
yield
o_crc_good = yield dut.o_crc_good
out = "%d" % (o_crc_good)
crc_good += {
"1" : "-",
"0" : "_",
}[out]
return crc_good
test_vectors = [
dict(
# USB2 token with good CRC5 (1)
width = 5,
polynomial = 0b00101,
initial = 0b11111,
residual = 0b01100,
reset = "-___________________",
valid = "_----------------___",
value = "00000000000001000000",
crc_good = "_______-__________--"
),
dict(
# USB2 token with good CRC5 and pipeline stalls (1)
width = 5,
polynomial = 0b00101,
initial = 0b11111,
residual = 0b01100,
reset = "-_______________________________",
valid = "_-___-___------------___-___-___",
value = "00000011100000000001011100000000",
crc_good = "_____________-________________--"
),
dict(
# USB2 token with bad CRC5 (1)
width = 5,
polynomial = 0b00101,
initial = 0b11111,
residual = 0b01100,
reset = "-___________________",
valid = "_----------------___",
value = "00010000000001000000",
crc_good = "______-________-____"
),
dict(
# USB2 token with good CRC5 (2)
width = 5,
polynomial = 0b00101,
initial = 0b11111,
residual = 0b01100,
reset = "-___________________",
valid = "_----------------___",
value = "00000011011011101000",
crc_good = "_______-__________--"
),
dict(
# USB2 token with bad CRC5 (2)
width = 5,
polynomial = 0b00101,
initial = 0b11111,
residual = 0b01100,
reset = "-___________________",
valid = "_----------------___",
value = "00010011011011101000",
crc_good = "______-_____________"
),
dict(
# Two USB2 token with good CRC5 (1,2)
width = 5,
polynomial = 0b00101,
initial = 0b11111,
residual = 0b01100,
reset = "-________________________-___________________",
valid = "_----------------_________----------------___",
value = "000000000000010000000000000000011011011101000",
crc_good = "_______-__________---------_____-__________--"
),
dict(
# USB2 data with good CRC16 (1)
width = 16,
polynomial = 0b1000000000000101,
initial = 0b1111111111111111,
residual = 0b1000000000001101,
reset = "-______________________________________________________________________________________________",
valid = "_--------_--------_--------_--------_--------_--------_--------_--------_----------------______",
value = "00000000100110000000000000001000000000000000000000000000000001000000000001011101100101001000010",
crc_good = "__________________________________________________________________________________________-----"
),
dict(
# USB2 data with bad CRC16 (1)
width = 16,
polynomial = 0b1000000000000101,
initial = 0b1111111111111111,
residual = 0b1000000000001101,
reset = "-______________________________________________________________________________________________",
valid = "_--------_--------_--------_--------_--------_--------_--------_--------_----------------______",
value = "00000000100110000000000000001000000000010000000000000000000001000000000001011101100101001000010",
crc_good = "_______________________________________________________________________________________________"
),
]
def stim(width, polynomial, initial, residual, reset, valid, value, crc_good):
actual_crc_good = yield from send(reset, valid, value)
self.assertEqual(actual_crc_good, crc_good)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_valid = Signal()
i_data = Signal()
i_reset = Signal()
dut = RxCrcChecker(
vector["width"],
vector["polynomial"],
vector["initial"],
vector["residual"],
i_valid,
i_data,
i_reset)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_crc_%d.vcd" % i)
i += 1
class TestRxPacketDecode(TestCase):
def test_pkt_decode(self):
def send(valid, value):
pid = []
token_payload = []
data_payload = []
data = []
pkt_good = []
for i in range(len(valid)):
yield i_valid.eq(valid[i] == '-')
yield i_data.eq(value[i] == '1' or value[i] == 'B')
yield i_se0.eq(value[i] == '_')
yield i_bitstuff_error.eq(value[i] == 'B')
yield
o_pkt_start = yield dut.o_pkt_start
o_pkt_pid = yield dut.o_pkt_pid
o_pkt_token_payload = yield dut.o_pkt_token_payload
o_pkt_data = yield dut.o_pkt_data
o_pkt_data_put = yield dut.o_pkt_data_put
o_pkt_good = yield dut.o_pkt_good
o_pkt_end = yield dut.o_pkt_end
if o_pkt_data_put:
data += [o_pkt_data]
if o_pkt_end:
pid += [o_pkt_pid]
token_payload += [o_pkt_token_payload]
data_payload.append(data)
data = []
pkt_good += [o_pkt_good]
return pid, token_payload, data_payload, pkt_good
test_vectors = [
dict(
# USB2 SOF token
valid = "---------------------------------------",
value = "1100000001101001011000011011000010__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 SOF token - pipeline stalls
valid = "----------_--------_-----------_----------",
value = "1100000001_10100101_10000110110_00010__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 SOF token - eop dribble 1
valid = "----------_--------_-----------_-----_-----",
value = "1100000001_10100101_10000110110_00010_1__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 SOF token - eop dribble 6
valid = "----------_--------_-----------_-----_-----------",
value = "1100000001_10100101_10000110110_00010_111111__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 SOF token - bad pid
valid = "----------_--------_-----------_----------",
value = "1100000001_10100100_10000110110_00010__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [0]
),
dict(
# USB2 SOF token - bad crc5
valid = "----------_--------_-----------_----------",
value = "1100000001_10100101_10000110110_00011__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [0]
),
dict(
# USB2 SOF token - bitstuff error
valid = "----------_-----_____-_____--_-----------_----------",
value = "1100000001_10100_____B_____01_10000110110_00010__111",
pid = [0b0101],
token_payload = [865],
data_payload = [[]],
pkt_good = [0]
),
dict(
# USB2 ACK handshake
valid = "----------_-------------",
value = "1100000001_01001011__111",
pid = [0b0010],
token_payload = [0],
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 ACK handshake - late bitstuff error
valid = "----------_-------------",
value = "1100000001_0100101B__111",
pid = [0b0010],
token_payload = [0],
data_payload = [[]],
pkt_good = [0]
),
dict(
# USB2 ACK handshake - pid error
valid = "----------_-------------",
value = "1100000001_01001111__111",
pid = [0b0010],
token_payload = [0],
data_payload = [[]],
pkt_good = [0]
),
dict(
# USB2 ACK handshake - EOP dribble 1
valid = "----------_--------_-----",
value = "1100000001_01001011_1__111",
pid = [0b0010],
token_payload = [0],
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 ACK handshake - EOP dribble 6
valid = "----------_--------_-----------",
value = "1100000001_01001011_111111__111",
pid = [0b0010],
token_payload = [1792], # token payload doesn't matter in this test, but dribble triggers it
data_payload = [[]],
pkt_good = [1]
),
dict(
# USB2 data with good CRC16 (1)
valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------",
value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001___1111",
pid = [0b0011],
token_payload = [1664], # token payload is a "don't care" for this test
data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
pkt_good = [1]
),
dict(
# USB2 data with good CRC16 - 1 eop dribble
valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_-_-----",
value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001_1___1111",
pid = [0b0011],
token_payload = [1664], # token payload is a "don't care" for this test
data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
pkt_good = [1]
),
dict(
# USB2 data with good CRC16 - 6 eop dribble
valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------_------",
value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001_111111___1111",
pid = [0b0011],
token_payload = [1664], # token payload is a "don't care" for this test
data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
pkt_good = [1]
),
# TODO: need a better way to handle eop dribble with bitstuff error :(
#dict(
# # USB2 data with good CRC16 - 1 eop dribble with bitstuff error
# valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_-_-----",
# value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001_B___1111",
# pid = [0b0011],
# token_payload = [1664], # token payload is a "don't care" for this test
# data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
# pkt_good = [1]
#),
#dict(
# # USB2 data with good CRC16 - 6 eop dribble with bitstuff error
# valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------_------",
# value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001_11111B___1111",
# pid = [0b0011],
# token_payload = [1664], # token payload is a "don't care" for this test
# data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
# pkt_good = [1]
#),
dict(
# USB2 data with bad CRC16 (1)
valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------",
value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101011___1111",
pid = [0b0011],
token_payload = [1664], # token payload is a "don't care" for this test
data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0xD4]],
pkt_good = [0]
),
dict(
# USB2 data with late bitstuff error
valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------",
value = "1100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_0010100B___1111",
pid = [0b0011],
token_payload = [1664], # token payload is a "don't care" for this test
data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
pkt_good = [0]
),
dict(
# USB2 data with bad pid
valid = "----------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------",
value = "1100000001_11000001_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001___1111",
pid = [0b0011],
token_payload = [1664], # token payload is a "don't care" for this test
data_payload = [[0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
pkt_good = [0]
),
dict(
# USB2 SETUP and DATA
valid = "----------_--------_-----------_----------___---------------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_--------_------",
value = "1100000001_10110100_00000000000_01000__111___111111100000001_11000011_00000001_01100000_00000000_10000000_00000000_00000000_00000010_00000000_10111011_00101001___1111",
pid = [0b1101, 0b0011],
token_payload = [0, 1664],
data_payload = [[], [0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0xdd, 0x94]],
pkt_good = [1, 1]
),
]
def stim(valid, value, pid, token_payload, data_payload, pkt_good):
actual_pid, actual_token_payload, actual_data_payload, actual_pkt_good = yield from send(valid, value)
self.assertEqual(actual_pid, pid)
self.assertEqual(actual_token_payload, token_payload)
self.assertEqual(actual_data_payload, data_payload)
self.assertEqual(actual_pkt_good, pkt_good)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_valid = Signal()
i_data = Signal()
i_se0 = Signal()
i_bitstuff_error = Signal()
dut = RxPacketDecode(
i_valid,
i_data,
i_se0,
i_bitstuff_error)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_decode_%d.vcd" % i)
i += 1
class TestTxShifter(TestCase):
def test_shifter(self):
test_vectors = [
dict(
# basic shift out
width = 8,
data = [0b10011000],
put = "-______________",
shift = "_--------------",
output = " 00011001 "
),
dict(
# basic shift out - pipeline stall
width = 8,
data = [0b10011000],
put = "-_______________",
shift = "_-_-------------",
output = " 000011001 "
),
dict(
# basic shift out - pipeline stall
width = 8,
data = [0b10011000],
put = "-________________",
shift = "__-_-------------",
output = " 0000011001 "
),
dict(
# basic shift out - pipeline stall
width = 8,
data = [0b10011000],
put = "-________________________",
shift = "____-_------___-___------",
output = " 000000011001111 "
),
dict(
# basic shift out - pipeline stalls
width = 8,
data = [0b10011000],
put = "-______________________________________",
shift = "_-___-___-___-___-___-___-___-___------",
output = " 00000000011111111000000001111 "
),
dict(
# basic shift out multiple
width = 8,
data = [0b10011000, 0b11001011],
put = "-________-___________",
shift = "_--------_--------___",
output = " 00011001 11010011 "
),
dict(
# basic shift out multiple
width = 8,
data = [0b10011000, 0b11001011],
put = "-_________-___________",
shift = "_--------__--------___",
output = " 00011001 11010011 "
),
]
def send(shift, put, data):
output = ""
for i in range(len(shift)):
do_put = put[i] == '-'
if do_put:
yield i_data.eq(data.pop(0))
yield i_put.eq(do_put)
yield i_shift.eq(shift[i] == '-')
yield
o_empty = yield dut.o_empty
o_data = yield dut.o_data
out = "%d%d" % (o_empty, o_data)
output += {
"00" : "0",
"01" : "1",
"10" : " ",
"11" : " ",
}[out]
return output
def stim(width, data, put, shift, output):
actual_output = yield from send(shift, put, data)
self.assertEqual(actual_output, output)
i = 0
for vector in test_vectors:
with self.subTest(i=i, vector=vector):
i_put = Signal()
i_shift = Signal()
i_data = Signal(vector["width"])
dut = TxShifter(vector["width"], i_put, i_shift, i_data)
run_simulation(dut, stim(**vector), vcd_name="vcd/test_tx_shifter_%d.vcd" % i)
i += 1
def create_tester(dut_type, **def_args):
def run(self, **test_args):
name = self.id()
self.inputs = dict()
self.outputs = dict()
self.params = set()
self.dut_args = dict()
# parse tester definition
for key in def_args:
if not key.startswith("i_") and not key.startswith("o_"):
self.params.add(key)
for key in def_args:
if key.startswith("i_"):
width = def_args[key][0]
if isinstance(width, str):
width = test_args[width]
self.inputs[key] = Signal(def_args[key][0])
if key.startswith("o_"):
self.outputs[key] = None
# create dut
for p in self.params:
self.dut_args[p] = test_args[p]
for i in self.inputs.keys():
self.dut_args[i] = self.inputs[i]
dut = dut_type(**self.dut_args)
# gather outputs
for o in self.outputs.keys():
self.outputs[o] = getattr(dut, o)
# calc num clocks
clocks = 0
for i in set(self.inputs.keys()) | set(self.outputs.keys()):
if isinstance(test_args[i], str):
clocks = max(clocks, len(test_args[i]))
# decode stimulus
def decode(c):
try:
return int(c, 16)
except:
pass
if c == "-":
return 1
return 0
# error message debug helper
def to_waveform(sigs):
output = ""
for name in sigs.keys():
output += "%20s: %s\n" % (name, sigs[name])
return output
actual_output = dict()
# setup stimulus
def stim():
for signal_name in self.outputs.keys():
actual_output[signal_name] = ""
for i in range(clocks):
for input_signal in self.inputs.keys():
yield self.inputs[input_signal].eq(decode(test_args[input_signal][i]))
yield
for output_signal in self.outputs.keys():
actual_value = yield self.outputs[output_signal]
actual_output[output_signal] += str(actual_value)
if isinstance(test_args[output_signal], tuple):
if test_args[output_signal][0][i] == '*':
expected_value = decode(test_args[output_signal][1].pop(0))
elif test_args[output_signal] is not None:
if test_args[output_signal][i] != ' ':
expected_value = decode(test_args[output_signal][i])
details = "\n"
if actual_value != expected_value:
details += " Expected: %s\n" % (test_args[output_signal])
details += " " + (" " * i) + "^\n"
details += to_waveform(actual_output)
self.assertEqual(actual_value, expected_value, msg = ("%s:%s:%d" % (name, output_signal, i)) + details)
# run simulation
run_simulation(dut, stim(), vcd_name="vcd/%s.vcd" % name)
return actual_output
return run
def module_tester(dut_type, **def_args):
def wrapper(class_type):
class_type.do = create_tester(dut_type, **def_args)
return class_type
return wrapper
@module_tester(
TxCrcGenerator,
width = None,
polynomial = None,
initial = None,
i_reset = (1,),
i_data = (1,),
i_shift = (1,),
o_crc = ("width",)
)
class TestTxCrcGenerator(TestCase):
def test_token_crc5_zeroes(self):
self.do(
width = 5,
polynomial = 0b00101,
initial = 0b11111,
i_reset = "-_______________",
i_data = " 00000000000 ",
i_shift = "__-----------___",
o_crc = " 222"
)
def test_token_crc5_zeroes_alt(self):
self.do(
width = 5,
polynomial = 0b00101,
initial = 0b11111,
i_reset = "-______________",
i_data = " 00000000000 ",
i_shift = "_-----------___",
o_crc = " 222"
)
def test_token_crc5_nonzero(self):
self.do(
width = 5,
polynomial = 0b00101,
initial = 0b11111,
i_reset = "-______________",
i_data = " 01100000011 ",
i_shift = "_-----------___",
o_crc = " ccc"
)
def test_token_crc5_nonzero_stall(self):
self.do(
width = 5,
polynomial = 0b00101,
initial = 0b11111,
i_reset = "-_____________________________",
i_data = " 0 1 111101110111000011 ",
i_shift = "_-___-___-___-___-___------___",
o_crc = " ccc"
)
def test_data_crc16_nonzero(self):
self.do(
width = 16,
polynomial = 0b1000000000000101,
initial = 0b1111111111111111,
i_reset = "-________________________________________________________________________",
i_data = " 00000001 01100000 00000000 10000000 00000000 00000000 00000010 00000000 ",
i_shift = "_--------_--------_--------_--------_--------_--------_--------_--------_",
o_crc =(" *", [0x94dd])
)
@module_tester(
TxBitstuffer,
i_valid = (1,),
i_oe = (1,),
i_data = (1,),
i_se0 = (1,),
o_stall = (1,),
o_data = (1,),
o_se0 = (1,),
o_oe = (1,)
)
class TestTxBitstuffer(TestCase):
def test_passthrough(self):
self.do(
i_valid = "_----------",
i_oe = "_--------__",
i_data = "_--___---__",
i_se0 = "___________",
o_stall = "___________",
o_data = "__--___---_",
o_se0 = "___________",
o_oe = "__--------_",
)
def test_passthrough_se0(self):
self.do(
i_valid = "_----------",
i_oe = "_--------__",
i_data = "_--___---__",
i_se0 = "____--_____",
o_stall = "___________",
o_data = "__--___---_",
o_se0 = "_____--____",
o_oe = "__--------_",
)
def test_bitstuff(self):
self.do(
i_valid = "_-----------",
i_oe = "_---------__",
i_data = "_---------__",
i_se0 = "____________",
o_stall = "_______-____",
o_data = "__------_--_",
o_se0 = "____________",
o_oe = "__---------_",
)
def test_bitstuff_input_stall(self):
self.do(
i_valid = "_-___-___-___-___-___-___-___-___-__",
i_oe = "_-----------------------------------",
i_data = "_-----------------------------------",
i_se0 = "____________________________________",
o_stall = "______________________----__________",
o_data = "__------------------------____------",
o_se0 = "____________________________________",
o_oe = "__----------------------------------",
)
def test_bitstuff_se0(self):
self.do(
i_valid = "_-----------__",
i_oe = "_-----------__",
i_data = "_---------____",
i_se0 = "__________--__",
o_stall = "_______-______",
o_data = "__------_--___",
o_se0 = "___________---",
o_oe = "__------------",
)
def test_bitstuff_at_eop(self):
self.do(
i_valid = "_---------____",
i_oe = "_---------____",
i_data = "_-------______",
i_se0 = "________--____",
o_stall = "_______-______",
o_data = "__------______",
o_se0 = "_________-----",
o_oe = "__------------",
)
def test_multi_bitstuff(self):
self.do(
i_valid = "_----------------",
i_oe = "_----------------",
i_data = "_----------------",
i_se0 = "_________________",
o_stall = "_______-______-__",
o_data = "__------_------_-",
o_se0 = "_________________",
o_oe = "__---------------",
)
@module_tester(
TxNrziEncoder,
i_valid = (1,),
i_oe = (1,),
i_data = (1,),
i_se0 = (1,),
o_usbp = (1,),
o_usbn = (1,),
o_oe = (1,)
)
class TestTxNrziEncoder(TestCase):
def test_setup_token(self):
self.do(
i_valid = "_--------------------------------------",
i_oe = "_----------------------------------____",
i_data = "_0000000110110100000000000000100000____",
i_se0 = "_________________________________--____",
o_oe = "___-----------------------------------_",
o_usbp = " _-_-_-___---__-_-_-_-_-_-_-__-_-__- ",
o_usbn = " -_-_-_---___--_-_-_-_-_-_-_--_-____ ",
)
def data(data):
"""
Converts array of 8-bit ints into string of 0s and 1s.
"""
output = ""
for b in data:
output += ("{0:08b}".format(b))[::-1]
return output
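# Example (illustrative): data([0x80]) == "00000001" -- each byte is emitted LSB-first,
# matching the bit order USB uses on the wire.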
def crc16(input_data):
import crcmod
crc16_func = crcmod.mkCrcFun(0x18005, initCrc=0x0000, xorOut=0b1111111111111111, rev=True)
crc16 = crc16_func(bytearray(input_data))
hex(crc16)
return data([crc16 & 0xff, (crc16 >> 8) & 0xff])
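# Note: 0x18005 is the USB data CRC-16 generator polynomial x^16 + x^15 + x^2 + 1 (crcmod
# needs the explicit leading term); the 16-bit CRC is returned as a bit string, low byte
# first and LSB-first per byte, via data() above.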
def nrzi(data, clock_width=4):
"""
Converts string of 0s and 1s into NRZI encoded string.
"""
def toggle_state(state):
if state == 'j':
return 'k'
if state == 'k':
return 'j'
return state
    state = "k"  # initial line state before the first encoded bit
output = ""
for bit in data:
# only toggle the state on '0'
if bit == '0':
state = toggle_state(state)
if bit in "jk_":
state = bit
output += (state * clock_width)
return output
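# Example (illustrative): starting from the initial 'k' state, nrzi("1100", clock_width=1)
# returns "kkjk" -- a '1' keeps the current line state and a '0' toggles it; 'j', 'k' and
# '_' force the line state directly.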
def line(data):
oe = ""
usbp = ""
usbn = ""
for bit in data:
oe += "-"
if bit == "j":
usbp += "-"
usbn += "_"
if bit == "k":
usbp += "_"
usbn += "-"
if bit == "_":
usbp += "_"
usbn += "_"
return (oe, usbp, usbn)
def sync():
return "kjkjkjkk"
def pid(value):
return data([value | ((0b1111 ^ value) << 4)])
def eop():
return "__j"
def idle():
return r"\s+"
class TestUsbFsTx_longer(TestCase):
def do(self, clocks, pid, token_payload, data, expected_output):
self.output = ""
name = self.id()
# create dut
i_bit_strobe = Signal(1)
i_pkt_start = Signal(1)
i_pid = Signal(4)
i_token_payload = Signal(11)
i_data_valid = Signal(1)
i_data_payload = Signal(8)
dut = UsbFsTx(i_bit_strobe, i_pkt_start, i_pid, i_token_payload, i_data_valid, i_data_payload)
def clock():
yield i_data_valid.eq(len(data) > 0)
if len(data) > 0:
yield i_data_payload.eq(data[0])
else:
yield i_data_payload.eq(0)
yield
o_data_get = yield dut.o_data_get
if o_data_get:
data.pop(0)
oe = yield dut.o_oe
usbp = yield dut.o_usbp
usbn = yield dut.o_usbn
if oe == 0:
self.output += " "
else:
if usbp == 0 and usbn == 0:
self.output += "_"
elif usbp == 1 and usbn == 0:
self.output += "j"
elif usbp == 0 and usbn == 1:
self.output += "k"
else:
self.output += "!"
# setup stimulus
def stim():
# initiate packet transmission
yield i_pid.eq(pid)
yield i_token_payload.eq(token_payload)
yield i_pkt_start.eq(1)
yield from clock()
yield i_pid.eq(0)
yield i_token_payload.eq(0)
yield i_pkt_start.eq(0)
# pump the clock and collect output
for i in range(clocks):
yield i_bit_strobe.eq(1)
yield from clock()
yield i_bit_strobe.eq(0)
yield from clock()
yield from clock()
yield from clock()
import re
m = re.fullmatch(idle() + expected_output + idle(), self.output)
if m:
pass
else:
raise AssertionError("Packet not found:\n %s\n %s" % (expected_output, self.output))
# run simulation
run_simulation(dut, stim(), vcd_name="vcd/%s.vcd" % name)
def test_ack_handshake(self):
self.do(
clocks = 100,
pid = 2,
token_payload = 0,
data = [],
expected_output = nrzi(sync() + pid(2) + eop())
)
def test_empty_data(self):
self.do(
clocks = 100,
pid = 3,
token_payload = 0,
data = [],
expected_output = nrzi(sync() + pid(3) + data([0x00, 0x00]) + eop())
)
def test_setup_data(self):
payload = [0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00]
self.do(
clocks = 200,
pid = 3,
token_payload = 0,
data = payload,
expected_output = nrzi(sync() + pid(3) + data(payload) + crc16(payload) + eop())
)
def test_setup_data_bitstuff(self):
payload = [0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40, 0x3F]
self.do(
clocks = 200,
pid = 3,
token_payload = 0,
data = payload,
expected_output = nrzi(sync() + pid(3) + data([0x80, 0x06, 0x00, 0x01, 0x00, 0x00, 0x40]) + "111111000" +crc16(payload) + eop())
)
| [
"crcmod.mkCrcFun"
] | [((51952, 52009), 'crcmod.mkCrcFun', 'crcmod.mkCrcFun', (['(98309)'], {'initCrc': '(0)', 'xorOut': '(65535)', 'rev': '(True)'}), '(98309, initCrc=0, xorOut=65535, rev=True)\n', (51967, 52009), False, 'import crcmod\n')] |
import logging
import os
from argparse import ArgumentParser
import importlib_metadata as metadata
from dynaconf import Dynaconf, loaders
log = logging.getLogger()
parser = ArgumentParser()
parser.add_argument("-c", "--config", help="Specify config YAML file location.")
parser.add_argument(
"-p",
"--peerfile",
help="Specify file location with peerfetch values such as current peerid.",
)
args, unknown = parser.parse_known_args()
# default path locations
DEFAULT_WORK_DIR: str = "/workspace"
DEFAULT_DATA_DIR: str = "./data"
CONFIG_FILE_PATH: str = args.config or "config.yaml"
PEER_FILE_PATH: str = args.peerfile or ".peerjsrc"
SECRETS_FILE_PATH: str = "secrets.yaml"
CONFIG_DEFAULTS_FILE_PATH: str = "config.defaults.yaml"
# actual local file locations
__config_file: str = None
__secrets_file: str = None
# package version
__version__: str = metadata.version("ambianic-edge")
# reference to the system global config instance
__config: Dynaconf = "Not Initialized Yet!"
def get_root_config() -> Dynaconf:
return __config
def get_config_defaults_file() -> str:
return CONFIG_DEFAULTS_FILE_PATH
def get_config_file() -> str:
"""Return path to main config file for this instance.
This is typically the file with baseline configuration settings
across a fleet of similar devices.
"""
return __config_file
def get_local_config_file() -> str:
"""Return path to local custom config file. E.g. config.local.yaml.
This is the file where config changes via API calls are saved.
This is also the file where manual local changes can be applied and saved to override values from other config files."""
(cfg_file_head, cfg_file_ext) = os.path.splitext(get_config_file())
local_file_path = cfg_file_head + ".local" + cfg_file_ext
return local_file_path
def get_peerid_file():
"""Return path to the file where peerfetch Peer ID for this device is stored."""
return __peer_file
def get_secrets_file() -> str:
if __secrets_file:
return __secrets_file
return os.path.join(get_work_dir(), SECRETS_FILE_PATH)
def get_all_config_files():
conf_files = os.environ.get("AMBIANIC_CONFIG_FILES")
if conf_files:
file_list = conf_files.split(",")
file_list = list(map(lambda s: s.strip(), file_list))
return file_list
def init_config() -> Dynaconf:
log.debug("Configuration: begin init_config()")
global __config
conf_files = os.environ.get("AMBIANIC_CONFIG_FILES", None)
if conf_files is None:
conf_files = ",".join(
[
get_config_defaults_file(),
get_secrets_file(),
get_peerid_file(),
get_config_file(),
]
)
os.environ["AMBIANIC_CONFIG_FILES"] = conf_files
os.environ["SETTINGS_FILE_FOR_DYNACONF"] = conf_files
log.info(f"Loading config settings from: {conf_files}")
__config = Dynaconf(
# settings_files read from SETTINGS_FILE_FOR_DYNACONF
environments=False,
)
log.debug(f"Config settings: {get_root_config().as_dict()}")
return __config
def reload_config() -> Dynaconf:
"""Reloads settings with latest config file updates."""
__config.reload()
log.info("Configuration: reloaded.")
log.debug(f"Config settings: {get_root_config().as_dict()}")
def save_config():
"""Persist configuration settings to local config file."""
# ref: https://dynaconf.readthedocs.io/en/docs_223/guides/advanced_usage.html#exporting
# ref: https://dynaconf.readthedocs.io/en/docs_223/reference/dynaconf.loaders.html#module-dynaconf.loaders.yaml_loader
file_to_save = os.environ.get("AMBIANIC_SAVE_CONFIG_TO", None)
if not file_to_save:
file_to_save = get_local_config_file()
root_config = get_root_config()
data = root_config.as_dict()
log.info(f"Saving config settings to: {file_to_save}")
loaders.write(file_to_save, data)
def get_work_dir() -> str:
"""Retrieve the ambianic working directory"""
env_work_dir = os.environ.get("AMBIANIC_DIR", os.getcwd())
if not env_work_dir:
env_work_dir = DEFAULT_WORK_DIR
return env_work_dir
__config_file = os.path.join(get_work_dir(), CONFIG_FILE_PATH)
__peer_file = os.path.join(get_work_dir(), PEER_FILE_PATH)
# initial config init
init_config()
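# Usage sketch (illustrative, not part of the original module): callers read settings via
# get_root_config(), call reload_config() after external file edits, and call save_config()
# to persist API-driven changes into the override file returned by get_local_config_file().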
| [
"logging.getLogger",
"argparse.ArgumentParser",
"os.environ.get",
"os.getcwd",
"importlib_metadata.version",
"dynaconf.loaders.write",
"dynaconf.Dynaconf"
] | [((146, 165), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (163, 165), False, 'import logging\n'), ((176, 192), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (190, 192), False, 'from argparse import ArgumentParser\n'), ((868, 901), 'importlib_metadata.version', 'metadata.version', (['"""ambianic-edge"""'], {}), "('ambianic-edge')\n", (884, 901), True, 'import importlib_metadata as metadata\n'), ((2144, 2183), 'os.environ.get', 'os.environ.get', (['"""AMBIANIC_CONFIG_FILES"""'], {}), "('AMBIANIC_CONFIG_FILES')\n", (2158, 2183), False, 'import os\n'), ((2454, 2499), 'os.environ.get', 'os.environ.get', (['"""AMBIANIC_CONFIG_FILES"""', 'None'], {}), "('AMBIANIC_CONFIG_FILES', None)\n", (2468, 2499), False, 'import os\n'), ((2932, 2960), 'dynaconf.Dynaconf', 'Dynaconf', ([], {'environments': '(False)'}), '(environments=False)\n', (2940, 2960), False, 'from dynaconf import Dynaconf, loaders\n'), ((3664, 3711), 'os.environ.get', 'os.environ.get', (['"""AMBIANIC_SAVE_CONFIG_TO"""', 'None'], {}), "('AMBIANIC_SAVE_CONFIG_TO', None)\n", (3678, 3711), False, 'import os\n'), ((3916, 3949), 'dynaconf.loaders.write', 'loaders.write', (['file_to_save', 'data'], {}), '(file_to_save, data)\n', (3929, 3949), False, 'from dynaconf import Dynaconf, loaders\n'), ((4079, 4090), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4088, 4090), False, 'import os\n')] |
import os
import sys
def parse_file(path):
fd = open(path)
i = 0
spaces = 0
tabs = 0
for i,line in enumerate(fd):
spaces += line.count(' ')
tabs += line.count('\t')
fd.close()
return (spaces, tabs, i + 1)
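# Example (illustrative): for a file containing "a b\n\tc\n", parse_file() returns
# (1, 1, 2) -- one space, one tab, two lines.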
path = 'test.txt'
print(path)
if os.path.exists(path):
print("a")
print(parse_file(path))
| [
"os.path.exists"
] | [((292, 312), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (306, 312), False, 'import os\n')] |
# -*- coding: UTF-8 -*-
from __future__ import print_function
import argparse
import json
import logging
import os
import pickle
import sys
import numpy as np
import torch
from tensorboardX import SummaryWriter
import evaluation_vatex
from util.msrvtt_dataloader import Dataset2BertRes, collate_data
import util.data_provider as data
from basic.bigfile import BigFile
from basic.constant import ROOT_PATH
from basic.util import AverageMeter, LogCollector, read_dict
from model_part.model_attention import get_model
from util.text2vec import get_text_encoder
from util.vatex_dataloader import Dataset2BertI3d
from util.vocab import Vocabulary
def parse_args():
# Hyper Parameters
parser = argparse.ArgumentParser()
parser.add_argument('--runpath', type=str, default='/home/fengkai/dataset/')
parser.add_argument('--overwrite', type=int, default=0, choices=[0,1], help='overwrite existed file. (default: 0)')
parser.add_argument('--log_step', default=10, type=int, help='Number of steps to print and record the log.')
parser.add_argument('--batch_size', default=128, type=int, help='Size of a training mini-batch.')
parser.add_argument('--workers', default=5, type=int, help='Number of data loader workers.')
parser.add_argument('--logger_name', default='/home/fengkai/PycharmProjects/dual_encoding/result/fengkai_msrvtt/dual_encoding_concate_full_dp_0.2_measure_cosine/vocab_word_vocab_5_word_dim_768_text_rnn_size_1024_text_norm_True_kernel_sizes_2-3-4_num_512/visual_feat_dim_1024_visual_rnn_size_1024_visual_norm_True_kernel_sizes_2-3-4-5_num_512/mapping_text_0-2048_img_0-2048/loss_func_mrl_margin_0.2_direction_all_max_violation_False_cost_style_sum/optimizer_adam_lr_0.0001_decay_0.99_grad_clip_2.0_val_metric_recall/msrvtt_attention_1', help='Path to save the model and Tensorboard log.')
parser.add_argument('--checkpoint_name', default='model_best.pth.tar', type=str, help='name of checkpoint (default: model_best.pth.tar)')
parser.add_argument('--n_caption', type=int, default=20, help='number of captions of each image/video (default: 1)')
args = parser.parse_args()
return args
def load_config(config_path):
variables = {}
exec(compile(open(config_path, "rb").read(), config_path, 'exec'), variables)
return variables['config']
def main():
opt = parse_args()
print(json.dumps(vars(opt), indent=2))
rootpath = opt.runpath
n_caption = opt.n_caption
resume = os.path.join(opt.logger_name, opt.checkpoint_name)
if not os.path.exists(resume):
logging.info(resume + ' not exists.')
sys.exit(0)
    # Load the trained model checkpoint
checkpoint = torch.load(resume)
start_epoch = checkpoint['epoch']
best_rsum = checkpoint['best_rsum']
print("=> loaded checkpoint '{}' (epoch {}, best_rsum {})"
.format(resume, start_epoch, best_rsum))
options = checkpoint['opt']
if not hasattr(options, 'concate'):
setattr(options, "concate", "full")
    # File paths
visual_feat_path = os.path.join(rootpath, 'msrvtt/msrvtt10ktest/FeatureData')
caption_files = os.path.join(rootpath, 'msrvtt/msrvtt10ktest/TextData/bert_text')
visual_feat_test = BigFile(visual_feat_path)
video_frames_test = read_dict(os.path.join(visual_feat_path, 'video2frames.txt'))
# Construct the model
model = get_model(options.model)(options)
model.load_state_dict(checkpoint['model'])
model.Eiters = checkpoint['Eiters']
model.val_start()
# set data loader
dset = Dataset2BertRes(caption_files, visual_feat_test, video_frames_test, videoEmbed_num = 32)
data_loaders_val = torch.utils.data.DataLoader(dataset=dset,
batch_size=opt.batch_size,
shuffle=False,
pin_memory=True,
num_workers=opt.workers,
collate_fn = collate_data)
video_embs, cap_embs, video_ids = evaluation_vatex.encode_data(model, data_loaders_val, opt.log_step, logging.info)
    # Visualization analysis of the embeddings
# tensor_show = torch.cat((video_embs.data, torch.ones(len(video_embs), 1)), 1)
# with SummaryWriter(log_dir='./results', comment='embedding——show') as writer:
# writer.add_embedding(
# video_embs.data,
# label_img=cap_embs.data,
# global_step=1)
c2i_all_errors = evaluation_vatex.cal_error(video_embs, cap_embs, options.measure)
# caption retrieval
(r1i, r5i, r10i, medri, meanri) = evaluation_vatex.t2i(c2i_all_errors, n_caption=n_caption)
t2i_map_score = evaluation_vatex.t2i_map(c2i_all_errors, n_caption=n_caption)
# video retrieval
(r1, r5, r10, medr, meanr) = evaluation_vatex.i2t(c2i_all_errors, n_caption=n_caption)
i2t_map_score = evaluation_vatex.i2t_map(c2i_all_errors, n_caption=n_caption)
print(" * Text to Video:")
print(" * r_1_5_10, medr, meanr: {}".format([round(r1i, 1), round(r5i, 1), round(r10i, 1), round(medri, 1), round(meanri, 1)]))
print(" * recall sum: {}".format(round(r1i+r5i+r10i, 1)))
print(" * mAP: {}".format(round(t2i_map_score, 3)))
print(" * "+'-'*10)
    # video to text retrieval
print(" * Video to text:")
print(" * r_1_5_10, medr, meanr: {}".format([round(r1, 1), round(r5, 1), round(r10, 1), round(medr, 1), round(meanr, 1)]))
print(" * recall sum: {}".format(round(r1+r5+r10, 1)))
print(" * mAP: {}".format(round(i2t_map_score, 3)))
print(" * "+'-'*10)
if __name__ == '__main__':
main()
| [
"os.path.exists",
"evaluation_vatex.t2i",
"sys.exit",
"argparse.ArgumentParser",
"util.msrvtt_dataloader.Dataset2BertRes",
"torch.load",
"evaluation_vatex.i2t_map",
"os.path.join",
"basic.bigfile.BigFile",
"evaluation_vatex.t2i_map",
"evaluation_vatex.encode_data",
"evaluation_vatex.cal_error",
"model_part.model_attention.get_model",
"torch.utils.data.DataLoader",
"logging.info",
"evaluation_vatex.i2t"
] | [((701, 726), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (724, 726), False, 'import argparse\n'), ((2457, 2507), 'os.path.join', 'os.path.join', (['opt.logger_name', 'opt.checkpoint_name'], {}), '(opt.logger_name, opt.checkpoint_name)\n', (2469, 2507), False, 'import os\n'), ((2639, 2657), 'torch.load', 'torch.load', (['resume'], {}), '(resume)\n', (2649, 2657), False, 'import torch\n'), ((3001, 3059), 'os.path.join', 'os.path.join', (['rootpath', '"""msrvtt/msrvtt10ktest/FeatureData"""'], {}), "(rootpath, 'msrvtt/msrvtt10ktest/FeatureData')\n", (3013, 3059), False, 'import os\n'), ((3081, 3146), 'os.path.join', 'os.path.join', (['rootpath', '"""msrvtt/msrvtt10ktest/TextData/bert_text"""'], {}), "(rootpath, 'msrvtt/msrvtt10ktest/TextData/bert_text')\n", (3093, 3146), False, 'import os\n'), ((3170, 3195), 'basic.bigfile.BigFile', 'BigFile', (['visual_feat_path'], {}), '(visual_feat_path)\n', (3177, 3195), False, 'from basic.bigfile import BigFile\n'), ((3499, 3589), 'util.msrvtt_dataloader.Dataset2BertRes', 'Dataset2BertRes', (['caption_files', 'visual_feat_test', 'video_frames_test'], {'videoEmbed_num': '(32)'}), '(caption_files, visual_feat_test, video_frames_test,\n videoEmbed_num=32)\n', (3514, 3589), False, 'from util.msrvtt_dataloader import Dataset2BertRes, collate_data\n'), ((3611, 3770), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', ([], {'dataset': 'dset', 'batch_size': 'opt.batch_size', 'shuffle': '(False)', 'pin_memory': '(True)', 'num_workers': 'opt.workers', 'collate_fn': 'collate_data'}), '(dataset=dset, batch_size=opt.batch_size,\n shuffle=False, pin_memory=True, num_workers=opt.workers, collate_fn=\n collate_data)\n', (3638, 3770), False, 'import torch\n'), ((3982, 4068), 'evaluation_vatex.encode_data', 'evaluation_vatex.encode_data', (['model', 'data_loaders_val', 'opt.log_step', 'logging.info'], {}), '(model, data_loaders_val, opt.log_step, logging\n .info)\n', (4010, 4068), False, 'import evaluation_vatex\n'), ((4424, 4489), 'evaluation_vatex.cal_error', 'evaluation_vatex.cal_error', (['video_embs', 'cap_embs', 'options.measure'], {}), '(video_embs, cap_embs, options.measure)\n', (4450, 4489), False, 'import evaluation_vatex\n'), ((4554, 4611), 'evaluation_vatex.t2i', 'evaluation_vatex.t2i', (['c2i_all_errors'], {'n_caption': 'n_caption'}), '(c2i_all_errors, n_caption=n_caption)\n', (4574, 4611), False, 'import evaluation_vatex\n'), ((4632, 4693), 'evaluation_vatex.t2i_map', 'evaluation_vatex.t2i_map', (['c2i_all_errors'], {'n_caption': 'n_caption'}), '(c2i_all_errors, n_caption=n_caption)\n', (4656, 4693), False, 'import evaluation_vatex\n'), ((4750, 4807), 'evaluation_vatex.i2t', 'evaluation_vatex.i2t', (['c2i_all_errors'], {'n_caption': 'n_caption'}), '(c2i_all_errors, n_caption=n_caption)\n', (4770, 4807), False, 'import evaluation_vatex\n'), ((4828, 4889), 'evaluation_vatex.i2t_map', 'evaluation_vatex.i2t_map', (['c2i_all_errors'], {'n_caption': 'n_caption'}), '(c2i_all_errors, n_caption=n_caption)\n', (4852, 4889), False, 'import evaluation_vatex\n'), ((2520, 2542), 'os.path.exists', 'os.path.exists', (['resume'], {}), '(resume)\n', (2534, 2542), False, 'import os\n'), ((2552, 2589), 'logging.info', 'logging.info', (["(resume + ' not exists.')"], {}), "(resume + ' not exists.')\n", (2564, 2589), False, 'import logging\n'), ((2598, 2609), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2606, 2609), False, 'import sys\n'), ((3231, 3281), 'os.path.join', 'os.path.join', (['visual_feat_path', '"""video2frames.txt"""'], 
{}), "(visual_feat_path, 'video2frames.txt')\n", (3243, 3281), False, 'import os\n'), ((3322, 3346), 'model_part.model_attention.get_model', 'get_model', (['options.model'], {}), '(options.model)\n', (3331, 3346), False, 'from model_part.model_attention import get_model\n')] |
from typing import Optional, Tuple
from alembic import command
from alembic.config import Config
from alembic.environment import EnvironmentContext
from alembic.script import ScriptDirectory
from sqlalchemy import create_engine, exc, inspect
from sqlalchemy.engine import URL, Engine
from sqlalchemy.schema import CreateSchema
from fief.paths import ALEMBIC_CONFIG_FILE
class WorkspaceDatabaseError(Exception):
pass
class WorkspaceDatabaseConnectionError(WorkspaceDatabaseError):
def __init__(self, message: str) -> None:
self.message = message
class WorkspaceDatabase:
def migrate(self, database_url: URL, schema_name: str) -> str:
try:
engine = self.get_engine(database_url, schema_name)
with engine.begin() as connection:
config = self._get_alembic_base_config()
config.attributes["configure_logger"] = False
config.attributes["connection"] = connection
command.upgrade(config, "head")
except exc.OperationalError as e:
raise WorkspaceDatabaseConnectionError(str(e)) from e
return self.get_latest_revision()
def get_engine(self, database_url: URL, schema_name: str) -> Engine:
self._ensure_schema(database_url, schema_name)
connect_args = {}
dialect_name = database_url.get_dialect().name
if dialect_name == "postgresql":
connect_args["options"] = f"-csearch_path={schema_name}"
elif dialect_name == "mysql":
database_url = database_url.set(database=schema_name)
return create_engine(database_url, connect_args=connect_args)
def check_connection(self, database_url: URL) -> Tuple[bool, Optional[str]]:
try:
engine = create_engine(database_url)
with engine.begin():
return True, None
except exc.OperationalError as e:
return False, str(e)
def get_latest_revision(self) -> str:
config = self._get_alembic_base_config()
script = ScriptDirectory.from_config(config)
with EnvironmentContext(config, script) as environment:
return str(environment.get_head_revision())
def _get_alembic_base_config(self) -> Config:
return Config(ALEMBIC_CONFIG_FILE, ini_section="workspace")
def _ensure_schema(self, database_url: URL, schema_name: str):
engine = create_engine(database_url, connect_args={"connect_timeout": 5})
dialect_name = database_url.get_dialect().name
if dialect_name == "sqlite":
return
inspector = inspect(engine)
schemas = inspector.get_schema_names()
if schema_name not in schemas:
with engine.begin() as connection:
connection.execute(CreateSchema(schema_name))
| [
"alembic.script.ScriptDirectory.from_config",
"alembic.config.Config",
"sqlalchemy.create_engine",
"alembic.environment.EnvironmentContext",
"alembic.command.upgrade",
"sqlalchemy.inspect",
"sqlalchemy.schema.CreateSchema"
] | [((1606, 1660), 'sqlalchemy.create_engine', 'create_engine', (['database_url'], {'connect_args': 'connect_args'}), '(database_url, connect_args=connect_args)\n', (1619, 1660), False, 'from sqlalchemy import create_engine, exc, inspect\n'), ((2056, 2091), 'alembic.script.ScriptDirectory.from_config', 'ScriptDirectory.from_config', (['config'], {}), '(config)\n', (2083, 2091), False, 'from alembic.script import ScriptDirectory\n'), ((2278, 2330), 'alembic.config.Config', 'Config', (['ALEMBIC_CONFIG_FILE'], {'ini_section': '"""workspace"""'}), "(ALEMBIC_CONFIG_FILE, ini_section='workspace')\n", (2284, 2330), False, 'from alembic.config import Config\n'), ((2416, 2480), 'sqlalchemy.create_engine', 'create_engine', (['database_url'], {'connect_args': "{'connect_timeout': 5}"}), "(database_url, connect_args={'connect_timeout': 5})\n", (2429, 2480), False, 'from sqlalchemy import create_engine, exc, inspect\n'), ((2614, 2629), 'sqlalchemy.inspect', 'inspect', (['engine'], {}), '(engine)\n', (2621, 2629), False, 'from sqlalchemy import create_engine, exc, inspect\n'), ((1777, 1804), 'sqlalchemy.create_engine', 'create_engine', (['database_url'], {}), '(database_url)\n', (1790, 1804), False, 'from sqlalchemy import create_engine, exc, inspect\n'), ((2105, 2139), 'alembic.environment.EnvironmentContext', 'EnvironmentContext', (['config', 'script'], {}), '(config, script)\n', (2123, 2139), False, 'from alembic.environment import EnvironmentContext\n'), ((981, 1012), 'alembic.command.upgrade', 'command.upgrade', (['config', '"""head"""'], {}), "(config, 'head')\n", (996, 1012), False, 'from alembic import command\n'), ((2798, 2823), 'sqlalchemy.schema.CreateSchema', 'CreateSchema', (['schema_name'], {}), '(schema_name)\n', (2810, 2823), False, 'from sqlalchemy.schema import CreateSchema\n')] |
import inspect
import json
import pathlib
from fastapi import FastAPI, Request
from fastapi.responses import StreamingResponse
from .trigger import trigger_run
filename = inspect.getframeinfo(inspect.currentframe()).filename
BASE_DIR = pathlib.Path(filename).resolve().parent
app = FastAPI()
@app.get("/") # HTTP GET
def read_root():
return {"hello": "world", "parent": str(BASE_DIR), "container": "hello"}
# @app.post('/') # HTTP POST
# def write_root():
# return {"hello": "world", "parent": str(BASE_DIR)}
# /trigger/nbs/test.ipynb
# /trigger/nbs/scrape.ipynb
@app.get("/trigger/{filepath:path}")
def trigger_notebook(filepath, request:Request):
# /Users/cfe/Dev/jupyter-api/src / nbs/scrape.ipynb
params = dict(request.query_params)
params["BASE_DIR"] = str(BASE_DIR)
params["DATA_DIR"] = str(BASE_DIR / "data")
input_path = BASE_DIR / pathlib.Path(filepath) #
rs, output_path = None, None
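    # trigger_run is expected to execute the notebook with the given parameters and
    # return (result, output notebook path); failures are silently swallowed below,
    # so the response carries output_path=None when execution did not succeed.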
try:
rs, output_path = trigger_run(input_path, params=params)
except:
pass
return {"output_path": output_path}
@app.get("/output/{filepath:path}")
def output_notebook(filepath, request:Request):
input_path = BASE_DIR / pathlib.Path(filepath) #
fname = input_path.stem
suffix = input_path.suffix
output_dir = input_path.parent / "outputs"
output_dir.mkdir(parents=True, exist_ok=True)
output_path = output_dir / f"{fname}-output{suffix}"
if not output_path.exists():
return {}
data = json.loads(output_path.read_text())
return data
@app.get("/events/{filepath:path}")
def output_events(filepath, request:Request):
input_path = BASE_DIR / pathlib.Path(filepath) #
fname = input_path.stem
suffix = input_path.suffix
output_dir = input_path.parent / "outputs"
stdout_path = output_dir / f"{fname}-stdout"
if not stdout_path.exists():
return []
return StreamingResponse(open(stdout_path, 'rb'))
| [
"inspect.currentframe",
"fastapi.FastAPI",
"pathlib.Path"
] | [((285, 294), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (292, 294), False, 'from fastapi import FastAPI, Request\n'), ((193, 215), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (213, 215), False, 'import inspect\n'), ((875, 897), 'pathlib.Path', 'pathlib.Path', (['filepath'], {}), '(filepath)\n', (887, 897), False, 'import pathlib\n'), ((1186, 1208), 'pathlib.Path', 'pathlib.Path', (['filepath'], {}), '(filepath)\n', (1198, 1208), False, 'import pathlib\n'), ((1654, 1676), 'pathlib.Path', 'pathlib.Path', (['filepath'], {}), '(filepath)\n', (1666, 1676), False, 'import pathlib\n'), ((238, 260), 'pathlib.Path', 'pathlib.Path', (['filename'], {}), '(filename)\n', (250, 260), False, 'import pathlib\n')] |
from leapp import reporting
from leapp.libraries.common.config import get_product_type
from leapp.libraries.stdlib import api
from leapp.models import (
RepositoriesBlacklisted,
RepositoriesFacts,
RepositoriesMap,
)
def _is_optional_repo(repo):
sys_type = get_product_type('source')
suffix = 'optional-rpms'
if sys_type != 'ga':
suffix = 'optional-{}-rpms'.format(sys_type)
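    # On GA systems the optional repo id ends with 'optional-rpms'; on pre-release
    # systems the product type is embedded, e.g. 'optional-beta-rpms'.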
return repo.from_repoid.endswith(suffix)
def _get_list_of_optional_repos():
"""
Return a dict of optional repositories based on RepositoriesMap: { 'from_repoid' : 'to_repoid'}
    It consumes RepositoriesMap messages and creates a map (dict) of optional repositories
    on a RHEL 7 system to CRB repositories on RHEL 8. See the RepositoriesMap model.
"""
opt_repo = {}
repo_map = next(api.consume(RepositoriesMap), None)
if repo_map:
for repo in repo_map.repositories:
if _is_optional_repo(repo):
opt_repo[repo.from_repoid] = repo.to_repoid
return opt_repo
def _get_disabled_optional_repo():
"""
Return a list of disabled optional repositories available on the system.
"""
opt_repos = _get_list_of_optional_repos()
repos_blacklist = []
repo_map = next(api.consume(RepositoriesFacts), None)
for repo_file in repo_map.repositories:
for repo in repo_file.data:
if repo.repoid in opt_repos and not repo.enabled:
repos_blacklist.append(opt_repos[repo.repoid])
return repos_blacklist
def process():
# blacklist CRB repo if optional repo is not enabled
reposid_blacklist = _get_disabled_optional_repo()
if reposid_blacklist:
api.current_logger().info("The optional repository is not enabled. Blacklisting the CRB repository.")
api.produce(RepositoriesBlacklisted(repoids=reposid_blacklist))
report = [
reporting.Title("Excluded RHEL 8 repositories"),
reporting.Summary(
"The following repositories are not supported by "
"Red Hat and are excluded from the list of repositories "
"used during the upgrade.\n- {}".format(
"\n- ".join(reposid_blacklist)
)
),
reporting.Severity(reporting.Severity.INFO),
reporting.Tags([reporting.Tags.REPOSITORY]),
reporting.Flags([reporting.Flags.FAILURE]),
reporting.ExternalLink(
url=(
"https://access.redhat.com/documentation/en-us/"
"red_hat_enterprise_linux/8/html/package_manifest/"
"codereadylinuxbuilder-repository."
),
title="CodeReady Linux Builder repository",
),
]
reporting.create_report(report)
| [
"leapp.reporting.create_report",
"leapp.reporting.Title",
"leapp.reporting.Flags",
"leapp.models.RepositoriesBlacklisted",
"leapp.libraries.stdlib.api.consume",
"leapp.reporting.Tags",
"leapp.libraries.stdlib.api.current_logger",
"leapp.libraries.common.config.get_product_type",
"leapp.reporting.Severity",
"leapp.reporting.ExternalLink"
] | [((274, 300), 'leapp.libraries.common.config.get_product_type', 'get_product_type', (['"""source"""'], {}), "('source')\n", (290, 300), False, 'from leapp.libraries.common.config import get_product_type\n'), ((817, 845), 'leapp.libraries.stdlib.api.consume', 'api.consume', (['RepositoriesMap'], {}), '(RepositoriesMap)\n', (828, 845), False, 'from leapp.libraries.stdlib import api\n'), ((1254, 1284), 'leapp.libraries.stdlib.api.consume', 'api.consume', (['RepositoriesFacts'], {}), '(RepositoriesFacts)\n', (1265, 1284), False, 'from leapp.libraries.stdlib import api\n'), ((2791, 2822), 'leapp.reporting.create_report', 'reporting.create_report', (['report'], {}), '(report)\n', (2814, 2822), False, 'from leapp import reporting\n'), ((1808, 1858), 'leapp.models.RepositoriesBlacklisted', 'RepositoriesBlacklisted', ([], {'repoids': 'reposid_blacklist'}), '(repoids=reposid_blacklist)\n', (1831, 1858), False, 'from leapp.models import RepositoriesBlacklisted, RepositoriesFacts, RepositoriesMap\n'), ((1892, 1939), 'leapp.reporting.Title', 'reporting.Title', (['"""Excluded RHEL 8 repositories"""'], {}), "('Excluded RHEL 8 repositories')\n", (1907, 1939), False, 'from leapp import reporting\n'), ((2266, 2309), 'leapp.reporting.Severity', 'reporting.Severity', (['reporting.Severity.INFO'], {}), '(reporting.Severity.INFO)\n', (2284, 2309), False, 'from leapp import reporting\n'), ((2323, 2366), 'leapp.reporting.Tags', 'reporting.Tags', (['[reporting.Tags.REPOSITORY]'], {}), '([reporting.Tags.REPOSITORY])\n', (2337, 2366), False, 'from leapp import reporting\n'), ((2380, 2422), 'leapp.reporting.Flags', 'reporting.Flags', (['[reporting.Flags.FAILURE]'], {}), '([reporting.Flags.FAILURE])\n', (2395, 2422), False, 'from leapp import reporting\n'), ((2436, 2648), 'leapp.reporting.ExternalLink', 'reporting.ExternalLink', ([], {'url': '"""https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/package_manifest/codereadylinuxbuilder-repository."""', 'title': '"""CodeReady Linux Builder repository"""'}), "(url=\n 'https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/package_manifest/codereadylinuxbuilder-repository.'\n , title='CodeReady Linux Builder repository')\n", (2458, 2648), False, 'from leapp import reporting\n'), ((1686, 1706), 'leapp.libraries.stdlib.api.current_logger', 'api.current_logger', ([], {}), '()\n', (1704, 1706), False, 'from leapp.libraries.stdlib import api\n')] |
from typing import Any, Dict, Type
import dash_bootstrap_components as dbc
import numpy as np
import pandas as pd
import plotly.graph_objs as go
from dash import dcc, html
from deepcave import app
from deepcave.evaluators.fanova import fANOVA as _fANOVA
from deepcave.plugins.dynamic_plugin import DynamicPlugin
from deepcave.plugins.static_plugin import StaticPlugin
from deepcave.runs.run import Status
from deepcave.utils.logs import get_logger
logger = get_logger(__name__)
"""
class Configspace(DynamicPlugin):
@staticmethod
def id():
return "configspace"
@staticmethod
def name():
return "Configspace"
@staticmethod
def position():
return 2
@staticmethod
def category():
return "Meta-Data Analysis"
@staticmethod
def description():
return "especially lists all found configurations and which fidelities were used."
@staticmethod
def debug():
return False
def get_input_layout(self):
return []
def get_filter_layout(self):
return []
@staticmethod
def process(run, inputs):
# Config id | Fidelity #1 | Fidelity #2 | ...
all_config_ids = []
config_ids = {}
for trial in run.history:
if trial.status != Status.SUCCESS:
continue
if trial.config_id not in all_config_ids:
all_config_ids.append(trial.config_id)
if trial.budget not in config_ids:
config_ids[trial.budget] = []
if trial.config_id not in config_ids[trial.budget]:
config_ids[trial.budget].append(trial.config_id)
results = {}
for config_id in all_config_ids:
results[config_id] = []
for fidelity in run.get_budgets():
if config_id in config_ids[fidelity]:
results[config_id].append("YES")
else:
results[config_id].append("")
return {
"fidelities": run.get_budgets(),
"data": results
}
def get_output_layout(self):
return [
html.Div(id=self.register_output("output", "children"))
]
def load_outputs(self, filters, raw_outputs):
table_header = [
html.Thead(html.Tr([
html.Th("Config ID"),
*[html.Th(fidelity) for fidelity in raw_outputs["fidelities"]]
]))
]
rows = []
for config_id, values in raw_outputs["data"].items():
fidelity_cols = []
for value in values:
fidelity_cols.append(html.Td(value))
rows.append(html.Tr([html.Td(config_id), *fidelity_cols]))
table_body = [html.Tbody(rows)]
table = dbc.Table(table_header + table_body, bordered=True)
return table
"""
| [
"deepcave.utils.logs.get_logger"
] | [((460, 480), 'deepcave.utils.logs.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (470, 480), False, 'from deepcave.utils.logs import get_logger\n')] |
"""FactorAnalysis class for Metran in Pastas."""
from logging import getLogger
import numpy as np
import scipy.optimize as scopt
from pastas.utils import initialize_logger
logger = getLogger(__name__)
initialize_logger(logger)
class FactorAnalysis:
"""Class to perform a factor analysis for the Pastas Metran model.
Parameters
----------
maxfactors : int, optional.
maximum number of factors to select. The default is None.
Examples
--------
A minimal working example of the FactorAnalysis class is shown below:
>>> fa = FactorAnalysis()
>>> factors = fa.solve(oseries)
"""
def __init__(self, maxfactors=None):
self.maxfactors = maxfactors
def get_eigval_weight(self):
"""Method to get the relative weight of each eigenvalue.
Returns
-------
numpy.ndarray
All eigenvalues as a fraction of the sum of eigenvalues.
"""
return self.eigval / np.sum(self.eigval)
def solve(self, oseries):
"""Method to perform factor analysis.
        Factor analysis is based on the minres algorithm.
        The number of factors is determined by Velicer's MAP test.
        If more than one factor is retained,
        the factors are rotated using orthogonal (varimax) rotation.
Parameters
----------
oseries : pandas.DataFrame
Object containing the time series. The
series can be non-equidistant.
Raises
------
Exception
If no proper factors can be derived from the series.
Returns
-------
factors : numpy.ndarray
Factor loadings.
"""
correlation = self._get_correlations(oseries)
self.eigval, eigvec = self._get_eigval(correlation)
# Velicer's MAP test
try:
nfactors, _ = self._maptest(correlation,
eigvec, self.eigval)
msg = "Number of factors according to Velicer\'s MAP test: " \
+ f"{nfactors}"
logger.info(msg)
if nfactors == 0:
nfactors = sum(self.eigval > 1)
msg = "Number of factors according to Kaiser criterion: " \
+ f"{nfactors}"
logger.info(msg)
if self.maxfactors is not None:
nfactors = min(nfactors, self.maxfactors)
except Exception:
nfactors = 0
factors = self._minres(correlation, nfactors)
if ((nfactors > 0) and (factors is not None)
and (np.count_nonzero(factors) > 0)):
# factors is not None and does not contain nonzero elements
if nfactors > 1:
# perform varimax rotation
comm = np.zeros(factors.shape[0])
for i in range(factors.shape[0]):
for j in range(nfactors):
comm[i] = comm[i] + factors[i, j] ** 2
factors[i, :] = factors[i, :] / np.sqrt(comm[i])
factors = self._rotate(factors[:, :nfactors])
for i in range(factors.shape[0]):
factors[i, :] = factors[i, :] * np.sqrt(comm[i])
# swap sign if dominant sign is negative
for j in range(factors.shape[1]):
facsign = 0
for i in range(factors.shape[0]):
facsign = facsign + factors[i, j]
if facsign < 0:
for i in range(factors.shape[0]):
if np.sign(factors[i, j]) != 0:
factors[i, j] = -1. * factors[i, j]
self.factors = np.atleast_2d(factors[:, :nfactors])
self.fep = 100 * np.sum(self.get_eigval_weight()[:nfactors])
else:
msg = "No proper common factors could be derived from series."
logger.warning(msg)
self.factors = None
return self.factors
@staticmethod
def _rotate(phi, gamma=1, maxiter=20, tol=1e-6):
"""Internal method to rotate factor loadings.
Uses varimax, quartimax, equamax, or parsimax rotation.
Parameters
----------
phi : numpy.ndarray
Eigenvectors to be rotated
gamma : float, optional
Coefficient for rotation. The default is 1.
Varimax: gamma = 1.
Quartimax: gamma = 0.
Equamax: gamma = nfac/2.
Parsimax: gamma = nvar(nfac - 1)/(nvar + nfac - 2).
maxiter : integer, optional
Maximum number of iterations. The default is 20.
tol : float, optional
Stop criterion. The default is 1e-6.
Returns
-------
phi_rot : 2-dimensional array
rotated eigenvectors
References
----------
<NAME>. (1958): The varimax criterion for analytic rotation in
factor analysis. Psychometrika 23: 187–200.
"""
p, k = phi.shape
R = np.eye(k)
d = 0
for _ in range(maxiter):
d_old = d
Lambda = np.dot(phi, R)
u, s, vh = np.linalg.svd(
np.dot(phi.T, np.asarray(Lambda) ** 3 - (gamma / p)
* np.dot(Lambda, np.diag(
np.diag(np.dot(Lambda.T, Lambda))))))
R = np.dot(u, vh)
d = np.sum(s)
if (d_old != 0) and (d / d_old < 1 + tol):
break
phi_rot = np.dot(phi, R)
return phi_rot
def _minres(self, s, nf, covar=False):
"""Internal method for estimating factor loadings.
Uses the minimum residuals (minres) algorithm.
Parameters
----------
s : numpy.ndarray
Correlation matrix
nf : integer
Number of factors
covar : boolean
            True if s is a covariance matrix rather than a correlation matrix
Returns
-------
loadings : numpy.ndarray
Estimated factor loadings
"""
sorg = np.copy(s)
try:
ssmc = 1 - 1 / np.diag(np.linalg.inv(s))
if (not(covar) and np.sum(ssmc) == nf) and (nf > 1):
start = 0.5 * np.ones(nf, dtype=float)
else:
start = np.diag(s) - ssmc
except:
return
bounds = list()
for _ in range(len(start)):
bounds.append((0.005, 1))
res = scopt.minimize(self._minresfun, start, method='L-BFGS-B',
jac=self._minresgrad, bounds=bounds,
args=(s, nf))
loadings = self._get_loadings(res.x, sorg, nf)
return loadings
@staticmethod
def _maptest(cov, eigvec, eigval):
"""Internal method to run Velicer's MAP test.
Determines the number of factors to be used. This method includes
two variations of the MAP test: the orginal and the revised MAP test.
Parameters
----------
cov : numpy.ndarray
Covariance matrix.
eigvec : numpy.ndarray
Matrix with columns eigenvectors associated with eigenvalues.
eigval : numpy.ndarray
Vector with eigenvalues in descending order.
Returns
-------
        nfacts : integer
            Number of factors according to the original MAP test.
        nfacts4 : integer
            Number of factors according to the revised MAP test.
References
----------
The original MAP test:
<NAME>. (1976). Determining the number of components
from the matrix of partial correlations. Psychometrika, 41, 321-327.
The revised (2000) MAP test i.e., with the partial correlations
raised to the 4rth power (rather than squared):
<NAME>., <NAME>., and <NAME>. (2000). Construct
explication through factor or component analysis: A review and
evaluation of alternative procedures for determining the number
of factors or components. Pp. 41-71 in <NAME> and
<NAME>, eds., Problems and solutions in human assessment.
Boston: Kluwer.
"""
nvars = len(eigval)
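        # fm / fm4 hold the average squared (and fourth-power) off-diagonal partial
        # correlations after partialling out the first m components; the minimum over
        # m gives the suggested number of factors.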
fm = np.array([np.arange(nvars, dtype=float),
np.arange(nvars, dtype=float)]).T
np.put(fm, [0, 1], ((np.sum(np.sum(np.square(cov))) - nvars)
/ (nvars * (nvars - 1))))
fm4 = np.copy(fm)
np.put(fm4, [0, 1],
((np.sum(np.sum(np.square(np.square(cov)))) - nvars)
/ (nvars * (nvars - 1))))
for m in range(nvars - 1):
biga = np.atleast_2d(eigvec[:, :m + 1])
partcov = cov - np.dot(biga, biga.T)
# exit function with nfacts=1 if diag partcov contains negatives
if np.amin(np.diag(partcov)) < 0:
return 1, 1
d = np.diag((1 / np.sqrt(np.diag(partcov))))
pr = np.dot(d, np.dot(partcov, d))
np.put(fm, [m + 1, 1], ((np.sum(np.sum(np.square(pr))) - nvars)
/ (nvars * (nvars - 1))))
np.put(fm4, [m + 1, 1], ((np.sum(np.sum(np.square(np.square(pr))))
- nvars) / (nvars * (nvars - 1))))
minfm = fm[0, 1]
nfacts = 0
minfm4 = fm4[0, 1]
nfacts4 = 0
for s in range(nvars):
fm[s, 0] = s
fm4[s, 0] = s
if fm[s, 1] < minfm:
minfm = fm[s, 1]
nfacts = s
if fm4[s, 1] < minfm4:
minfm4 = fm4[s, 1]
nfacts4 = s
return nfacts, nfacts4
@staticmethod
def _minresfun(psi, s, nf):
"""Function to be minimized in minimum residuals (minres) algorithm.
Parameters
----------
psi : array
Vector to be adjusted during optimization
s : array
Correlation matrix
nf : integer
Number of factors
Returns
-------
obj : array
objective function defined as sum of residuals
"""
s2 = np.copy(s)
np.fill_diagonal(s2, 1 - psi)
eigval, eigvec = np.linalg.eigh(s2)
eigval[eigval < np.MachAr().eps] = 100 * np.MachAr().eps
if nf > 1:
loadings = np.atleast_2d(np.dot(eigvec[:, :nf],
np.diag(np.sqrt(eigval[:nf]))))
else:
loadings = eigvec[:, 0] * np.sqrt(eigval[0])
model = np.dot(loadings, loadings.T)
residual = np.square(s2 - model)
np.fill_diagonal(residual, 0)
return np.sum(residual)
def _minresgrad(self, psi, s, nf):
"""Internal method to calculate jacobian of function.
Jacobian to be minimized in minimum residuals (minres) algorithm.
Parameters
----------
psi : array
Vector to be adjusted during optimization.
s : array
Correlation matrix.
nf : integer
Number of factors.
Returns
-------
jac : array
Jacobian of minresfun.
"""
load = self._get_loadings(psi, s, nf)
g = np.dot(load, load.T) + np.diag(psi) - s
jac = np.diag(g) / np.square(psi)
return jac
@staticmethod
def _get_loadings(psi, s, nf):
"""Internal method to estimate matrix of factor loadings.
Based on minimum residuals (minres) algorithm.
Parameters
----------
psi : numpy.ndarray
Communality estimate.
s : numpy.ndarray
Correlation matrix.
nf : integer
Number of factors.
Returns
-------
load : npumy.ndarray
Estimated factor loadings.
"""
sc = np.diag(1 / np.sqrt(psi))
sstar = np.dot(sc, np.dot(s, sc))
eigval, eigvec = np.linalg.eig(sstar)
L = eigvec[:, :nf]
load = np.dot(L, np.diag(np.sqrt(np.maximum(
np.subtract(eigval[:nf], 1), 0))))
load = np.dot(np.diag(np.sqrt(psi)), load)
return load
@staticmethod
def _get_correlations(oseries):
"""Internal method to calculate correlations for multivariate series.
Parameters
----------
oseries : pandas.DataFrame
Multivariate series
Returns
-------
corr : numpy.ndarray
Correlation matrix
"""
corr = np.array(oseries.corr())
return corr
@staticmethod
def _get_eigval(correlation):
"""Internal method to get eigenvalues and eigenvectors.
Get eigenvalues and eigenvectors based on correlation matrix.
Parameters
----------
correlation : numpy.ndarray
Correlation matrix for which eigenvalues
and eigenvectors need to be derived.
Raises
------
Exception
If method results in complex eigenvalues and eigenvectors.
Returns
-------
eigval : numpy.ndarray
Vector with eigenvalues.
eigvec : numpy.ndarray
Matrix with eigenvectors.
"""
# perform eigenvalue decomposition
eigval, eigvec = np.linalg.eig(correlation)
if isinstance(eigval[0], np.complex128):
msg = "Serial correlation matrix has " + \
"complex eigenvalues and eigenvectors. " + \
"Factors cannot be estimated for these series."
logger.error(msg)
raise Exception(msg)
# sort eigenvalues and eigenvectors
evals_order = np.argsort(-eigval)
eigval = eigval[evals_order]
eigval[eigval < 0] = 0.
eigvec = eigvec[:, evals_order]
eigvec = np.atleast_2d(np.dot(eigvec, np.sqrt(np.diag(eigval))))
return eigval, eigvec
| [
"logging.getLogger",
"numpy.sqrt",
"pastas.utils.initialize_logger",
"numpy.MachAr",
"numpy.argsort",
"numpy.count_nonzero",
"numpy.arange",
"numpy.atleast_2d",
"numpy.asarray",
"numpy.subtract",
"numpy.dot",
"numpy.linalg.eigh",
"numpy.eye",
"numpy.linalg.eig",
"numpy.ones",
"scipy.optimize.minimize",
"numpy.fill_diagonal",
"numpy.square",
"numpy.sign",
"numpy.copy",
"numpy.diag",
"numpy.sum",
"numpy.zeros",
"numpy.linalg.inv"
] | [((184, 203), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (193, 203), False, 'from logging import getLogger\n'), ((204, 229), 'pastas.utils.initialize_logger', 'initialize_logger', (['logger'], {}), '(logger)\n', (221, 229), False, 'from pastas.utils import initialize_logger\n'), ((5037, 5046), 'numpy.eye', 'np.eye', (['k'], {}), '(k)\n', (5043, 5046), True, 'import numpy as np\n'), ((5524, 5538), 'numpy.dot', 'np.dot', (['phi', 'R'], {}), '(phi, R)\n', (5530, 5538), True, 'import numpy as np\n'), ((6054, 6064), 'numpy.copy', 'np.copy', (['s'], {}), '(s)\n', (6061, 6064), True, 'import numpy as np\n'), ((6460, 6573), 'scipy.optimize.minimize', 'scopt.minimize', (['self._minresfun', 'start'], {'method': '"""L-BFGS-B"""', 'jac': 'self._minresgrad', 'bounds': 'bounds', 'args': '(s, nf)'}), "(self._minresfun, start, method='L-BFGS-B', jac=self.\n _minresgrad, bounds=bounds, args=(s, nf))\n", (6474, 6573), True, 'import scipy.optimize as scopt\n'), ((8431, 8442), 'numpy.copy', 'np.copy', (['fm'], {}), '(fm)\n', (8438, 8442), True, 'import numpy as np\n'), ((10137, 10147), 'numpy.copy', 'np.copy', (['s'], {}), '(s)\n', (10144, 10147), True, 'import numpy as np\n'), ((10156, 10185), 'numpy.fill_diagonal', 'np.fill_diagonal', (['s2', '(1 - psi)'], {}), '(s2, 1 - psi)\n', (10172, 10185), True, 'import numpy as np\n'), ((10211, 10229), 'numpy.linalg.eigh', 'np.linalg.eigh', (['s2'], {}), '(s2)\n', (10225, 10229), True, 'import numpy as np\n'), ((10537, 10565), 'numpy.dot', 'np.dot', (['loadings', 'loadings.T'], {}), '(loadings, loadings.T)\n', (10543, 10565), True, 'import numpy as np\n'), ((10585, 10606), 'numpy.square', 'np.square', (['(s2 - model)'], {}), '(s2 - model)\n', (10594, 10606), True, 'import numpy as np\n'), ((10615, 10644), 'numpy.fill_diagonal', 'np.fill_diagonal', (['residual', '(0)'], {}), '(residual, 0)\n', (10631, 10644), True, 'import numpy as np\n'), ((10661, 10677), 'numpy.sum', 'np.sum', (['residual'], {}), '(residual)\n', (10667, 10677), True, 'import numpy as np\n'), ((11938, 11958), 'numpy.linalg.eig', 'np.linalg.eig', (['sstar'], {}), '(sstar)\n', (11951, 11958), True, 'import numpy as np\n'), ((13296, 13322), 'numpy.linalg.eig', 'np.linalg.eig', (['correlation'], {}), '(correlation)\n', (13309, 13322), True, 'import numpy as np\n'), ((13685, 13704), 'numpy.argsort', 'np.argsort', (['(-eigval)'], {}), '(-eigval)\n', (13695, 13704), True, 'import numpy as np\n'), ((973, 992), 'numpy.sum', 'np.sum', (['self.eigval'], {}), '(self.eigval)\n', (979, 992), True, 'import numpy as np\n'), ((3700, 3736), 'numpy.atleast_2d', 'np.atleast_2d', (['factors[:, :nfactors]'], {}), '(factors[:, :nfactors])\n', (3713, 3736), True, 'import numpy as np\n'), ((5137, 5151), 'numpy.dot', 'np.dot', (['phi', 'R'], {}), '(phi, R)\n', (5143, 5151), True, 'import numpy as np\n'), ((5388, 5401), 'numpy.dot', 'np.dot', (['u', 'vh'], {}), '(u, vh)\n', (5394, 5401), True, 'import numpy as np\n'), ((5418, 5427), 'numpy.sum', 'np.sum', (['s'], {}), '(s)\n', (5424, 5427), True, 'import numpy as np\n'), ((8636, 8668), 'numpy.atleast_2d', 'np.atleast_2d', (['eigvec[:, :m + 1]'], {}), '(eigvec[:, :m + 1])\n', (8649, 8668), True, 'import numpy as np\n'), ((11284, 11294), 'numpy.diag', 'np.diag', (['g'], {}), '(g)\n', (11291, 11294), True, 'import numpy as np\n'), ((11297, 11311), 'numpy.square', 'np.square', (['psi'], {}), '(psi)\n', (11306, 11311), True, 'import numpy as np\n'), ((11898, 11911), 'numpy.dot', 'np.dot', (['s', 'sc'], {}), '(s, sc)\n', (11904, 11911), True, 'import 
numpy as np\n'), ((2598, 2623), 'numpy.count_nonzero', 'np.count_nonzero', (['factors'], {}), '(factors)\n', (2614, 2623), True, 'import numpy as np\n'), ((2798, 2824), 'numpy.zeros', 'np.zeros', (['factors.shape[0]'], {}), '(factors.shape[0])\n', (2806, 2824), True, 'import numpy as np\n'), ((8697, 8717), 'numpy.dot', 'np.dot', (['biga', 'biga.T'], {}), '(biga, biga.T)\n', (8703, 8717), True, 'import numpy as np\n'), ((8953, 8971), 'numpy.dot', 'np.dot', (['partcov', 'd'], {}), '(partcov, d)\n', (8959, 8971), True, 'import numpy as np\n'), ((10279, 10290), 'numpy.MachAr', 'np.MachAr', ([], {}), '()\n', (10288, 10290), True, 'import numpy as np\n'), ((10502, 10520), 'numpy.sqrt', 'np.sqrt', (['eigval[0]'], {}), '(eigval[0])\n', (10509, 10520), True, 'import numpy as np\n'), ((11230, 11250), 'numpy.dot', 'np.dot', (['load', 'load.T'], {}), '(load, load.T)\n', (11236, 11250), True, 'import numpy as np\n'), ((11253, 11265), 'numpy.diag', 'np.diag', (['psi'], {}), '(psi)\n', (11260, 11265), True, 'import numpy as np\n'), ((11857, 11869), 'numpy.sqrt', 'np.sqrt', (['psi'], {}), '(psi)\n', (11864, 11869), True, 'import numpy as np\n'), ((12116, 12128), 'numpy.sqrt', 'np.sqrt', (['psi'], {}), '(psi)\n', (12123, 12128), True, 'import numpy as np\n'), ((6226, 6250), 'numpy.ones', 'np.ones', (['nf'], {'dtype': 'float'}), '(nf, dtype=float)\n', (6233, 6250), True, 'import numpy as np\n'), ((6293, 6303), 'numpy.diag', 'np.diag', (['s'], {}), '(s)\n', (6300, 6303), True, 'import numpy as np\n'), ((8206, 8235), 'numpy.arange', 'np.arange', (['nvars'], {'dtype': 'float'}), '(nvars, dtype=float)\n', (8215, 8235), True, 'import numpy as np\n'), ((8260, 8289), 'numpy.arange', 'np.arange', (['nvars'], {'dtype': 'float'}), '(nvars, dtype=float)\n', (8269, 8289), True, 'import numpy as np\n'), ((8818, 8834), 'numpy.diag', 'np.diag', (['partcov'], {}), '(partcov)\n', (8825, 8834), True, 'import numpy as np\n'), ((10254, 10265), 'numpy.MachAr', 'np.MachAr', ([], {}), '()\n', (10263, 10265), True, 'import numpy as np\n'), ((13868, 13883), 'numpy.diag', 'np.diag', (['eigval'], {}), '(eigval)\n', (13875, 13883), True, 'import numpy as np\n'), ((3036, 3052), 'numpy.sqrt', 'np.sqrt', (['comm[i]'], {}), '(comm[i])\n', (3043, 3052), True, 'import numpy as np\n'), ((3217, 3233), 'numpy.sqrt', 'np.sqrt', (['comm[i]'], {}), '(comm[i])\n', (3224, 3233), True, 'import numpy as np\n'), ((6113, 6129), 'numpy.linalg.inv', 'np.linalg.inv', (['s'], {}), '(s)\n', (6126, 6129), True, 'import numpy as np\n'), ((6162, 6174), 'numpy.sum', 'np.sum', (['ssmc'], {}), '(ssmc)\n', (6168, 6174), True, 'import numpy as np\n'), ((8906, 8922), 'numpy.diag', 'np.diag', (['partcov'], {}), '(partcov)\n', (8913, 8922), True, 'import numpy as np\n'), ((10426, 10446), 'numpy.sqrt', 'np.sqrt', (['eigval[:nf]'], {}), '(eigval[:nf])\n', (10433, 10446), True, 'import numpy as np\n'), ((12051, 12078), 'numpy.subtract', 'np.subtract', (['eigval[:nf]', '(1)'], {}), '(eigval[:nf], 1)\n', (12062, 12078), True, 'import numpy as np\n'), ((3579, 3601), 'numpy.sign', 'np.sign', (['factors[i, j]'], {}), '(factors[i, j])\n', (3586, 3601), True, 'import numpy as np\n'), ((5220, 5238), 'numpy.asarray', 'np.asarray', (['Lambda'], {}), '(Lambda)\n', (5230, 5238), True, 'import numpy as np\n'), ((8337, 8351), 'numpy.square', 'np.square', (['cov'], {}), '(cov)\n', (8346, 8351), True, 'import numpy as np\n'), ((8512, 8526), 'numpy.square', 'np.square', (['cov'], {}), '(cov)\n', (8521, 8526), True, 'import numpy as np\n'), ((9024, 9037), 'numpy.square', 'np.square', 
(['pr'], {}), '(pr)\n', (9033, 9037), True, 'import numpy as np\n'), ((9173, 9186), 'numpy.square', 'np.square', (['pr'], {}), '(pr)\n', (9182, 9186), True, 'import numpy as np\n'), ((5342, 5366), 'numpy.dot', 'np.dot', (['Lambda.T', 'Lambda'], {}), '(Lambda.T, Lambda)\n', (5348, 5366), True, 'import numpy as np\n')] |
import unittest
import sys
import os.path
import numpy as np
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
import nqueens
class NQueensTest(unittest.TestCase):
def testDiagsValid(self):
good_board = np.array([[0, 1, 0, 0], [0, 0, 0, 1], [1, 0, 0, 0], [0, 0, 1, 0]])
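        # Queens in columns (1, 3, 0, 2): the classic 4-queens solution, no two share a diagonal.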
self.assertTrue(nqueens.diags_valid(good_board))
bad_board = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]])
self.assertFalse(nqueens.diags_valid(bad_board))
bad_board2 = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]])
self.assertFalse(nqueens.diags_valid(bad_board2))
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"numpy.array",
"nqueens.diags_valid"
] | [((707, 722), 'unittest.main', 'unittest.main', ([], {}), '()\n', (720, 722), False, 'import unittest\n'), ((259, 325), 'numpy.array', 'np.array', (['[[0, 1, 0, 0], [0, 0, 0, 1], [1, 0, 0, 0], [0, 0, 1, 0]]'], {}), '([[0, 1, 0, 0], [0, 0, 0, 1], [1, 0, 0, 0], [0, 0, 1, 0]])\n', (267, 325), True, 'import numpy as np\n'), ((404, 470), 'numpy.array', 'np.array', (['[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]'], {}), '([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]])\n', (412, 470), True, 'import numpy as np\n'), ((550, 616), 'numpy.array', 'np.array', (['[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]]'], {}), '([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]])\n', (558, 616), True, 'import numpy as np\n'), ((350, 381), 'nqueens.diags_valid', 'nqueens.diags_valid', (['good_board'], {}), '(good_board)\n', (369, 381), False, 'import nqueens\n'), ((496, 526), 'nqueens.diags_valid', 'nqueens.diags_valid', (['bad_board'], {}), '(bad_board)\n', (515, 526), False, 'import nqueens\n'), ((642, 673), 'nqueens.diags_valid', 'nqueens.diags_valid', (['bad_board2'], {}), '(bad_board2)\n', (661, 673), False, 'import nqueens\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2021 The HERA Collaboration
# Licensed under the MIT License
"""Simple demo showing how to plot GSM."""
import numpy as np
import hera_cc_utils as hera_cc
from astropy.coordinates import SkyCoord
gsm = hera_cc.Map()
fig1, ax1, proj1 = gsm.plot_map(freq=150e6, projection="Robinson")
fig2, ax2, proj2 = gsm.plot_map(
freq=150e6, projection="rectilinear", num=2, vmin=1e2, vmax=1e3
)
ax2.axhline(-30 - 5, color="w", ls=":", lw=1.5, zorder=1)
ax2.axhline(-30 + 5, color="w", ls=":", lw=1.5, zorder=1)
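# The dotted lines bracket the declination strip at -30 +/- 5 degrees; the loop below
# outlines three LST windows within it.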
lst_cuts = [(1.25, 2.7), (4.5, 6.5), (8.5, 10.75)]
for i, lst_cut in enumerate(lst_cuts):
ax2.fill_betweenx([-35, -25], *lst_cut, color="none", edgecolor="w")
ax2.annotate(
"field {}".format(i + 1),
(np.mean(lst_cut), -24),
color="w",
ha="center",
va="bottom",
)
ax2.set_xlim(1, 11)
ax2.set_ylim(-40, -20)
# Draw GOODS-S field
goods_s = SkyCoord("03h32m28s", "-27d48m30s", frame="icrs")
ra, dec = goods_s.ra.hour, goods_s.dec.degree
ax2.scatter(ra, dec, marker="x", color="w")
# patch = Rectangle(xy=(ra,dec), width=0.5, height=0.5,
# facecolor='c', alpha=0.5)
# ax.add_patch(patch)
euclid_dff = SkyCoord("03h31m43.6s", "-28d05m18.6s", frame="icrs")
euclid_sqdeg = 20
euclid_r = np.sqrt(10.0 / np.pi)
ra, dec = euclid_dff.ra.hour, euclid_dff.dec.degree
ax2.scatter(ra, dec, color="w", marker="+")
| [
"hera_cc_utils.Map",
"numpy.mean",
"numpy.sqrt",
"astropy.coordinates.SkyCoord"
] | [((245, 258), 'hera_cc_utils.Map', 'hera_cc.Map', ([], {}), '()\n', (256, 258), True, 'import hera_cc_utils as hera_cc\n'), ((940, 989), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['"""03h32m28s"""', '"""-27d48m30s"""'], {'frame': '"""icrs"""'}), "('03h32m28s', '-27d48m30s', frame='icrs')\n", (948, 989), False, 'from astropy.coordinates import SkyCoord\n'), ((1206, 1259), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['"""03h31m43.6s"""', '"""-28d05m18.6s"""'], {'frame': '"""icrs"""'}), "('03h31m43.6s', '-28d05m18.6s', frame='icrs')\n", (1214, 1259), False, 'from astropy.coordinates import SkyCoord\n'), ((1289, 1310), 'numpy.sqrt', 'np.sqrt', (['(10.0 / np.pi)'], {}), '(10.0 / np.pi)\n', (1296, 1310), True, 'import numpy as np\n'), ((773, 789), 'numpy.mean', 'np.mean', (['lst_cut'], {}), '(lst_cut)\n', (780, 789), True, 'import numpy as np\n')] |
"""
:author: <NAME> <<EMAIL>>
"""
import domintell
class AppInfoRequest(domintell.Command):
"""
send: &APPINFO
"""
def __init__(self):
domintell.Command.__init__(self, "_APPINFO_", "_APPINFO_")
def command(self):
return "APPINFO"
| [
"domintell.Command.__init__"
] | [((165, 223), 'domintell.Command.__init__', 'domintell.Command.__init__', (['self', '"""_APPINFO_"""', '"""_APPINFO_"""'], {}), "(self, '_APPINFO_', '_APPINFO_')\n", (191, 223), False, 'import domintell\n')] |
import tkinter as tk
import requests
import json
win = tk.Tk()
win.title("Weather Report")
win.geometry("800x800")
api = "your API"
url = "http://api.openweathermap.org/data/2.5/weather?"
def weather () :
location = entry.get()
answer = url + "appid=" +api + "&q=" +location
response = requests.get(answer)
res = response.json()
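    # OpenWeatherMap reports cod == "404" when the city is unknown; otherwise the
    # payload carries "main" (temp/pressure/humidity) and "weather" (description).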
if res["cod"] !="404" :
x = res["main"]
temperature = x["temp"]
pressure = x["pressure"]
humidity = x["humidity"]
y = res["weather"]
weather_detail = y[0]["description"]
        label1 = tk.Label(win , text = f'Temperature (In Kelvin) = {temperature} , \n'f' Atmospheric Pressure (In hPa) = {pressure} , \n'f' Humidity (In Percentage) = {humidity} , \n'f' Detail = {weather_detail} ')
label1.grid(row = 2 , column = 0)
else :
label2 = tk.Label(win , text = "Enter The Correct Location")
label2.grid(row = 2 , column = 0)
label = tk.Label(win , text = "Enter Location : " , bg = "#009332")
label.grid(row = 0 , column = 0)
label.config(font=("=sans serif" , 20 , "bold"))
entry = tk.Entry(win)
entry.grid(row = 1 , column = 0 , padx = 100)
button = tk.Button(win , text = "Search" , command = weather)
button.grid(row = 1 , column = 1)
win.mainloop()
| [
"tkinter.Entry",
"tkinter.Button",
"requests.get",
"tkinter.Tk",
"tkinter.Label"
] | [((55, 62), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (60, 62), True, 'import tkinter as tk\n'), ((933, 986), 'tkinter.Label', 'tk.Label', (['win'], {'text': '"""Enter Location : """', 'bg': '"""#009332"""'}), "(win, text='Enter Location : ', bg='#009332')\n", (941, 986), True, 'import tkinter as tk\n'), ((1083, 1096), 'tkinter.Entry', 'tk.Entry', (['win'], {}), '(win)\n', (1091, 1096), True, 'import tkinter as tk\n'), ((1152, 1198), 'tkinter.Button', 'tk.Button', (['win'], {'text': '"""Search"""', 'command': 'weather'}), "(win, text='Search', command=weather)\n", (1161, 1198), True, 'import tkinter as tk\n'), ((298, 318), 'requests.get', 'requests.get', (['answer'], {}), '(answer)\n', (310, 318), False, 'import requests\n'), ((584, 762), 'tkinter.Label', 'tk.Label', (['win'], {'text': '"""Temperature (In Kelvin) = {temp} , \n Atmospheric Pressure (In hPa) = {pre} , \n Humidity (In Percentage) = {hum} , \n Detail = {weather_detail} """'}), '(win, text=\n """Temperature (In Kelvin) = {temp} , \n Atmospheric Pressure (In hPa) = {pre} , \n Humidity (In Percentage) = {hum} , \n Detail = {weather_detail} """\n )\n', (592, 762), True, 'import tkinter as tk\n'), ((831, 879), 'tkinter.Label', 'tk.Label', (['win'], {'text': '"""Enter The Correct Location"""'}), "(win, text='Enter The Correct Location')\n", (839, 879), True, 'import tkinter as tk\n')] |
"""
####################################################################################################
# Copyright Info : Copyright (c) <NAME> @ Hikvision Research Institute. All rights reserved.
# Filename : character_mask_att_head.py
# Abstract       : Character Mask Attention prediction
# Current Version: 1.0.0
# Date : 2021-03-19
######################################################################################################
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from mmdet.models.builder import build_loss, HEADS
from mmcv.cnn import normal_init
from mmcv.cnn import ConvModule
from mmcv.runner import force_fp32, auto_fp16
import cv2
import numpy as np
@HEADS.register_module()
class CharacterMaskAttentionHead(nn.Module):
""" Inplemenation of CMA in MANGO[1]. Dynamic convolution strategy refers to Solov2 [2].
Ref: [1] MANGO: A Mask Attention Guided One-Staged Text Spotter. AAAI-21.
<https://arxiv.org/abs/2012.04350>`_
[2] SOLOv2: Dynamic, Faster and Stronger, NeurIPS-20
<https://arxiv.org/abs/2003.10152>`_
"""
def __init__(self,
in_channels,
conv_out_channels,
num_grids,
stacked_convs=4,
text_max_length=25,
featmap_indices=(0, 1, 2, 3),
loss_char_mask_att=None,
):
"""
Args:
in_channels (int): input feature map channel
conv_out_channels (int): output feature map channel
            num_grids (list(int)): split the image into S*S grids; one entry per selected feature map (4x, 8x, 16x, ...), e.g. [40, 40, 40, 40]
            stacked_convs (int): number of stacked convolution layers
            text_max_length (int): the max length of recognized words.
            featmap_indices (list(int)): selected feature map scales.
            loss_char_mask_att (dict): loss function for CMA supervision, which is required in the pretraining stage.
"""
super().__init__()
assert len(num_grids) == len(featmap_indices)
self.in_channels = in_channels
self.conv_out_channels = conv_out_channels
self.text_max_length = text_max_length
self.stacked_convs = stacked_convs
self.fp16_enabled = False
self.num_grids = num_grids
self.featmap_indices = featmap_indices
if loss_char_mask_att is not None:
self.loss_char_mask_att = build_loss(loss_char_mask_att)
self.loss_weight = loss_char_mask_att['loss_weight']
else:
self.loss_char_mask_att = None
norm_cfg = dict(type='GN', num_groups=32, requires_grad=True)
self.kernal_convs = nn.ModuleList()
self.feature_convs = nn.ModuleList()
for i in range(self.stacked_convs):
chn = self.in_channels + 2 if i == 0 else self.conv_out_channels
self.kernal_convs.append(
ConvModule(
chn,
self.conv_out_channels,
kernel_size=3,
stride=1,
padding=1,
norm_cfg=norm_cfg,
bias=norm_cfg is None))
self.feature_convs.append(
ConvModule(
chn,
self.conv_out_channels,
kernel_size=3,
stride=1,
padding=1,
norm_cfg=norm_cfg,
bias=norm_cfg is None))
kernal_out_channels = self.conv_out_channels * self.text_max_length
self.kernal_out = nn.Conv2d(self.conv_out_channels,
kernal_out_channels,
kernel_size=1,
padding=0)
self.feature_out = ConvModule(self.conv_out_channels,
self.conv_out_channels,
kernel_size=1,
padding=0,
norm_cfg=norm_cfg,
bias=norm_cfg is None)
def init_weights(self):
""" Weight initialization. """
for kernal_conv in self.kernal_convs:
normal_init(kernal_conv.conv, std=0.01)
for kernal_conv in self.feature_convs:
normal_init(kernal_conv.conv, std=0.01)
def forward_single(self, feats, matched_bboxes, num_grid):
""" Forward of CMA in single level.
Args:
feats (Tensor): Input feature map, in shape of [B, C, H, W]
matched_bboxes (Tensor): A tensor of shape [B, S^2] to indicate grid categories
num_grid (int): An int number to indicate grid split numbers
Returns:
Tensor: in shape of [B, K, L, H, W]
"""
kernal_feature = feats
mask_feature = feats
# Calculate x-axis and y-axis coordinate features
x_range = torch.linspace(-1, 1, kernal_feature.shape[-1], device=kernal_feature.device)
y_range = torch.linspace(-1, 1, kernal_feature.shape[-2], device=kernal_feature.device)
y_coord, x_coord = torch.meshgrid(y_range, x_range)
y_coord = y_coord.expand([kernal_feature.shape[0], 1, -1, -1])
x_coord = x_coord.expand([kernal_feature.shape[0], 1, -1, -1])
coord_feature = torch.cat([x_coord, y_coord], 1)
# B x C x H x W -> B x (C+2) x H x W
kernal_feature = torch.cat([kernal_feature, coord_feature], 1)
# B x C x H x W -> B x (C+2) x H x W
mask_feature = torch.cat([mask_feature, coord_feature], 1)
# Generate dynamic convolution kernel
for idx in range(self.stacked_convs):
if idx == 0:
# B x (C+2) x H x W -> B x (C+2) x S x S
kernal_feature = F.interpolate(kernal_feature,size=num_grid, mode='bilinear')
kernal_feature = self.kernal_convs[idx](kernal_feature)
mask_feature = self.feature_convs[idx](mask_feature)
kernal_feature = self.kernal_out(kernal_feature) # B x CL x S x S
mask_feature = self.feature_out(mask_feature) # B x C x H x W
batch, channel, height, width = mask_feature.shape
# Calculate the number of valid grid
max_category_num = torch.max(torch.sum(torch.ge(matched_bboxes, 1), dim=1))
max_category_num = max(max_category_num, 1)
# Calculate the location of valid grid
_, indices = torch.topk(matched_bboxes, max_category_num) # B x K
indices = indices.unsqueeze(-1).expand(-1, -1, self.text_max_length * channel) # B x K x CL
kernal_feature = kernal_feature.permute(0, 2, 3, 1).contiguous().view(batch, num_grid**2, -1) # B x SS x CL
# Collect the values of valid grid
kernal_feature = torch.gather(kernal_feature, 1, indices) # B x K x CL
# B x C x H x W -> BC x H x W -> 1 x BC x H x W
mask_feature = mask_feature.contiguous().view(-1, height, width).unsqueeze(0)
# B x K x CL -> BKL x C-> BKL x C x 1 x 1
kernal_feature = kernal_feature.view(-1, channel).unsqueeze(-1).unsqueeze(-1)
mask_pred = F.conv2d(mask_feature, kernal_feature, groups=batch).contiguous().view(batch, max_category_num,
self.text_max_length,
height,
width) # B x K x L x H x W
return mask_pred
@auto_fp16()
def forward(self, feats, matched_bboxes):
""" Forward of CMA in multiple levels.
Args:
feats (list(Tensor)): Input feature maps, in shapes of [B, C, H, W]
            matched_bboxes (list(Tensor)): tensors of shape [B, S^2] to indicate grid categories
Returns:
list(Tensor): in shape of [B, K, L, H, W]
"""
preds = []
for i in range(len(self.featmap_indices)):
pred = self.forward_single(feats[i], matched_bboxes[i],
self.num_grids[i])
preds.append(pred)
return preds
def get_target_single(self,
gt_cbboxes,
matched_bboxes,
feat_size,
stride,
device='cuda'
):
""" Ground-truth generated according to character level annotations in single level.
Args:
            gt_cbboxes (list(list(list(float)))): A variable length list [N, *, *].
                e.g. [[[x1, y1, x2, y2, x3, y3, x4, y4],[],[]]...]
            matched_bboxes (Tensor): A tensor of shape [B, S^2] to indicate grid categories
            feat_size (tuple): input feature map shape
stride (int): An int number to indicate feature map stride
device (str): computation device, default in 'cuda'
        Returns:
            Tensor: ground-truth mask in single level, in shape of [B, K, L, H, W]
            Tensor: channel-wise weight, in shape of [B, K, L]
"""
batch, _, height, width = feat_size
max_category_num = torch.max(torch.sum(torch.ge(matched_bboxes,1), dim=1))
max_category_num = max(max_category_num, 1)
gt_mask = torch.zeros([batch, max_category_num,self.text_max_length,
height, width], dtype=torch.uint8, device=device)
mask_weight = torch.zeros([batch, max_category_num,
self.text_max_length], dtype=torch.float, device=device)
values, _ = torch.topk(matched_bboxes, int(max_category_num)) # B x K
for batch_id in range(batch):
val = values[batch_id]
gt_cbbox = gt_cbboxes[batch_id]
for idx, cboxes in enumerate(gt_cbbox):
# Calculate the valid grid corresponding to text instance
indices = torch.where(val == idx + 1)[0]
if len(indices) == 0:
continue
# Fill gt mask according to the gt_cbboxes
cboxes = cboxes.reshape(-1, 4, 2)
cboxes_downsample = (cboxes / float(stride)).astype(int)
str_length = cboxes.shape[0]
target_mask = np.zeros((self.text_max_length, height, width), dtype=np.uint8)
for c_id in range(str_length):
cbox = cboxes_downsample[c_id, :, :] # N x 4 x 2
cv2.fillPoly(target_mask[c_id, :, :], [cbox], color=1)
target_mask = torch.Tensor(target_mask)
# Assign gt to the corresponding grid
for ind in indices:
gt_mask[batch_id, ind, ...] = target_mask
mask_weight[batch_id, ind, :str_length+1] = 1
return gt_mask, mask_weight
def get_target(self, feats, gt_cbboxes, matched_bboxes):
""" Ground-truth generated according to character level annotations in multiple levels.
Args:
feats (list(Tensor)): input feature maps, in shape of [B, C, H, W]
            gt_cbboxes (list(list(list(float)))): A variable length list [N, *, *].
                e.g. [[[x1, y1, x2, y2, x3, y3, x4, y4],[],[]]...]
            matched_bboxes (list(Tensor)): tensors of shape [B, S^2] to indicate grid categories
        Returns:
            list(tuple(Tensor)): ground-truth mask in each level, in shape of [B, K, L, H, W], and
                channel-wise weight, in shape of [B, K, L]
"""
mask_targets = []
for i, stride_idx in enumerate(self.featmap_indices):
stride = 4 * (2 ** stride_idx)
target = self.get_target_single(
gt_cbboxes,
matched_bboxes[i],
feats[i].shape,
stride,
device=feats[i].device
)
mask_targets.append(target)
return mask_targets
@force_fp32(apply_to=('mask_preds', ))
def loss(self, mask_preds, mask_targets):
""" Loss computation.
Args:
mask_preds (list(Tensor)): feature map predictions, in shape of [B, K, L, H, W]
mask_targets (list(Tensor)): feature map targets, in shape of [B, K, L]
Returns:
dict: losses in a dict.
"""
loss = dict()
for i, stride_idx in enumerate(self.featmap_indices):
stride = 4 * (2 ** stride_idx)
mask_pred = mask_preds[i]
mask_pred = torch.sigmoid(mask_pred)
_, _, length, height, width = mask_pred.shape
gt_mask, mask_weight = mask_targets[i]
mask_pred = mask_pred.view(-1, length, height, width)
gt_mask = gt_mask.view(-1, length, height, width)
mask_weight = mask_weight.view(-1, length).unsqueeze(-1).unsqueeze(-1)
loss_mask_att = self.loss_char_mask_att(mask_pred, gt_mask, weight_in_channel=mask_weight)
loss.update({"loss_cha_mask_att_{}x".format(stride):loss_mask_att})
return loss
| [
"mmdet.models.builder.build_loss",
"torch.nn.functional.conv2d",
"mmcv.runner.force_fp32",
"torch.nn.functional.interpolate",
"torch.nn.ModuleList",
"mmdet.models.builder.HEADS.register_module",
"torch.meshgrid",
"torch.gather",
"cv2.fillPoly",
"torch.topk",
"torch.Tensor",
"torch.linspace",
"torch.ge",
"torch.cat",
"mmcv.runner.auto_fp16",
"torch.where",
"mmcv.cnn.ConvModule",
"torch.sigmoid",
"torch.nn.Conv2d",
"numpy.zeros",
"torch.zeros",
"mmcv.cnn.normal_init"
] | [((737, 760), 'mmdet.models.builder.HEADS.register_module', 'HEADS.register_module', ([], {}), '()\n', (758, 760), False, 'from mmdet.models.builder import build_loss, HEADS\n'), ((7719, 7730), 'mmcv.runner.auto_fp16', 'auto_fp16', ([], {}), '()\n', (7728, 7730), False, 'from mmcv.runner import force_fp32, auto_fp16\n'), ((12283, 12319), 'mmcv.runner.force_fp32', 'force_fp32', ([], {'apply_to': "('mask_preds',)"}), "(apply_to=('mask_preds',))\n", (12293, 12319), False, 'from mmcv.runner import force_fp32, auto_fp16\n'), ((2758, 2773), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (2771, 2773), True, 'import torch.nn as nn\n'), ((2803, 2818), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (2816, 2818), True, 'import torch.nn as nn\n'), ((3671, 3756), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.conv_out_channels', 'kernal_out_channels'], {'kernel_size': '(1)', 'padding': '(0)'}), '(self.conv_out_channels, kernal_out_channels, kernel_size=1, padding=0\n )\n', (3680, 3756), True, 'import torch.nn as nn\n'), ((3887, 4017), 'mmcv.cnn.ConvModule', 'ConvModule', (['self.conv_out_channels', 'self.conv_out_channels'], {'kernel_size': '(1)', 'padding': '(0)', 'norm_cfg': 'norm_cfg', 'bias': '(norm_cfg is None)'}), '(self.conv_out_channels, self.conv_out_channels, kernel_size=1,\n padding=0, norm_cfg=norm_cfg, bias=norm_cfg is None)\n', (3897, 4017), False, 'from mmcv.cnn import ConvModule\n'), ((5045, 5122), 'torch.linspace', 'torch.linspace', (['(-1)', '(1)', 'kernal_feature.shape[-1]'], {'device': 'kernal_feature.device'}), '(-1, 1, kernal_feature.shape[-1], device=kernal_feature.device)\n', (5059, 5122), False, 'import torch\n'), ((5141, 5218), 'torch.linspace', 'torch.linspace', (['(-1)', '(1)', 'kernal_feature.shape[-2]'], {'device': 'kernal_feature.device'}), '(-1, 1, kernal_feature.shape[-2], device=kernal_feature.device)\n', (5155, 5218), False, 'import torch\n'), ((5246, 5278), 'torch.meshgrid', 'torch.meshgrid', (['y_range', 'x_range'], {}), '(y_range, x_range)\n', (5260, 5278), False, 'import torch\n'), ((5445, 5477), 'torch.cat', 'torch.cat', (['[x_coord, y_coord]', '(1)'], {}), '([x_coord, y_coord], 1)\n', (5454, 5477), False, 'import torch\n'), ((5549, 5594), 'torch.cat', 'torch.cat', (['[kernal_feature, coord_feature]', '(1)'], {}), '([kernal_feature, coord_feature], 1)\n', (5558, 5594), False, 'import torch\n'), ((5663, 5706), 'torch.cat', 'torch.cat', (['[mask_feature, coord_feature]', '(1)'], {}), '([mask_feature, coord_feature], 1)\n', (5672, 5706), False, 'import torch\n'), ((6565, 6609), 'torch.topk', 'torch.topk', (['matched_bboxes', 'max_category_num'], {}), '(matched_bboxes, max_category_num)\n', (6575, 6609), False, 'import torch\n'), ((6906, 6946), 'torch.gather', 'torch.gather', (['kernal_feature', '(1)', 'indices'], {}), '(kernal_feature, 1, indices)\n', (6918, 6946), False, 'import torch\n'), ((9569, 9682), 'torch.zeros', 'torch.zeros', (['[batch, max_category_num, self.text_max_length, height, width]'], {'dtype': 'torch.uint8', 'device': 'device'}), '([batch, max_category_num, self.text_max_length, height, width],\n dtype=torch.uint8, device=device)\n', (9580, 9682), False, 'import torch\n'), ((9731, 9830), 'torch.zeros', 'torch.zeros', (['[batch, max_category_num, self.text_max_length]'], {'dtype': 'torch.float', 'device': 'device'}), '([batch, max_category_num, self.text_max_length], dtype=torch.\n float, device=device)\n', (9742, 9830), False, 'import torch\n'), ((2506, 2536), 'mmdet.models.builder.build_loss', 'build_loss', 
(['loss_char_mask_att'], {}), '(loss_char_mask_att)\n', (2516, 2536), False, 'from mmdet.models.builder import build_loss, HEADS\n'), ((4330, 4369), 'mmcv.cnn.normal_init', 'normal_init', (['kernal_conv.conv'], {'std': '(0.01)'}), '(kernal_conv.conv, std=0.01)\n', (4341, 4369), False, 'from mmcv.cnn import normal_init\n'), ((4429, 4468), 'mmcv.cnn.normal_init', 'normal_init', (['kernal_conv.conv'], {'std': '(0.01)'}), '(kernal_conv.conv, std=0.01)\n', (4440, 4468), False, 'from mmcv.cnn import normal_init\n'), ((12845, 12869), 'torch.sigmoid', 'torch.sigmoid', (['mask_pred'], {}), '(mask_pred)\n', (12858, 12869), False, 'import torch\n'), ((2994, 3115), 'mmcv.cnn.ConvModule', 'ConvModule', (['chn', 'self.conv_out_channels'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'norm_cfg': 'norm_cfg', 'bias': '(norm_cfg is None)'}), '(chn, self.conv_out_channels, kernel_size=3, stride=1, padding=1,\n norm_cfg=norm_cfg, bias=norm_cfg is None)\n', (3004, 3115), False, 'from mmcv.cnn import ConvModule\n'), ((3309, 3430), 'mmcv.cnn.ConvModule', 'ConvModule', (['chn', 'self.conv_out_channels'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'norm_cfg': 'norm_cfg', 'bias': '(norm_cfg is None)'}), '(chn, self.conv_out_channels, kernel_size=3, stride=1, padding=1,\n norm_cfg=norm_cfg, bias=norm_cfg is None)\n', (3319, 3430), False, 'from mmcv.cnn import ConvModule\n'), ((5915, 5976), 'torch.nn.functional.interpolate', 'F.interpolate', (['kernal_feature'], {'size': 'num_grid', 'mode': '"""bilinear"""'}), "(kernal_feature, size=num_grid, mode='bilinear')\n", (5928, 5976), True, 'import torch.nn.functional as F\n'), ((6407, 6434), 'torch.ge', 'torch.ge', (['matched_bboxes', '(1)'], {}), '(matched_bboxes, 1)\n', (6415, 6434), False, 'import torch\n'), ((9463, 9490), 'torch.ge', 'torch.ge', (['matched_bboxes', '(1)'], {}), '(matched_bboxes, 1)\n', (9471, 9490), False, 'import torch\n'), ((10551, 10614), 'numpy.zeros', 'np.zeros', (['(self.text_max_length, height, width)'], {'dtype': 'np.uint8'}), '((self.text_max_length, height, width), dtype=np.uint8)\n', (10559, 10614), True, 'import numpy as np\n'), ((10838, 10863), 'torch.Tensor', 'torch.Tensor', (['target_mask'], {}), '(target_mask)\n', (10850, 10863), False, 'import torch\n'), ((10196, 10223), 'torch.where', 'torch.where', (['(val == idx + 1)'], {}), '(val == idx + 1)\n', (10207, 10223), False, 'import torch\n'), ((10753, 10807), 'cv2.fillPoly', 'cv2.fillPoly', (['target_mask[c_id, :, :]', '[cbox]'], {'color': '(1)'}), '(target_mask[c_id, :, :], [cbox], color=1)\n', (10765, 10807), False, 'import cv2\n'), ((7261, 7313), 'torch.nn.functional.conv2d', 'F.conv2d', (['mask_feature', 'kernal_feature'], {'groups': 'batch'}), '(mask_feature, kernal_feature, groups=batch)\n', (7269, 7313), True, 'import torch.nn.functional as F\n')] |
from math import floor, log, ceil
from random import gauss, sample, random
import networkit as nk
# noinspection PyUnresolvedReferences
from networkit import Graph
from networkit.generators import BarabasiAlbertGenerator, WattsStrogatzGenerator
def _make_complete(n: int):
g_ = Graph(n)
for i in g_.iterNodes():
for j in g_.iterNodes():
if i < j:
g_.addEdge(i, j)
return g_
class CompositeGenerator(object):
"""
Pretend to extend inaccessible networkit._NetworKit.StaticGraphGenerator
"""
def __init__(self, n: int, community_count: int, small_world_neighbours: int, rewiring: float,
scale_free_k: int, probability_reduce: float = 0.05):
self._n = n
self._community_count = community_count
self._small_world_neighbours = small_world_neighbours
self._rewiring = rewiring
self._scale_free_k = scale_free_k
self._probability_reduce = probability_reduce
def generate(self):
# First, generate a scale free network, which acts as our community network.
communities = BarabasiAlbertGenerator(self._scale_free_k, self._community_count, 4, True).generate()
small_world_graphs = {}
node_count = communities.numberOfNodes()
community_size = self._n / self._community_count
# Then generate a small world graph for each node with size decided
# by a Gaussian distribution around the average node size.
i = node_count - 1
for node in communities.iterNodes():
local_size = gauss(community_size, community_size / 3)
# Choose local_n such that all communities have size at least two.
local_n = max(min(round(local_size), self._n - (2 * i)), 2)
# If it's the last iteration, we much "use up" the rest of the nodes.
if i == 0:
local_n = self._n
# For a random graph to be connected, we require that 2k >> ln(n).
# (2k because of how NetworKit defines k.)
# => k < (n-1)/2
connectivity = max(self._small_world_neighbours, floor(log(local_n)))
# However, we also require that 2k < n-1, since otherwise you end
# up with double links.
connectivity = max(0, min(ceil((local_n - 1) / 2) - 1, connectivity))
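            # Worked example (illustrative numbers, not from the original source): with
            # small_world_neighbours = 50 and a small community of local_n = 20,
            # floor(ln(20)) = 2, so the first max() keeps 50; the upper bound
            # ceil((20 - 1) / 2) - 1 = 9 then clamps connectivity to 9, keeping 2k < n - 1.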
if local_n > 3:
# Sometimes WattsStrogatzGenerators return unconnected graphs.
# This is due to the fact that 2k >> ln(n) is vague, and also
# bounded above by 2k < n-1.
# Therefore, we repeat the process until a connected graph is
# created. This shouldn't loop too many times.
is_connected = False
while not is_connected:
small_world_graphs[node] = WattsStrogatzGenerator(local_n, connectivity, self._rewiring).generate()
# noinspection PyUnresolvedReferences
connected_components = nk.components.ConnectedComponents(small_world_graphs[node]).run()
is_connected = connected_components.numberOfComponents() == 1
else:
small_world_graphs[node] = _make_complete(local_n)
self._n -= local_n
i -= 1
# Build a merged graph.
big_graph = Graph(0, False, False)
ranges = [0]
partition = []
neighbours = [list(communities.neighbors(node)) for node in communities.iterNodes()]
# To avoid neighbour sets having edges going both ways, delete references to nodes larger than themselves.
for n in range(len(neighbours)):
neighbours[n] = list(filter(lambda x: x < n, neighbours[n]))
for graph in small_world_graphs.values():
# noinspection PyUnresolvedReferences
nk.graphtools.append(big_graph, graph)
ranges.append(big_graph.numberOfNodes())
partition.append(list(range(ranges[-2], ranges[-1])))
# Finally, connect these small world graphs where their parent nodes are connected.
for i in range(len(neighbours)):
for j in neighbours[i]:
# Connect partitions i and j
n1 = partition[i]
n2 = partition[j]
p = 1.0
for nc1 in sample(n1, len(n1)):
for nc2 in sample(n2, len(n2)):
# Connect with probability p
if random() <= p:
big_graph.addEdge(nc1, nc2)
p = p * self._probability_reduce
return big_graph
if __name__ == '__main__':
import matplotlib.pyplot as plt
from networkit.viztasks import drawGraph
g = CompositeGenerator(4000, 15, 50, 0.1, 2).generate()
drawGraph(g)
plt.show()
| [
"math.ceil",
"networkit.components.ConnectedComponents",
"networkit.Graph",
"networkit.viztasks.drawGraph",
"networkit.generators.WattsStrogatzGenerator",
"networkit.generators.BarabasiAlbertGenerator",
"math.log",
"networkit.graphtools.append",
"random.random",
"random.gauss",
"matplotlib.pyplot.show"
] | [((285, 293), 'networkit.Graph', 'Graph', (['n'], {}), '(n)\n', (290, 293), False, 'from networkit import Graph\n'), ((4836, 4848), 'networkit.viztasks.drawGraph', 'drawGraph', (['g'], {}), '(g)\n', (4845, 4848), False, 'from networkit.viztasks import drawGraph\n'), ((4853, 4863), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4861, 4863), True, 'import matplotlib.pyplot as plt\n'), ((3359, 3381), 'networkit.Graph', 'Graph', (['(0)', '(False)', '(False)'], {}), '(0, False, False)\n', (3364, 3381), False, 'from networkit import Graph\n'), ((1582, 1623), 'random.gauss', 'gauss', (['community_size', '(community_size / 3)'], {}), '(community_size, community_size / 3)\n', (1587, 1623), False, 'from random import gauss, sample, random\n'), ((3860, 3898), 'networkit.graphtools.append', 'nk.graphtools.append', (['big_graph', 'graph'], {}), '(big_graph, graph)\n', (3880, 3898), True, 'import networkit as nk\n'), ((1117, 1192), 'networkit.generators.BarabasiAlbertGenerator', 'BarabasiAlbertGenerator', (['self._scale_free_k', 'self._community_count', '(4)', '(True)'], {}), '(self._scale_free_k, self._community_count, 4, True)\n', (1140, 1192), False, 'from networkit.generators import BarabasiAlbertGenerator, WattsStrogatzGenerator\n'), ((2144, 2156), 'math.log', 'log', (['local_n'], {}), '(local_n)\n', (2147, 2156), False, 'from math import floor, log, ceil\n'), ((2311, 2334), 'math.ceil', 'ceil', (['((local_n - 1) / 2)'], {}), '((local_n - 1) / 2)\n', (2315, 2334), False, 'from math import floor, log, ceil\n'), ((2850, 2911), 'networkit.generators.WattsStrogatzGenerator', 'WattsStrogatzGenerator', (['local_n', 'connectivity', 'self._rewiring'], {}), '(local_n, connectivity, self._rewiring)\n', (2872, 2911), False, 'from networkit.generators import BarabasiAlbertGenerator, WattsStrogatzGenerator\n'), ((3024, 3083), 'networkit.components.ConnectedComponents', 'nk.components.ConnectedComponents', (['small_world_graphs[node]'], {}), '(small_world_graphs[node])\n', (3057, 3083), True, 'import networkit as nk\n'), ((4504, 4512), 'random.random', 'random', ([], {}), '()\n', (4510, 4512), False, 'from random import gauss, sample, random\n')] |
import torch
import torchvision
import parameter as p
import numpy as np
import torchvision.transforms as transforms
def read_mnist(train):
return torchvision.datasets.MNIST( "./file",
train=train,
download=True,
transform=torchvision.transforms.Compose([
torchvision.transforms.ToTensor()
]))
def read_cifar(train):
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
#transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
])
transform_test = transforms.Compose([
transforms.ToTensor(),
#transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
trainset = torchvision.datasets.CIFAR10(root='./file/cifar10', train=True,
download=True, transform=transform_train)
if train==True:
return trainset
else:
return torchvision.datasets.CIFAR10(root='./file/cifar10', train=train,
download=True, transform=transform_test)
def read_fmnist(train):
transform_train = transforms.Compose([
#transforms.Pad(padding=4,fill=0),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5), (0.5)),
])
transform_test = transforms.Compose([
#transforms.Pad(padding=4,fill=0),
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,)),
])
trainset = torchvision.datasets.FashionMNIST(root='./file/fmnist', train=True,
download=True, transform=transform_train)
if train==True:
return trainset
else:
return torchvision.datasets.FashionMNIST(root='./file/fmnist', train=train,
download=True, transform=transform_test)
def my_collate(batch):
"""
Reads p.n_classes to keep a subset of classes in the dataset.
Used to subsample classes to have easier analysis of results.
    Gives an error if the sampled classes do not start from the first class
    (e.g., it won't let you sample classes 2-5; they have to start from 0).
Parameters
----------
batch: torch.tensor
Batch of data to be collated.
Returns
-------
dataloader.default_collate
"""
modified_batch = []
for item in batch:
image, label = item
class_list = [*range(0, p.n_classes, 1)]
if label in class_list:
modified_batch.append(item)
return torch.utils.data.dataloader.default_collate(modified_batch)
def get_weights(selected_class, dataset):
"""
Used to get sampling weights for each instance in the dataset.
Given the selected_class, makes sure that half of the batch contains
    the selected_class, and the rest is sampled equally from the remaining classes.
Parameters
----------
    selected_class: int
        Represents the index of the selected class.
    dataset: str
        Name of the dataset to load ('mnist', 'cifar10', or 'fmnist').
Returns
-------
all_weights: list
List of sampling weights for each instance in the dataset.
"""
if dataset == 'mnist':
dataset_ = read_mnist(train=True)
targets = dataset_.targets.detach().numpy()
elif dataset == 'cifar10':
dataset_ = read_cifar(train=True)
targets = np.array(dataset_.targets)
elif dataset == 'fmnist':
dataset_ = read_fmnist(train=True)
targets = dataset_.targets.detach().numpy()
class_sample_count = np.array([len(np.where(targets == t)[0]) for t in np.unique(targets)])
weight = 1. / class_sample_count
samples_weight = np.array([weight[t] for t in targets])
selected_class_weight = 0.5 / class_sample_count[selected_class]
other_class_diviser = 0.5 / ((p.num_classes)-1.0)
all_weights = []
for i in range(len(targets)):
if targets[i] == selected_class:
all_weights.append(selected_class_weight)
else:
num_instances = class_sample_count[targets[i]]
all_weights.append(other_class_diviser / num_instances)
return all_weights
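# Illustrative sketch (added commentary, not part of the original module): a tiny,
# dataset-free check of the weighting scheme used in get_weights. With 10 classes of
# 6000 instances each, the selected class carries total sampling mass 0.5 and every
# other class carries 0.5 / 9, which is what makes half of each batch the selected class.
def _example_weight_scheme(selected_class=0, n_classes=10, per_class=6000):
    counts = [per_class] * n_classes
    weights = []
    for label in range(n_classes):
        for _ in range(counts[label]):
            if label == selected_class:
                weights.append(0.5 / counts[label])
            else:
                weights.append((0.5 / (n_classes - 1.0)) / counts[label])
    selected_mass = sum(weights[:per_class])
    other_mass = sum(weights[per_class:])
    return selected_mass, other_mass  # approximately (0.5, 0.5)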
def get_trainloader(dataset):
"""
    Returns a trainloader that contains the selected dataset's training instances
    with the given p.batch_size_train, and the collate function.
    Used to train the model for the main classification task.
    Parameters
    ----------
    dataset: str
        Name of the dataset to load ('mnist', 'cifar10', or 'fmnist').
    Returns
    -------
    train_loader: torch.utils.data.DataLoader
        DataLoader over the training instances.
"""
if dataset == 'mnist':
dataset_ = read_mnist(train=True)
elif dataset == 'cifar10':
dataset_ = read_cifar(train=True)
elif dataset == 'fmnist':
dataset_ = read_fmnist(train=True)
train_loader = torch.utils.data.DataLoader(dataset_,
batch_size=p.batch_size_train,
shuffle=True,
collate_fn=my_collate)
return train_loader
def get_testloader(dataset):
"""
    Returns a test loader that contains the selected dataset's test instances
    with the given p.batch_size_test, and the collate function.
    Used to evaluate the model on the main classification task.
    Parameters
    ----------
    dataset: str
        Name of the dataset to load ('mnist', 'cifar10', or 'fmnist').
    Returns
    -------
    test_loader: torch.utils.data.DataLoader
        DataLoader over the test instances.
"""
if dataset == 'mnist':
dataset_ = read_mnist(train=False)
elif dataset == 'cifar10':
dataset_ = read_cifar(train=False)
elif dataset == 'fmnist':
dataset_ = read_fmnist(train=False)
test_loader = torch.utils.data.DataLoader(dataset_,
batch_size=p.batch_size_test,
shuffle=True,
collate_fn=my_collate)
return test_loader
def get_weighted_trainloader(class_index, dataset):
"""
    Given a class index, returns a trainloader that uses the
    sampling weights from get_weights to ensure class_index samples
    make up half of the batch, while the other half is drawn equally
    from the rest of the classes.
    This function is used for the binary classification task to tie
    the classes to their corresponding clusters.
    Parameters
    ----------
    class_index: int
        Index of the class that should make up half of each batch.
    dataset: str
        Name of the dataset to load ('mnist', 'cifar10', or 'fmnist').
    Returns
    -------
    train_loader: torch.utils.data.DataLoader
        Weighted-sampling DataLoader with batch size p.batch_size_class.
"""
if dataset == 'mnist':
dataset_ = read_mnist(train=True)
elif dataset == 'cifar10':
dataset_ = read_cifar(train=True)
elif dataset == 'fmnist':
dataset_ = read_fmnist(train=True)
sampler = torch.utils.data.sampler.WeightedRandomSampler(get_weights(class_index, dataset),
p.batch_size_class,
replacement=True)
train_loader = torch.utils.data.DataLoader(dataset_,
batch_size=p.batch_size_class,
shuffle=False, sampler=sampler,
collate_fn=my_collate)
return train_loader
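# Hypothetical usage (added commentary, not part of the original module). Running it
# downloads MNIST via torchvision and requires parameter.py to define batch_size_class
# and n_classes, so it is wrapped in a function instead of executing at import time.
def _example_weighted_loader(class_index=3):
    loader = get_weighted_trainloader(class_index, dataset='mnist')
    images, labels = next(iter(loader))
    # Roughly half of `labels` should equal `class_index`.
    return images, labels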
| [
"torch.utils.data.dataloader.default_collate",
"numpy.unique",
"torchvision.datasets.FashionMNIST",
"numpy.where",
"torchvision.transforms.RandomHorizontalFlip",
"torchvision.transforms.RandomCrop",
"numpy.array",
"torchvision.datasets.CIFAR10",
"torchvision.transforms.Normalize",
"torch.utils.data.DataLoader",
"torchvision.transforms.ToTensor"
] | [((924, 1034), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': '"""./file/cifar10"""', 'train': '(True)', 'download': '(True)', 'transform': 'transform_train'}), "(root='./file/cifar10', train=True, download=\n True, transform=transform_train)\n", (952, 1034), False, 'import torchvision\n'), ((1737, 1850), 'torchvision.datasets.FashionMNIST', 'torchvision.datasets.FashionMNIST', ([], {'root': '"""./file/fmnist"""', 'train': '(True)', 'download': '(True)', 'transform': 'transform_train'}), "(root='./file/fmnist', train=True,\n download=True, transform=transform_train)\n", (1770, 1850), False, 'import torchvision\n'), ((2818, 2877), 'torch.utils.data.dataloader.default_collate', 'torch.utils.data.dataloader.default_collate', (['modified_batch'], {}), '(modified_batch)\n', (2861, 2877), False, 'import torch\n'), ((3952, 3990), 'numpy.array', 'np.array', (['[weight[t] for t in targets]'], {}), '([weight[t] for t in targets])\n', (3960, 3990), True, 'import numpy as np\n'), ((5122, 5231), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_'], {'batch_size': 'p.batch_size_train', 'shuffle': '(True)', 'collate_fn': 'my_collate'}), '(dataset_, batch_size=p.batch_size_train,\n shuffle=True, collate_fn=my_collate)\n', (5149, 5231), False, 'import torch\n'), ((6060, 6169), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_'], {'batch_size': 'p.batch_size_test', 'shuffle': '(True)', 'collate_fn': 'my_collate'}), '(dataset_, batch_size=p.batch_size_test, shuffle\n =True, collate_fn=my_collate)\n', (6087, 6169), False, 'import torch\n'), ((7439, 7566), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_'], {'batch_size': 'p.batch_size_class', 'shuffle': '(False)', 'sampler': 'sampler', 'collate_fn': 'my_collate'}), '(dataset_, batch_size=p.batch_size_class,\n shuffle=False, sampler=sampler, collate_fn=my_collate)\n', (7466, 7566), False, 'import torch\n'), ((1154, 1264), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': '"""./file/cifar10"""', 'train': 'train', 'download': '(True)', 'transform': 'transform_test'}), "(root='./file/cifar10', train=train, download=\n True, transform=transform_test)\n", (1182, 1264), False, 'import torchvision\n'), ((1971, 2084), 'torchvision.datasets.FashionMNIST', 'torchvision.datasets.FashionMNIST', ([], {'root': '"""./file/fmnist"""', 'train': 'train', 'download': '(True)', 'transform': 'transform_test'}), "(root='./file/fmnist', train=train,\n download=True, transform=transform_test)\n", (2004, 2084), False, 'import torchvision\n'), ((547, 583), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['(32)'], {'padding': '(4)'}), '(32, padding=4)\n', (568, 583), True, 'import torchvision.transforms as transforms\n'), ((589, 622), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (620, 622), True, 'import torchvision.transforms as transforms\n'), ((628, 649), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (647, 649), True, 'import torchvision.transforms as transforms\n'), ((791, 812), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (810, 812), True, 'import torchvision.transforms as transforms\n'), ((1425, 1458), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (1456, 1458), True, 'import torchvision.transforms as transforms\n'), ((1464, 1485), 'torchvision.transforms.ToTensor', 
'transforms.ToTensor', ([], {}), '()\n', (1483, 1485), True, 'import torchvision.transforms as transforms\n'), ((1491, 1521), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.5)', '(0.5)'], {}), '(0.5, 0.5)\n', (1511, 1521), True, 'import torchvision.transforms as transforms\n'), ((1636, 1657), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1655, 1657), True, 'import torchvision.transforms as transforms\n'), ((1667, 1703), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.5,)', '(0.5,)'], {}), '((0.5,), (0.5,))\n', (1687, 1703), True, 'import torchvision.transforms as transforms\n'), ((3629, 3655), 'numpy.array', 'np.array', (['dataset_.targets'], {}), '(dataset_.targets)\n', (3637, 3655), True, 'import numpy as np\n'), ((3873, 3891), 'numpy.unique', 'np.unique', (['targets'], {}), '(targets)\n', (3882, 3891), True, 'import numpy as np\n'), ((400, 433), 'torchvision.transforms.ToTensor', 'torchvision.transforms.ToTensor', ([], {}), '()\n', (431, 433), False, 'import torchvision\n'), ((3837, 3859), 'numpy.where', 'np.where', (['(targets == t)'], {}), '(targets == t)\n', (3845, 3859), True, 'import numpy as np\n')] |
import time
import pytest
from conftest import rsa, Path
from conftest import ACMERequestActions, JWSRS256, JWKRSA, _JWKBase, \
ACMEAccount, ACMEAccountActions, ACMEError, ACMEOrder, \
settings
from test_common import *
def count_equal(a_list: list, b_list: list) -> None:
"""assert equality of two lists, order not considered"""
a_list, b_list = sorted(a_list), sorted(b_list)
assert len(a_list) == len(b_list)
assert all([a == b for a, b in zip(a_list, b_list)])
class TestACMERequestActionsTest:
def test_retry_badNonce(self,
new_request_action: ACMERequestActions,
new_rsa_privkey: rsa.RSAPrivateKey):
"""
create account without using ACMEAccountAction, while using incorrect
nonce to trigger badNonce retry.
"""
req = new_request_action
jws = JWSRS256(
url=req.acme_dir['newAccount'],
nonce='badNonce',
jwk = JWKRSA(
priv_key=new_rsa_privkey,
n=new_rsa_privkey.public_key().public_numbers().n,
e=new_rsa_privkey.public_key().public_numbers().e
),
payload={
'termsOfServiceAgreed': True,
'contact': TEST_CONTACTS
},
)
jws.sign()
resp = req.new_account(jws)
assert resp.status_code == 201
class TestACMEAccountObjectCreation:
def test_init_by_create(self,
new_jwk: _JWKBase,
new_acct_action: ACMEAccountActions):
acct_obj = ACMEAccount.init_by_create(
jwk=new_jwk,
acct_actions=new_acct_action,
contact=TEST_CONTACTS
)
        # response 201-created if an account is created
assert acct_obj._resp.status_code == 201
def test_init_by_query(self,
new_acct_action: ACMEAccountActions,
new_acct_obj: ACMEAccount):
acct_obj_query = ACMEAccount.init_by_query(
jwk=new_acct_obj.jwk_obj,
acct_actions=new_acct_action
)
        # response 200-OK if an account is returned successfully
assert acct_obj_query._resp.status_code == 200
class TestACMEAccountActions:
def test_poll_acct_state(self, new_acct_obj: ACMEAccount):
# for an acct just created, status code will be 201
assert new_acct_obj._resp.status_code == 201
new_acct_obj.poll_acct_state()
# poll and update acct state, status code will be updated to 200
assert new_acct_obj._resp.status_code == 200
def test_update_account(self, new_acct_obj: ACMEAccount):
new_acct_obj.update_account(contact=TEST_CONTACTS_MOD)
# successful update will return 200-OK
assert new_acct_obj._resp.status_code == 200
new_acct_obj.poll_acct_state()
# assert if attribute "contact" is actually updated
count_equal(new_acct_obj.contact, TEST_CONTACTS_MOD)
def test_deactivate(self, new_acct_obj: ACMEAccount):
new_acct_obj.deactivate()
# successful deactivation will return 200-OK
assert new_acct_obj._resp.status_code == 200
assert new_acct_obj.status == 'deactivated'
# now post-as-get to a deactivated acct will have 403-Forbidden
with pytest.raises(ACMEError) as caught:
new_acct_obj.poll_acct_state()
assert caught.value.status_code == 403
def test_account_key_rollover(self,
new_jwk_i: _JWKBase,
new_acct_obj: ACMEAccount):
new_acct_obj.account_key_rollover(new_jwk_i)
# successful rollover will return 200-OK
assert new_acct_obj._resp.status_code == 200
assert new_acct_obj.jwk_obj is new_jwk_i
with pytest.raises(ACMEError) as caught:
new_acct_obj.account_key_rollover(new_jwk_i)
assert "New and old key are identical" in str(caught.value)
class TestACMEOrder:
domain = ['i.test.local']
domains = ['a.test.local', 'b.test.local']
def _test_new_order_domain(self, d: list, new_acct_obj: ACMEAccount):
order_obj = new_acct_obj.new_order(identifiers=d)
# successful order creation will return 201-created
assert new_acct_obj._resp.status_code == 201
assert order_obj._resp.status_code == 201
# only one order related to the account
assert len(new_acct_obj.order_objs) == 1
assert new_acct_obj.order_objs[0] is order_obj
count_equal(d, order_obj.identifier_values)
def test_new_order_single_domain(self, new_acct_obj: ACMEAccount):
self._test_new_order_domain(self.domain, new_acct_obj)
def test_new_order_multi_domain(self, new_acct_obj: ACMEAccount):
self._test_new_order_domain(self.domains, new_acct_obj)
def test_poll_order_state(self, new_order_obj: ACMEOrder):
new_order_obj.poll_order_state()
assert new_order_obj._resp.status_code == 200
def test_get_orders(self,
new_acct_action: ACMEAccountActions,
new_acct_obj: ACMEAccount,
new_order_obj: ACMEOrder):
assert len(new_acct_obj.order_objs) == 1
# create new orders directly from account actions
new_acct_action.new_order(
acct_obj=new_acct_obj,
identifiers=[dict(type='dns', value=v) for v in self.domains],
not_before='',
not_after='',
jws_type=new_acct_obj.jwk_obj.related_JWS
)
# refresh account order list
new_acct_obj.get_orders()
assert len(new_acct_obj.order_objs) == 2
class TestACMEAuthorization:
domain = ['a.test.local', 'b.test.local']
def test_poll_auth_state(self, new_order_obj: ACMEOrder):
assert len(new_order_obj.auth_objs) == 2
for auth in new_order_obj.auth_objs:
auth.poll_auth_state()
assert auth._resp.status_code == 200
def test_deactivate_auth(self, new_order_obj: ACMEOrder):
auth_to_deactivate = new_order_obj.auth_objs[0]
auth_to_deactivate.deactivate_auth()
# successful deactivation will return 200-OK
assert auth_to_deactivate._resp.status_code == 200
assert auth_to_deactivate.status == 'deactivated'
# related order state will also become "deactivated"
new_order_obj.poll_order_state()
assert new_order_obj.status == 'deactivated'
class TestACMEChallenge:
"""respond using pebble challtest"""
# use one order with 1 auth to save test time
domain = ['a.test.local']
def test_respond_http(self, new_order_obj: ACMEOrder):
jwk = new_order_obj.related_acct.jwk_obj
for auth in new_order_obj.auth_objs:
add_http_01(auth.chall_http.token, jwk)
auth.chall_http.respond()
            # if we poll auth immediately after respond, status should be "pending"
auth.poll_auth_state()
assert auth.status == 'pending'
time.sleep(4)
auth.poll_auth_state()
assert auth.chall_http.status == 'valid'
assert auth.status == 'valid'
# once all auth valid, order state become "ready"
new_order_obj.poll_order_state()
assert new_order_obj.status == 'ready'
@staticmethod
def _test_respond_dns(new_order_obj: ACMEOrder):
jwk = new_order_obj.related_acct.jwk_obj
for auth in new_order_obj.auth_objs:
add_dns_01(auth.chall_dns.token, auth.identifier_value, jwk)
auth.chall_dns.respond()
            # if we poll auth immediately after respond, status should be "pending"
auth.poll_auth_state()
assert auth.status == 'pending'
time.sleep(5)
auth.poll_auth_state()
assert auth.chall_dns.status == 'valid'
assert auth.status == 'valid'
# once all auth valid, order state become "ready"
new_order_obj.poll_order_state()
assert new_order_obj.status == 'ready'
def test_respond_dns(self, new_order_obj: ACMEOrder):
self._test_respond_dns(new_order_obj)
@pytest.mark.domain('*.test.local')
def test_respond_dns_wildcard(self, new_order_obj: ACMEOrder):
print(new_order_obj.identifier_values)
self._test_respond_dns(new_order_obj)
class TestACMEOrderCertificate:
domain = ['a.test.local']
def test_download_cert(self,
new_tmp_wd: Tuple[Path, Path, Path, Path],
new_rsa_privkey_i: rsa.RSAPrivateKey,
new_ready_order: ACMEOrder):
new_ready_order.poll_order_state()
assert new_ready_order.status == 'ready'
wd, cert_path, chain_path, fullchain_path = new_tmp_wd
download_root_cert(wd)
# test finalize order
new_ready_order.finalize_order(
privkey=new_rsa_privkey_i,
emailAddress='<EMAIL>',
C='CN',
ST='test ST',
L='test L',
O='test org',
OU='test OU'
)
time.sleep(1)
new_ready_order.poll_order_state()
# after finalized, state become "valid", cannot be finalized again
assert new_ready_order.status == 'valid'
# test download cert
download_resp = new_ready_order.download_certificate(str(wd))
assert download_resp.status_code == 200
# external openssl verify
completed_p = openssl_verify(cert_path, chain_path, wd)
assert completed_p.returncode == 0 | [
"conftest.ACMEAccount.init_by_query",
"pytest.mark.domain",
"time.sleep",
"pytest.raises",
"conftest.ACMEAccount.init_by_create"
] | [((8331, 8365), 'pytest.mark.domain', 'pytest.mark.domain', (['"""*.test.local"""'], {}), "('*.test.local')\n", (8349, 8365), False, 'import pytest\n'), ((1656, 1752), 'conftest.ACMEAccount.init_by_create', 'ACMEAccount.init_by_create', ([], {'jwk': 'new_jwk', 'acct_actions': 'new_acct_action', 'contact': 'TEST_CONTACTS'}), '(jwk=new_jwk, acct_actions=new_acct_action,\n contact=TEST_CONTACTS)\n', (1682, 1752), False, 'from conftest import ACMERequestActions, JWSRS256, JWKRSA, _JWKBase, ACMEAccount, ACMEAccountActions, ACMEError, ACMEOrder, settings\n'), ((2078, 2164), 'conftest.ACMEAccount.init_by_query', 'ACMEAccount.init_by_query', ([], {'jwk': 'new_acct_obj.jwk_obj', 'acct_actions': 'new_acct_action'}), '(jwk=new_acct_obj.jwk_obj, acct_actions=\n new_acct_action)\n', (2103, 2164), False, 'from conftest import ACMERequestActions, JWSRS256, JWKRSA, _JWKBase, ACMEAccount, ACMEAccountActions, ACMEError, ACMEOrder, settings\n'), ((9288, 9301), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9298, 9301), False, 'import time\n'), ((3417, 3441), 'pytest.raises', 'pytest.raises', (['ACMEError'], {}), '(ACMEError)\n', (3430, 3441), False, 'import pytest\n'), ((3923, 3947), 'pytest.raises', 'pytest.raises', (['ACMEError'], {}), '(ACMEError)\n', (3936, 3947), False, 'import pytest\n'), ((7186, 7199), 'time.sleep', 'time.sleep', (['(4)'], {}), '(4)\n', (7196, 7199), False, 'import time\n'), ((7923, 7936), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (7933, 7936), False, 'import time\n')] |
# coding=utf-8
"""
author = jamon
"""
from aiohttp import web
from obespoir.httpserver.route import HttpHandler
from obespoir.server.server import Server
@HttpHandler("/")
async def index(request):
return web.Response(body="hello", content_type="text/html")
@HttpHandler("/update_remote_rpc_config")
async def update_remote_rpc_config(request):
await Server().update_remote_rpc_config()
return web.Response(body="ok~", content_type="text/html") | [
"aiohttp.web.Response",
"obespoir.httpserver.route.HttpHandler",
"obespoir.server.server.Server"
] | [((159, 175), 'obespoir.httpserver.route.HttpHandler', 'HttpHandler', (['"""/"""'], {}), "('/')\n", (170, 175), False, 'from obespoir.httpserver.route import HttpHandler\n'), ((269, 309), 'obespoir.httpserver.route.HttpHandler', 'HttpHandler', (['"""/update_remote_rpc_config"""'], {}), "('/update_remote_rpc_config')\n", (280, 309), False, 'from obespoir.httpserver.route import HttpHandler\n'), ((213, 265), 'aiohttp.web.Response', 'web.Response', ([], {'body': '"""hello"""', 'content_type': '"""text/html"""'}), "(body='hello', content_type='text/html')\n", (225, 265), False, 'from aiohttp import web\n'), ((412, 462), 'aiohttp.web.Response', 'web.Response', ([], {'body': '"""ok~"""', 'content_type': '"""text/html"""'}), "(body='ok~', content_type='text/html')\n", (424, 462), False, 'from aiohttp import web\n'), ((365, 373), 'obespoir.server.server.Server', 'Server', ([], {}), '()\n', (371, 373), False, 'from obespoir.server.server import Server\n')] |
"""
Created by: <NAME>
This module is an updated version of my first feature extraction exploration steps. It is meant to allow me to revisit this
analysis in the future as well as check old findings with ease.
"""
import numpy as np
from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider
import ipywidgets as widgets
import scipy
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import os
import math
def view_corr_sep(CFT, CFS, Template, CH_Sel, FREQ_SEL, Top, Bottom):
"""Plots the Pearson Correlation Coefficient Histogram of Two Classes (Assummed to be Song vs. Silence) with Normalized Mean in solid Line
Parameters:
-----------
CFT: list
(Channel Frequency Trials) Re-organized Neural Data that were used for constructing the Templates
[Ch]->[Freq]->[Time(Samples) x Trials]
CFS: list
        (Channel Frequency Separated) Re-organized Neural Data to be compared against Template's source
        [Ch]->[Freq]->[Time(Samples) x Trials]
    Template: list
        Templates (Mean) of Every Frequency Band for Each Channel
        [Ch]->[Freq]->[Time(Samples) x 1]
CH_Sel: int
Selected Recording Channel
FREQ_SEL: int
Selected Frequency Band
Top: list
        List of High Frequency Cutoffs of the Bandpasses used
Bottom: list
List of Low Frequency Cutoffs
"""
# TODO: Add Asserts
# TODO: Validate it works as intended
# TODO: Push to Module?
assert isinstance(CFT, list)
assert isinstance(CFS, list)
assert isinstance(Template, list)
Num_Trials = len(CFT[CH_Sel][FREQ_SEL][1,:])
Song_CorrCoef = np.zeros((1,Num_Trials))
Silence_CorrCoef = np.zeros((1,Num_Trials))
    P_Value_Counter_Song = 0 # TODO: Evaluate P_Value Handling
    P_Value_Counter_Silence = 0 # TODO: Evaluate P_Value Handling
for k in range(Num_Trials):
Song_CorrCoef[0,k], p_value1 = scipy.stats.pearsonr((CFT[CH_Sel][FREQ_SEL][:,k]),(Template[CH_Sel][FREQ_SEL][:,0]))
Silence_CorrCoef[0,k], p_value2 = scipy.stats.pearsonr((CFS[CH_Sel][FREQ_SEL][:,k]),(Template[CH_Sel][FREQ_SEL][:,0]))
if p_value1 > .05:
# print 'P Value too high, Song P_Value = '+ str(p_value1)
P_Value_Counter_Song = P_Value_Counter_Song + 1
if p_value2 > .05:
# print 'P Value too high, Silence P_Value = '+ str(p_value2)
P_Value_Counter_Silence = P_Value_Counter_Silence + 1
Feat_Song_men2 = np.mean(Song_CorrCoef)
Feat_Silence_men2 = np.mean(Silence_CorrCoef)
# TODO: Review this Handling of P-Values
print('Number of Song P-Values Greater than .05: ' + str(P_Value_Counter_Song))
print('Number of Silence P-Values Greater than .05: ' + str(P_Value_Counter_Silence))
plt.figure(figsize = (8,7))
plt.title('Correlation (Channel %d Frequency Band= %d-%d)' %(CH_Sel, Bottom[FREQ_SEL], Top[FREQ_SEL] ))
plt.axvline(x=Feat_Song_men2, color = 'coral', linewidth='4')
plt.axvline(x=Feat_Silence_men2, color = 'r', linewidth='4')
plt.xlabel('Correlation')
plt.xlim(-1,1)
    plt.hist(np.transpose(Song_CorrCoef), 20, (-1,1), density=True, label ='Song', color = 'blue', edgecolor= 'black')
    plt.xticks(rotation='vertical')
    plt.hist(np.transpose(Silence_CorrCoef), 20, range =(-1,1), density=True, label ='Silence', color = 'green', edgecolor= 'black')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
def corr_sep_GUI(Song, Silence, All_Temp, Top, Bottom, Num_Chan ):
"""GUI for Viewing Compared Correlations Plots
Parameters:
-----------
    Song: list
        (Channel Frequency Trials) Re-organized Neural Data that were used for constructing the Templates
        [Ch]->[Freq]->[Time(Samples) x Trials]
    Silence: list
        (Channel Frequency Separated) Re-organized Neural Data to be compared against Template's source
        [Ch]->[Freq]->[Time(Samples) x Trials]
    All_Temp: list
        Templates (Mean) of Every Frequency Band for Each Channel
        [Ch]->[Freq]->[Time(Samples) x 1]
    Top: ndarray
        List of High Frequency Cutoffs of the Bandpasses used
Bottom: ndarray
List of Low Frequency Cutoffs
Num_Chan: int
Number of Channels
"""
Channel_widget = widgets.IntSlider(value=0, min=0, max= Num_Chan-1,step=1,description='Channel')
Freq_widget = widgets.IntSlider(value=0, min=0, max= len(Top)-1 ,step=1,description='Freq')
interact(view_corr_sep, CFT =fixed(Song) , CFS = fixed(Silence), Template = fixed(All_Temp),
CH_Sel =Channel_widget, FREQ_SEL= Freq_widget, Top = fixed(Top), Bottom= fixed(Bottom))
def Corr_Seperation(Channel_Freq_Song, Channel_Freq_Silence, Match_Test, Num_Channels, Num_Freqs, Top, Bottom, Trial_Index, Plot= False):
"""Create a Heatmap that seeks to visualize the discernability between instances of Song Activity and Silence
Parameters:
-----------
Channel_Freq_Song: list
Re-organized Neural Data that were used for constructing the Templates
        [Ch]->[Freq]->[Time(Samples) x Trials]
    Channel_Freq_Silence: list
        Re-organized Neural Data to be compared against Template's source
        [Ch]->[Freq]->[Time(Samples) x Trials]
    Match_Test: list
        Templates (Mean) of Every Frequency Band for Each Channel
        [Ch]->[Freq]->[Time(Samples) x 1]
Num_Channels: int
Number of recording Channels
    Num_Freqs: int
        Number of Frequency Bands
    Top: list
        List of High Frequency Cutoffs of the Bandpasses used
Bottom: list
List of Low Frequency Cutoffs
Trial_Index: list ???
List of Trials to Be used for Desired Label
Plot: bool (Optional)
If set to True it will plot a heatmap of the Normalized results, Defaults to False
Returns:
--------
Feat_Seperation_Norm_Corr: list
        Normalized difference in mean Pearson Correlation Coefficients of two Classes (The First class is used for the Template)
[Channels x Freq. Bands]
Feat_Seperation_Edges_Corr: list
Distance between Edges of the Histogram of Correlation Coefficients
[Channels x Freq. Bands]
"""
    #1. Dynamic Handling of Channels, Frequency Bands, and Trials
Num_Channels = len(Channel_Freq_Song)
    Num_Trials = len(Channel_Freq_Song[0][0][1,:]) ##### This needs to be made more dynamic
Num_Features = len(Channel_Freq_Song[0])
#2. Initiate Memory for each Channel
Feat_Seperation_Edges_Corr = np.zeros((16, Num_Features)) # For Edges
Feat_Seperation_Norm_Corr = np.zeros((16, Num_Features)) # For Norm??
    #3. Initiate Separate Lists for Song and Silence
Chan_Corr_Song = []
Chan_Corr_Silence = []
    P_Value_Counter_Song = 0 # TODO: Evaluate P_Value Handling
    P_Value_Counter_Silence = 0 # TODO: Evaluate P_Value Handling
#4. Meat of Function
for CH_Sel in range(Num_Channels):
for FREQ_SEL in range(len(Top)):
#4.1 Create Memory Space
Song_CorrCoef = np.zeros((1,len(Trial_Index)))
Silence_CorrCoef = np.zeros((1,len(Trial_Index)))
Freq_Holder_Song= []
Freq_Holder_Silence= []
#4.2 For Each Trial Find the Correlation Coefficients
for k in range(len(Trial_Index)):
Song_CorrCoef[0,k], p_value1 = scipy.stats.pearsonr((Channel_Freq_Song[CH_Sel][FREQ_SEL][:,k]),(Match_Test[CH_Sel][FREQ_SEL][:,0]))
Silence_CorrCoef[0,k], p_value2 = scipy.stats.pearsonr((Channel_Freq_Silence[CH_Sel][FREQ_SEL][:,k]),(Match_Test[CH_Sel][FREQ_SEL][:,0]))
if p_value1 > .05:
# print 'P Value too high, Song P_Value = '+ str(p_value1)
P_Value_Counter_Song = P_Value_Counter_Song + 1
if p_value2 > .05:
# print 'P Value too high, Silence P_Value = '+ str(p_value2)
P_Value_Counter_Silence = P_Value_Counter_Silence + 1
#4.3 Find Difference between Edges to Determine Overlap
Feat_Seperation_Edges_Corr[CH_Sel,FREQ_SEL] = find_edges(np.median(Song_CorrCoef), np.median(Silence_CorrCoef), Song_CorrCoef, Silence_CorrCoef) # Store Edge Overlap Result
#4.4 Normalized Mean Distance:
# Divide the difference between the mean of Song Corr-Coef and Silence Corr-Coef by the Sum of their Standard Deviations
# ((Mean of Song Corr-Coef) - (Mean of Silence Corr-Coef))/ (Sum of Standard Deviation of Song Corr-Coef & Silence Corr-Coef)
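            # Illustrative numbers (added commentary): if the song coefficients have mean 0.6
            # and std 0.1 while the silence coefficients have mean 0.1 and std 0.1, the
            # statistic is (0.6 - 0.1) / (0.1 + 0.1) = 2.5, i.e. the class means sit about
            # 2.5 pooled standard deviations apart.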
            Feat_Seperation_Norm_Corr[CH_Sel,FREQ_SEL] = ((np.mean(Song_CorrCoef) - np.mean(Silence_CorrCoef))/((np.std(Song_CorrCoef))+(np.std(Silence_CorrCoef))))
            #4.7 Store Values of Coefficients to List (Each entry is a Different Frequency Bin)
Freq_Holder_Song.append(Song_CorrCoef)
Freq_Holder_Silence.append(Silence_CorrCoef)
#4.8 Store Lists of Frequency Bins to a List (Each entry is a Different Channel)
Chan_Corr_Song.append(Freq_Holder_Song)
Chan_Corr_Silence.append(Freq_Holder_Silence)
#5. Optionally Print Results
if Plot ==True:
plot_corr_seperation(Feat_Seperation_Norm_Corr, Top, Bottom, Num_Channels, Num_Freqs)
print('Number of Song P-Values Greater than .05: ' + str(P_Value_Counter_Song))
print('Number of Silence P-Values Greater than .05: ' + str(P_Value_Counter_Silence))
return Feat_Seperation_Norm_Corr, Feat_Seperation_Edges_Corr,
def find_edges(Feat_Song_med_holder, Feat_Silence_med_holder, Song_CorrCoef, Silence_CorrCoef):
"""Find Difference between Edges to Determine Overlap
Parameters:
-----------
Feat_Song_med_holder: float64
Median of Song Class
(1x 14)
Feat_Silence_med_holder: float64
Median of Silence Class
(1x14)
Song_CorrCoef: ndarray
Pearson Coefficients for Song Class
(1 x Trials)
Silence_CorrCoef: ndarray
Pearson Coefficients for Silence Class
(1 x Trials)
Return:
-------
Result_Holder: float
        A naive approximation of the distance between the boundaries of the Histograms of Pearson Correlation Coefficients
"""
if Feat_Song_med_holder >= Feat_Silence_med_holder:
Result_Holder = np.amin(Song_CorrCoef)- np.amax(Silence_CorrCoef)
elif Feat_Song_med_holder < Feat_Silence_med_holder:
Result_Holder = np.amin(Silence_CorrCoef)- np.amax(Song_CorrCoef)
return Result_Holder
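# Illustrative check (added commentary, not part of the original analysis): with song
# coefficients clustered near 0.8 and silence coefficients near 0.2 the histograms do
# not overlap, and the edge distance is amin(song) - amax(silence) = 0.75 - 0.25 = 0.5.
def _example_find_edges():
    song = np.array([[0.75, 0.8, 0.85]])
    silence = np.array([[0.15, 0.2, 0.25]])
    return find_edges(np.median(song), np.median(silence), song, silence)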
def plot_corr_seperation(NormSep_Corr, Top, Bottom, Num_Channels, Num_Features):
"""Plot seperation in Pearons Correlation Coefficients in a Heatmap
Parameter:
----------
NormSep_Corr: list
Normalized diffence in mean Pearson Correlation Coefficients of two Classes (The First class is used for the Template)
[Channels x Freq. Bands]
Top: list
List of High Frequency Cuttoffs of Bandpass's used
Bottom: list
List of Low Frequency Cutoffs
Num_Channels: int
Number of recording Channels
Num_Features: int
Number of Frequency Bands
"""
X_labels = []
for i in range(len(Top)):
X_labels.append( str(Bottom[i]) + '-' + str(Top[i]))
plt.figure(figsize=(15,15))
plt.imshow(NormSep_Corr, cmap='hot',aspect='auto', interpolation='nearest', origin='lower')#, vmax=3)##### Account for Scaling
plt.xlabel('Frequency'), plt.ylabel('Channel')
plt.title('Normalized Distance between Means')
plt.yticks(list(range(Num_Channels)), list(range(Num_Channels))) #Dynamic Control of Number of Freq Bands
plt.xticks(list(range(Num_Features)), X_labels) #Dynamic Control of Number of Channels
plt.colorbar()
# Channel v. Channel Correlation (Per Freq. Band)
def Chan_v_Chan_Corr(Song_Templates):
"""Channel v. Channel Correlation Comparision (per Freq. Band)
Parameters:
-----------
Song_Templates:
Templates (Mean) of Every Frequency Band for Each Channel
[Ch]->[Freq]_>(Time(Samples) x 1)
Returns:
--------
CvC_Corrs: list
        Channel v. Channel Pearson correlation matrices, one per Frequency Band
[Freq]->[Channel v. Channel]
"""
# Find Number of Channels and Frequency Bands
Num_Freqs = len(Song_Templates[0]) # Number of Frequency Bands
Num_Chans = len(Song_Templates) # Number of Channels
# Initiate List for all Matrixes
CvC_Corrs = []
p_value_count = 0
# Iterate over each Combination of Channels for each Correlation Matrixes
for i in range(Num_Freqs):
CvC_Holder = np.zeros([Num_Chans, Num_Chans])
for j in range (Num_Chans):
for k in range(Num_Chans):
CvC_Holder[j,k], p_value =scipy.stats.pearsonr(Song_Templates[j][i], Song_Templates[k][i])
if p_value > .05:
                    p_value_count += 1
CvC_Corrs.append(CvC_Holder)
print('Number of Bad P-Values: '+ str(p_value_count))
return CvC_Corrs
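# Illustrative sketch (added commentary, not part of the original analysis): two fake
# channels with a single frequency band give one 2 x 2 correlation matrix. The real
# templates are (Samples x 1) arrays; 1-D vectors are used here purely for simplicity.
def _example_chan_v_chan():
    rng = np.random.RandomState(0)
    shared = rng.randn(100)
    fake_templates = [[shared], [shared + 0.1 * rng.randn(100)]]
    return Chan_v_Chan_Corr(fake_templates)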
def CvC_Corr_Heatmap(All_CvC_Corr, Selected_Freq, Top, Bottom, Absolute = False):
"""Function for Visualizing the Channel vs. Channel Heatmap
Parameters:
-----------
All_CvC_Corr: list
        Channel v. Channel Pearson correlation matrices, one per Frequency Band
[Freq]->[Channel v. Channel]
Selected_Freq: int
The Selected Frequency Band to be Visualized
Top: ndarray
        List of High Frequency Cutoffs of the Bandpasses used
Bottom: ndarray
List of Low Frequency Cutoffs
Absolute: bool
(Defaults to False)
"""
X_labels = [x +1 for x in range(len(All_CvC_Corr[0]))] #Dynamic Control of Number of Channels
Freq_label = [str(Bottom[i]) + '-' + str(Top[i]) for i in range(len(Top))]
plt.figure(figsize=(10,8))
if Absolute == False:
plt.imshow(All_CvC_Corr[Selected_Freq], cmap='seismic',aspect='auto', interpolation='nearest', origin='lower', vmax=1, vmin = -1)##### Account for Scaling
if Absolute == True:
plt.imshow(abs(All_CvC_Corr[Selected_Freq]), cmap='hot',aspect='auto', interpolation='nearest', origin='lower', vmax=1, vmin = 0)##### Account for Scaling
plt.xlabel('Channel', size = 16), plt.ylabel('Channel', size = 16)
plt.title('Correlation between Channels Frequency Band (%s)' %(Freq_label[Selected_Freq]), size=25)
plt.yticks(list(range(len(X_labels))), X_labels, size = 12)
plt.xticks(list(range(len(X_labels))), X_labels, size = 12)
plt.colorbar()
## Potentially Add Function that RE-organize the Plot into order of Shanks and Depth
def CvC_Gui(CvC_Corr_Results, Top, Bottom):
"""Interactively View Channel v. Channel Correlation Heatmap
Parameters:
-----------
    CvC_Corr_Results: list
        Channel v. Channel Pearson correlation matrices, one per Frequency Band
        [Freq]->[Channel v. Channel]
    Top: ndarray
        List of High Frequency Cutoffs of the Bandpasses used
Bottom: ndarray
List of Low Frequency Cutoffs
"""
Num_Bands = len(CvC_Corr_Results)
Freq_widget = widgets.IntSlider(value=0, min=0, max=Num_Bands-1,step=1,description='Freq Band Width')
Absolute_widget= widgets.ToggleButton( value=True,
description='Absolute Value',
disabled=False,
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Description',
icon='check')
interact(CvC_Corr_Heatmap, All_CvC_Corr= fixed(CvC_Corr_Results), Selected_Freq= Freq_widget, Top= fixed(Top), Bottom= fixed(Bottom) , Absolute = Absolute_widget)
# Code for Importing the Geometry of the Probe [Copied from Spatial Mapping Development 6/4/17]
## Need to Make a Program that Reads the .prb (Text File) to Get the Locations [Develop this on Spatial Development]
def Import_Probe_Geometry(bird_id = 'z020', sess_name = 'day-2016-06-02'):
"""Import the .npy files that seem to be the probe's geometry (Not Everyday has these Files)
Parameters:
-----------
bird_id: str
        Bird Identifier to Locate Specified Bird's data folder
sess_name: str
Experiment Day to Locate it's Folder
Returns:
--------
Channel_Locations: np.memmap
        Coordinates of contact points
(Number of Contact Points by (|X coordinate | Y coordinate|))
Channel_map: np.memmap
Identities of the Contact Points
(1 x Number of Contact Points)
"""
experiment_folder = '/net/expData/birdSong/'
ss_data_folder = os.path.join(experiment_folder, 'ss_data')
kwd_file_folder = os.path.join(ss_data_folder, bird_id, sess_name)
kwd_files = [f for f in os.listdir(kwd_file_folder) if f.endswith('.kwd')]
assert(len(kwd_files)==1)
kwd_file = kwd_files[0]
print(kwd_file) # Sanity Check to Make Sure You are working with the Correct File
Location_files = [f for f in os.listdir(kwd_file_folder) if f.endswith('.npy')]
Location_files
Chan_Loc = 'channel_positions.npy'
Map_Loc = 'channel_map.npy'
Channel_Locations = np.load(os.path.join(kwd_file_folder, Chan_Loc), mmap_mode='r')
Channel_map = np.load(os.path.join( kwd_file_folder, Map_Loc), mmap_mode='r')
return Channel_Locations, Channel_map
# Code for Plotting the Probe Geometry
def Plot_Geometry(Ch_Locations, Ch_map, bird_id):
"""
Parameters:
-----------
    Ch_Locations: np.memmap
        Coordinates of contact points
        (Number of Contact Points by (|X coordinate | Y coordinate|))
    Ch_map: np.memmap
        Identities of the Contact Points
        (1 x Number of Contact Points)
    bird_id: str
        Bird Identifier to Locate Specified Bird's data folder
"""
fig = plt.figure(figsize=(12,6))
ax = fig.add_subplot(111)
A = Ch_Locations[:,0]
B = Ch_Locations[:,1]
plt.scatter(A,B)
for i, txt in enumerate(A):
ax.annotate(Ch_map[0][i], (A[i],B[i]), size =12)
plt.grid()
plt.title('Bird ID: ' + str(bird_id))
plt.show()
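# Hypothetical usage (added commentary): the bird id and session below are just the
# defaults from Import_Probe_Geometry above, and running this requires access to the
# mounted experiment data folder, so it is wrapped in a function.
def _example_probe_plot():
    locations, chan_map = Import_Probe_Geometry(bird_id='z020', sess_name='day-2016-06-02')
    Plot_Geometry(locations, chan_map, 'z020')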
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"scipy.stats.pearsonr",
"matplotlib.pyplot.imshow",
"numpy.mean",
"os.listdir",
"ipywidgets.IntSlider",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.scatter",
"ipywidgets.fixed",
"matplotlib.pyplot.xticks",
"numpy.amin",
"numpy.std",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"numpy.transpose",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show",
"numpy.median",
"ipywidgets.ToggleButton",
"matplotlib.pyplot.colorbar",
"os.path.join",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.amax",
"matplotlib.pyplot.axvline"
] | [((1693, 1718), 'numpy.zeros', 'np.zeros', (['(1, Num_Trials)'], {}), '((1, Num_Trials))\n', (1701, 1718), True, 'import numpy as np\n'), ((1741, 1766), 'numpy.zeros', 'np.zeros', (['(1, Num_Trials)'], {}), '((1, Num_Trials))\n', (1749, 1766), True, 'import numpy as np\n'), ((2528, 2550), 'numpy.mean', 'np.mean', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (2535, 2550), True, 'import numpy as np\n'), ((2575, 2600), 'numpy.mean', 'np.mean', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (2582, 2600), True, 'import numpy as np\n'), ((2824, 2850), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 7)'}), '(figsize=(8, 7))\n', (2834, 2850), True, 'import matplotlib.pyplot as plt\n'), ((2856, 2963), 'matplotlib.pyplot.title', 'plt.title', (["('Correlation (Channel %d Frequency Band= %d-%d)' % (CH_Sel, Bottom[\n FREQ_SEL], Top[FREQ_SEL]))"], {}), "('Correlation (Channel %d Frequency Band= %d-%d)' % (CH_Sel,\n Bottom[FREQ_SEL], Top[FREQ_SEL]))\n", (2865, 2963), True, 'import matplotlib.pyplot as plt\n'), ((2964, 3023), 'matplotlib.pyplot.axvline', 'plt.axvline', ([], {'x': 'Feat_Song_men2', 'color': '"""coral"""', 'linewidth': '"""4"""'}), "(x=Feat_Song_men2, color='coral', linewidth='4')\n", (2975, 3023), True, 'import matplotlib.pyplot as plt\n'), ((3030, 3088), 'matplotlib.pyplot.axvline', 'plt.axvline', ([], {'x': 'Feat_Silence_men2', 'color': '"""r"""', 'linewidth': '"""4"""'}), "(x=Feat_Silence_men2, color='r', linewidth='4')\n", (3041, 3088), True, 'import matplotlib.pyplot as plt\n'), ((3095, 3120), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Correlation"""'], {}), "('Correlation')\n", (3105, 3120), True, 'import matplotlib.pyplot as plt\n'), ((3125, 3140), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-1)', '(1)'], {}), '(-1, 1)\n', (3133, 3140), True, 'import matplotlib.pyplot as plt\n'), ((3263, 3294), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'rotation': '"""vertical"""'}), "(rotation='vertical')\n", (3273, 3294), True, 'import matplotlib.pyplot as plt\n'), ((3432, 3494), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'bbox_to_anchor': '(1.05, 1)', 'loc': '(2)', 'borderaxespad': '(0.0)'}), '(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.0)\n', (3442, 3494), True, 'import matplotlib.pyplot as plt\n'), ((4420, 4507), 'ipywidgets.IntSlider', 'widgets.IntSlider', ([], {'value': '(0)', 'min': '(0)', 'max': '(Num_Chan - 1)', 'step': '(1)', 'description': '"""Channel"""'}), "(value=0, min=0, max=Num_Chan - 1, step=1, description=\n 'Channel')\n", (4437, 4507), True, 'import ipywidgets as widgets\n'), ((6674, 6702), 'numpy.zeros', 'np.zeros', (['(16, Num_Features)'], {}), '((16, Num_Features))\n', (6682, 6702), True, 'import numpy as np\n'), ((6747, 6775), 'numpy.zeros', 'np.zeros', (['(16, Num_Features)'], {}), '((16, Num_Features))\n', (6755, 6775), True, 'import numpy as np\n'), ((11652, 11680), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 15)'}), '(figsize=(15, 15))\n', (11662, 11680), True, 'import matplotlib.pyplot as plt\n'), ((11685, 11781), 'matplotlib.pyplot.imshow', 'plt.imshow', (['NormSep_Corr'], {'cmap': '"""hot"""', 'aspect': '"""auto"""', 'interpolation': '"""nearest"""', 'origin': '"""lower"""'}), "(NormSep_Corr, cmap='hot', aspect='auto', interpolation='nearest',\n origin='lower')\n", (11695, 11781), True, 'import matplotlib.pyplot as plt\n'), ((11867, 11913), 'matplotlib.pyplot.title', 'plt.title', (['"""Normalized Distance between Means"""'], {}), "('Normalized Distance between Means')\n", (11876, 11913), True, 'import 
matplotlib.pyplot as plt\n'), ((12119, 12133), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (12131, 12133), True, 'import matplotlib.pyplot as plt\n'), ((14204, 14231), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 8)'}), '(figsize=(10, 8))\n', (14214, 14231), True, 'import matplotlib.pyplot as plt\n'), ((14689, 14792), 'matplotlib.pyplot.title', 'plt.title', (["('Correlation between Channels Frequency Band (%s)' % Freq_label[Selected_Freq]\n )"], {'size': '(25)'}), "('Correlation between Channels Frequency Band (%s)' % Freq_label[\n Selected_Freq], size=25)\n", (14698, 14792), True, 'import matplotlib.pyplot as plt\n'), ((14921, 14935), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (14933, 14935), True, 'import matplotlib.pyplot as plt\n'), ((15507, 15603), 'ipywidgets.IntSlider', 'widgets.IntSlider', ([], {'value': '(0)', 'min': '(0)', 'max': '(Num_Bands - 1)', 'step': '(1)', 'description': '"""Freq Band Width"""'}), "(value=0, min=0, max=Num_Bands - 1, step=1, description=\n 'Freq Band Width')\n", (15524, 15603), True, 'import ipywidgets as widgets\n'), ((15621, 15758), 'ipywidgets.ToggleButton', 'widgets.ToggleButton', ([], {'value': '(True)', 'description': '"""Absolute Value"""', 'disabled': '(False)', 'button_style': '""""""', 'tooltip': '"""Description"""', 'icon': '"""check"""'}), "(value=True, description='Absolute Value', disabled=\n False, button_style='', tooltip='Description', icon='check')\n", (15641, 15758), True, 'import ipywidgets as widgets\n'), ((17122, 17164), 'os.path.join', 'os.path.join', (['experiment_folder', '"""ss_data"""'], {}), "(experiment_folder, 'ss_data')\n", (17134, 17164), False, 'import os\n'), ((17187, 17235), 'os.path.join', 'os.path.join', (['ss_data_folder', 'bird_id', 'sess_name'], {}), '(ss_data_folder, bird_id, sess_name)\n', (17199, 17235), False, 'import os\n'), ((18348, 18375), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 6)'}), '(figsize=(12, 6))\n', (18358, 18375), True, 'import matplotlib.pyplot as plt\n'), ((18463, 18480), 'matplotlib.pyplot.scatter', 'plt.scatter', (['A', 'B'], {}), '(A, B)\n', (18474, 18480), True, 'import matplotlib.pyplot as plt\n'), ((18583, 18593), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (18591, 18593), True, 'import matplotlib.pyplot as plt\n'), ((18640, 18650), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (18648, 18650), True, 'import matplotlib.pyplot as plt\n'), ((1970, 2058), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['CFT[CH_Sel][FREQ_SEL][:, k]', 'Template[CH_Sel][FREQ_SEL][:, 0]'], {}), '(CFT[CH_Sel][FREQ_SEL][:, k], Template[CH_Sel][FREQ_SEL\n ][:, 0])\n', (1990, 2058), False, 'import scipy\n'), ((2097, 2185), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['CFS[CH_Sel][FREQ_SEL][:, k]', 'Template[CH_Sel][FREQ_SEL][:, 0]'], {}), '(CFS[CH_Sel][FREQ_SEL][:, k], Template[CH_Sel][FREQ_SEL\n ][:, 0])\n', (2117, 2185), False, 'import scipy\n'), ((3153, 3180), 'numpy.transpose', 'np.transpose', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (3165, 3180), True, 'import numpy as np\n'), ((3308, 3338), 'numpy.transpose', 'np.transpose', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (3320, 3338), True, 'import numpy as np\n'), ((11816, 11839), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Frequency"""'], {}), "('Frequency')\n", (11826, 11839), True, 'import matplotlib.pyplot as plt\n'), ((11841, 11862), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Channel"""'], {}), "('Channel')\n", (11851, 11862), 
True, 'import matplotlib.pyplot as plt\n'), ((13024, 13056), 'numpy.zeros', 'np.zeros', (['[Num_Chans, Num_Chans]'], {}), '([Num_Chans, Num_Chans])\n', (13032, 13056), True, 'import numpy as np\n'), ((14266, 14398), 'matplotlib.pyplot.imshow', 'plt.imshow', (['All_CvC_Corr[Selected_Freq]'], {'cmap': '"""seismic"""', 'aspect': '"""auto"""', 'interpolation': '"""nearest"""', 'origin': '"""lower"""', 'vmax': '(1)', 'vmin': '(-1)'}), "(All_CvC_Corr[Selected_Freq], cmap='seismic', aspect='auto',\n interpolation='nearest', origin='lower', vmax=1, vmin=-1)\n", (14276, 14398), True, 'import matplotlib.pyplot as plt\n'), ((14618, 14648), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Channel"""'], {'size': '(16)'}), "('Channel', size=16)\n", (14628, 14648), True, 'import matplotlib.pyplot as plt\n'), ((14652, 14682), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Channel"""'], {'size': '(16)'}), "('Channel', size=16)\n", (14662, 14682), True, 'import matplotlib.pyplot as plt\n'), ((17685, 17724), 'os.path.join', 'os.path.join', (['kwd_file_folder', 'Chan_Loc'], {}), '(kwd_file_folder, Chan_Loc)\n', (17697, 17724), False, 'import os\n'), ((17767, 17805), 'os.path.join', 'os.path.join', (['kwd_file_folder', 'Map_Loc'], {}), '(kwd_file_folder, Map_Loc)\n', (17779, 17805), False, 'import os\n'), ((4630, 4641), 'ipywidgets.fixed', 'fixed', (['Song'], {}), '(Song)\n', (4635, 4641), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((4650, 4664), 'ipywidgets.fixed', 'fixed', (['Silence'], {}), '(Silence)\n', (4655, 4664), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((4677, 4692), 'ipywidgets.fixed', 'fixed', (['All_Temp'], {}), '(All_Temp)\n', (4682, 4692), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((4760, 4770), 'ipywidgets.fixed', 'fixed', (['Top'], {}), '(Top)\n', (4765, 4770), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((4780, 4793), 'ipywidgets.fixed', 'fixed', (['Bottom'], {}), '(Bottom)\n', (4785, 4793), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((10619, 10641), 'numpy.amin', 'np.amin', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (10626, 10641), True, 'import numpy as np\n'), ((10643, 10668), 'numpy.amax', 'np.amax', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (10650, 10668), True, 'import numpy as np\n'), ((16063, 16086), 'ipywidgets.fixed', 'fixed', (['CvC_Corr_Results'], {}), '(CvC_Corr_Results)\n', (16068, 16086), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((16121, 16131), 'ipywidgets.fixed', 'fixed', (['Top'], {}), '(Top)\n', (16126, 16131), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((16141, 16154), 'ipywidgets.fixed', 'fixed', (['Bottom'], {}), '(Bottom)\n', (16146, 16154), False, 'from ipywidgets import interact, interactive, fixed, interact_manual, IntSlider\n'), ((17264, 17291), 'os.listdir', 'os.listdir', (['kwd_file_folder'], {}), '(kwd_file_folder)\n', (17274, 17291), False, 'import os\n'), ((17497, 17524), 'os.listdir', 'os.listdir', (['kwd_file_folder'], {}), '(kwd_file_folder)\n', (17507, 17524), False, 'import os\n'), ((7512, 7616), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['Channel_Freq_Song[CH_Sel][FREQ_SEL][:, k]', 'Match_Test[CH_Sel][FREQ_SEL][:, 0]'], {}), '(Channel_Freq_Song[CH_Sel][FREQ_SEL][:, k], Match_Test[\n 
CH_Sel][FREQ_SEL][:, 0])\n', (7532, 7616), False, 'import scipy\n'), ((7663, 7769), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['Channel_Freq_Silence[CH_Sel][FREQ_SEL][:, k]', 'Match_Test[CH_Sel][FREQ_SEL][:, 0]'], {}), '(Channel_Freq_Silence[CH_Sel][FREQ_SEL][:, k],\n Match_Test[CH_Sel][FREQ_SEL][:, 0])\n', (7683, 7769), False, 'import scipy\n'), ((8286, 8310), 'numpy.median', 'np.median', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (8295, 8310), True, 'import numpy as np\n'), ((8312, 8339), 'numpy.median', 'np.median', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (8321, 8339), True, 'import numpy as np\n'), ((10750, 10775), 'numpy.amin', 'np.amin', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (10757, 10775), True, 'import numpy as np\n'), ((10777, 10799), 'numpy.amax', 'np.amax', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (10784, 10799), True, 'import numpy as np\n'), ((13174, 13238), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['Song_Templates[j][i]', 'Song_Templates[k][i]'], {}), '(Song_Templates[j][i], Song_Templates[k][i])\n', (13194, 13238), False, 'import scipy\n'), ((8784, 8806), 'numpy.mean', 'np.mean', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (8791, 8806), True, 'import numpy as np\n'), ((8809, 8833), 'numpy.std', 'np.std', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (8815, 8833), True, 'import numpy as np\n'), ((8837, 8858), 'numpy.std', 'np.std', (['Song_CorrCoef'], {}), '(Song_CorrCoef)\n', (8843, 8858), True, 'import numpy as np\n'), ((8861, 8885), 'numpy.std', 'np.std', (['Silence_CorrCoef'], {}), '(Silence_CorrCoef)\n', (8867, 8885), True, 'import numpy as np\n')] |
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="djandock",
version="1.6.0",
author="marxygen",
author_email="<EMAIL>",
description="A simple utility to create Django projects with virtual environment, Git and Docker all set up",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/marxygen/djandock",
project_urls={
"Bug Tracker": "https://github.com/marxygen/djandock/issues",
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
package_dir={"": "src"},
package_data={'djandock': ['Dockerfile', 'docker-compose.yml']},
packages=setuptools.find_packages(where="src"),
python_requires=">=3.6",
) | [
"setuptools.find_packages"
] | [((844, 881), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (868, 881), False, 'import setuptools\n')] |
import requests
import json
# insert your token (https://github.com/settings/tokens) below to use it with private repositories as well
token =""
def github_get_user_data(username):
url = f"https://api.github.com/users/{username}"
# make the request and return the json
user_data = requests.get(url).json()
    # return the parsed JSON data
return user_data
def github_get_user_public_repos(username):
url = f"https://api.github.com/users/{username}/repos"
user_repo_data = requests.get(url).json()
repo_list = list()
for rep in user_repo_data:
repo_list.append(rep['clone_url'])
return repo_list
def github_get_user_private_repos(username, token):
url = 'https://api.github.com/user/repos'
# create a re-usable session object with the user creds in-built
gh_session = requests.Session()
gh_session.auth = (username, token)
user_repo_data = json.loads(gh_session.get(url).text)
repo_list = list()
for rep in user_repo_data:
repo_list.append(rep['clone_url'])
return repo_list
def github_get_user_repos(username,token=""):
repo_list = list()
if token=="":
repo_list = github_get_user_public_repos(username)
else:
repo_list = github_get_user_private_repos(username, token)
return repo_list
print(github_get_user_repos("MauMendes"))
print("#################################################")
print(github_get_user_repos("MauMendes", token ))
| [
"requests.Session",
"requests.get"
] | [((820, 838), 'requests.Session', 'requests.Session', ([], {}), '()\n', (836, 838), False, 'import requests\n'), ((284, 301), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (296, 301), False, 'import requests\n'), ((488, 505), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (500, 505), False, 'import requests\n')] |
from typing import Callable, List, Tuple, Union
import numpy as np
from robustnessgym.core.identifier import Identifier
from robustnessgym.core.operation import Operation, lookup
from robustnessgym.core.slice import SliceDataPanel as DataPanel
from robustnessgym.slicebuilders.subpopulation import Subpopulation
class BinningMixin:
def __init__(
self,
intervals: List[Tuple[Union[int, float, str], Union[int, float, str]]],
bin_creation_fn: Callable = None,
bin_fn: Callable = None,
*args,
**kwargs,
):
super(BinningMixin, self).__init__(*args, **kwargs)
# Set the intervals
self.intervals = intervals
self.left_limits = None
self.right_limits = None
# Keep track of scores
self.scores = []
# Assign the bin fns
if bin_creation_fn and bin_fn:
self.create_bins = bin_creation_fn
self.bin = bin_fn
def _reset_scores(self):
self.scores = []
def replace_percentile(self, limit):
if isinstance(limit, str) and limit.endswith("%"):
return np.percentile(self.scores, float(limit.replace("%", "")))
elif isinstance(limit, float) or isinstance(limit, int):
return limit
else:
raise NotImplementedError
def create_bins(self):
for i in range(len(self.intervals)):
(left_limit, right_limit) = self.intervals[i]
self.intervals[i] = (
self.replace_percentile(left_limit),
self.replace_percentile(right_limit),
)
self.left_limits = np.array([interval[0] for interval in self.intervals])
self.right_limits = np.array([interval[1] for interval in self.intervals])
def bin(self, scores: List[Union[int, float]]) -> np.ndarray:
# Convert to np.ndarray
scores = np.array(scores)
# Bin the scores
return (
(self.left_limits <= scores[:, np.newaxis])
& (scores[:, np.newaxis] <= self.right_limits)
).astype(np.int32)
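    # Worked example (illustrative, not from the original source): with
    # intervals = [(0, 5), (5, 10)] create_bins yields left_limits = [0, 5]
    # and right_limits = [5, 10], so bin(scores=[3, 7]) broadcasts to the
    # membership matrix [[1, 0], [0, 1]] -- one row per score, one column
    # per interval.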
class ScoreSubpopulation(Subpopulation, BinningMixin):
def __init__(
self,
intervals: List[Tuple[Union[int, float, str], Union[int, float, str]]],
identifiers: List[Identifier] = None,
score_fn: Callable = None,
bin_creation_fn: Callable = None,
bin_fn: Callable = None,
*args,
**kwargs,
):
if not identifiers:
if score_fn:
identifiers = [
Identifier(
_name=self.__class__.__name__,
gte=interval[0],
lte=interval[1],
score_fn=score_fn,
)
for interval in intervals
]
else:
identifiers = [
Identifier(
_name=self.__class__.__name__,
gte=interval[0],
lte=interval[1],
)
for interval in intervals
]
super(ScoreSubpopulation, self).__init__(
intervals=intervals,
identifiers=identifiers,
bin_creation_fn=bin_creation_fn,
bin_fn=bin_fn,
*args,
**kwargs,
)
# Assign the score fn
if score_fn:
self.score = score_fn
def prepare_dataset(
self,
dp: DataPanel,
columns: List[str],
batch_size: int = 32,
*args,
**kwargs,
) -> None:
# First reset the scores
self._reset_scores()
# Prepare the dataset
super(ScoreSubpopulation, self).prepare_dataset(
dp=dp,
columns=columns,
batch_size=batch_size,
*args,
**kwargs,
)
# Create the bins
self.create_bins()
def prepare_batch(
self,
batch: DataPanel,
columns: List[str],
*args,
**kwargs,
) -> None:
# Compute the scores
if isinstance(self.score, Operation):
self.scores.extend(lookup(batch, self.score, columns))
elif isinstance(self.score, Callable):
self.scores.extend(self.score(batch, columns))
else:
raise RuntimeError("score function invalid.")
def score(
self,
batch: DataPanel,
columns: List[str],
*args,
**kwargs,
) -> np.ndarray:
raise NotImplementedError("Return a vector of float scores for each example.")
def apply(
self,
batch: DataPanel,
columns: List[str],
slice_membership: np.ndarray = None,
*args,
**kwargs,
) -> np.ndarray:
# Keep track of the score of each example
if isinstance(self.score, Operation):
scores = lookup(batch, self.score, columns)
elif isinstance(self.score, Callable):
scores = self.score(batch, columns)
else:
raise RuntimeError("score function invalid.")
assert (
len(scores) == slice_membership.shape[0]
), "Must have exactly one score per example."
return self.bin(scores=scores)
class MultiScoreSubpopulation(Subpopulation, BinningMixin):
def __init__(
self,
intervals: List[Tuple[Union[int, float, str], Union[int, float, str]]],
identifiers: List[Identifier] = None,
score_fn: Callable = None,
bin_creation_fn: Callable = None,
bin_fn: Callable = None,
*args,
**kwargs,
):
if not identifiers:
if score_fn:
identifiers = [
Identifier(
_name=self.__class__.__name__,
gte=interval[0],
lte=interval[1],
score_fn=score_fn,
)
for interval in intervals
]
else:
identifiers = [
Identifier(
_name=self.__class__.__name__, gte=interval[0], lte=interval[1]
)
for interval in intervals
]
super(MultiScoreSubpopulation, self).__init__(
intervals=intervals,
identifiers=identifiers,
bin_creation_fn=bin_creation_fn,
bin_fn=bin_fn,
*args,
**kwargs,
)
# Assign the score fn
if score_fn:
self.score = score_fn
def prepare_dataset(
self,
dp: DataPanel,
columns: List[str],
batch_size: int = 32,
*args,
**kwargs,
) -> None:
# First reset the scores
self._reset_scores()
# Prepare the dataset
super(MultiScoreSubpopulation, self).prepare_dataset(
dp=dp,
columns=columns,
batch_size=batch_size,
)
# Create the bins
self.create_bins()
def prepare_batch(
self,
batch: DataPanel,
columns: List[str],
*args,
**kwargs,
) -> None:
# Compute the scores
if isinstance(self.score, Operation):
self.scores.extend(lookup(batch, self.score, columns))
elif isinstance(self.score, Callable):
self.scores.extend(self.score(batch, columns))
else:
raise RuntimeError("score function invalid.")
BinarySubpopulation = lambda name, score_fn: ScoreSubpopulation(
identifiers=[Identifier(f"No{name}"), Identifier(f"{name}")],
intervals=[(0, 0), (1, 1)],
score_fn=score_fn,
)
PercentileSubpopulation = lambda name, score_fn: ScoreSubpopulation(
identifiers=[
Identifier(f"{name}", gte=f"{gte}%", lte=f"{lte}%")
for (gte, lte) in [
(0, 5),
(0, 10),
(0, 20),
(20, 40),
(40, 60),
(60, 80),
(80, 100),
(90, 100),
(95, 100),
]
],
intervals=[
("0%", "5%"),
("0%", "10%"),
("0%", "20%"),
("20%", "40%"),
("40%", "60%"),
("60%", "80%"),
("80%", "100%"),
("90%", "100%"),
("95%", "100%"),
],
score_fn=score_fn,
)
IntervalSubpopulation = lambda name, intervals, score_fn: ScoreSubpopulation(
identifiers=[
Identifier(f"{name}", gte=f"{gte}", lte=f"{lte}") for (gte, lte) in intervals
],
intervals=intervals,
score_fn=score_fn,
)
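# Illustrative usage sketch (assumed column name and intervals, not part of
# the original module):
#     length_subpop = ScoreSubpopulation(
#         intervals=[("0%", "20%"), ("20%", "80%"), ("80%", "100%")],
#         score_fn=lambda batch, columns: [float(len(t)) for t in batch[columns[0]]],
#     )
# score_fn receives (batch, columns) and returns one float score per example;
# the percentile strings are resolved against the collected scores by
# BinningMixin.create_bins during prepare_dataset.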
| [
"robustnessgym.core.identifier.Identifier",
"numpy.array",
"robustnessgym.core.operation.lookup"
] | [((1644, 1698), 'numpy.array', 'np.array', (['[interval[0] for interval in self.intervals]'], {}), '([interval[0] for interval in self.intervals])\n', (1652, 1698), True, 'import numpy as np\n'), ((1727, 1781), 'numpy.array', 'np.array', (['[interval[1] for interval in self.intervals]'], {}), '([interval[1] for interval in self.intervals])\n', (1735, 1781), True, 'import numpy as np\n'), ((1898, 1914), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (1906, 1914), True, 'import numpy as np\n'), ((4979, 5013), 'robustnessgym.core.operation.lookup', 'lookup', (['batch', 'self.score', 'columns'], {}), '(batch, self.score, columns)\n', (4985, 5013), False, 'from robustnessgym.core.operation import Operation, lookup\n'), ((4239, 4273), 'robustnessgym.core.operation.lookup', 'lookup', (['batch', 'self.score', 'columns'], {}), '(batch, self.score, columns)\n', (4245, 4273), False, 'from robustnessgym.core.operation import Operation, lookup\n'), ((7410, 7444), 'robustnessgym.core.operation.lookup', 'lookup', (['batch', 'self.score', 'columns'], {}), '(batch, self.score, columns)\n', (7416, 7444), False, 'from robustnessgym.core.operation import Operation, lookup\n'), ((7708, 7731), 'robustnessgym.core.identifier.Identifier', 'Identifier', (['f"""No{name}"""'], {}), "(f'No{name}')\n", (7718, 7731), False, 'from robustnessgym.core.identifier import Identifier\n'), ((7733, 7754), 'robustnessgym.core.identifier.Identifier', 'Identifier', (['f"""{name}"""'], {}), "(f'{name}')\n", (7743, 7754), False, 'from robustnessgym.core.identifier import Identifier\n'), ((7910, 7961), 'robustnessgym.core.identifier.Identifier', 'Identifier', (['f"""{name}"""'], {'gte': 'f"""{gte}%"""', 'lte': 'f"""{lte}%"""'}), "(f'{name}', gte=f'{gte}%', lte=f'{lte}%')\n", (7920, 7961), False, 'from robustnessgym.core.identifier import Identifier\n'), ((8572, 8621), 'robustnessgym.core.identifier.Identifier', 'Identifier', (['f"""{name}"""'], {'gte': 'f"""{gte}"""', 'lte': 'f"""{lte}"""'}), "(f'{name}', gte=f'{gte}', lte=f'{lte}')\n", (8582, 8621), False, 'from robustnessgym.core.identifier import Identifier\n'), ((2571, 2669), 'robustnessgym.core.identifier.Identifier', 'Identifier', ([], {'_name': 'self.__class__.__name__', 'gte': 'interval[0]', 'lte': 'interval[1]', 'score_fn': 'score_fn'}), '(_name=self.__class__.__name__, gte=interval[0], lte=interval[1],\n score_fn=score_fn)\n', (2581, 2669), False, 'from robustnessgym.core.identifier import Identifier\n'), ((2919, 2994), 'robustnessgym.core.identifier.Identifier', 'Identifier', ([], {'_name': 'self.__class__.__name__', 'gte': 'interval[0]', 'lte': 'interval[1]'}), '(_name=self.__class__.__name__, gte=interval[0], lte=interval[1])\n', (2929, 2994), False, 'from robustnessgym.core.identifier import Identifier\n'), ((5822, 5920), 'robustnessgym.core.identifier.Identifier', 'Identifier', ([], {'_name': 'self.__class__.__name__', 'gte': 'interval[0]', 'lte': 'interval[1]', 'score_fn': 'score_fn'}), '(_name=self.__class__.__name__, gte=interval[0], lte=interval[1],\n score_fn=score_fn)\n', (5832, 5920), False, 'from robustnessgym.core.identifier import Identifier\n'), ((6170, 6245), 'robustnessgym.core.identifier.Identifier', 'Identifier', ([], {'_name': 'self.__class__.__name__', 'gte': 'interval[0]', 'lte': 'interval[1]'}), '(_name=self.__class__.__name__, gte=interval[0], lte=interval[1])\n', (6180, 6245), False, 'from robustnessgym.core.identifier import Identifier\n')] |
from ScautEvent.server import EventServer
server = EventServer("localhost", 5000)
server.start()
| [
"ScautEvent.server.EventServer"
] | [((52, 82), 'ScautEvent.server.EventServer', 'EventServer', (['"""localhost"""', '(5000)'], {}), "('localhost', 5000)\n", (63, 82), False, 'from ScautEvent.server import EventServer\n')] |
"""
GUI to watermark your pictures with text and/or another picture.
This module is maintained by <NAME> <<EMAIL>>.
You can always get the latest version of this module at:
https://github.com/BoboTiG/watermark-me
If that URL should fail, try contacting the author.
"""
from distutils.version import StrictVersion
from typing import Any, Dict, List
__all__ = ("get_update_status",)
Version = Dict[str, Any]
Versions = List[Version]
def get_latest_version(versions: Versions) -> str:
""" Get the most recent version. """
    versions_curated = [version["name"] for version in versions]
    highest = str(max(map(StrictVersion, versions_curated)))
return highest # ᕦ(ò_óˇ)ᕤ
def get_update_status(current_version: str, versions: Versions) -> str:
"""Given a version, determine the definitive status of the application."""
if not versions:
return ""
latest = get_latest_version(versions)
if not latest or StrictVersion(latest) <= StrictVersion(current_version):
return ""
return latest
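# Worked example (illustrative, not from the original module): for
# versions = [{"name": "1.2.0"}, {"name": "1.10.0"}], StrictVersion compares
# numerically, so get_latest_version prefers 1.10 over 1.2 (a plain string
# comparison would rank "1.2.0" higher); get_update_status then returns the
# latest version only when it is strictly newer than current_version.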
| [
"distutils.version.StrictVersion"
] | [((948, 969), 'distutils.version.StrictVersion', 'StrictVersion', (['latest'], {}), '(latest)\n', (961, 969), False, 'from distutils.version import StrictVersion\n'), ((973, 1003), 'distutils.version.StrictVersion', 'StrictVersion', (['current_version'], {}), '(current_version)\n', (986, 1003), False, 'from distutils.version import StrictVersion\n')] |
"""
"""
import argparse
import time
import numpy as np
import math, random
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.optim as optim
import torch.autograd as autograd
import torch.nn.functional as F
from torch.optim.lr_scheduler import StepLR, ReduceLROnPlateau
import gym
from Wrapper.layers import *
from Wrapper.wrappers import make_atari, wrap_deepmind, wrap_pytorch
from models.dqn import QLearner, compute_td_loss, ReplayBuffer
USE_CUDA = torch.cuda.is_available()
parser = argparse.ArgumentParser()
# CUDA
parser.add_argument('--seed', type=int, default=1,
help='Random seed')
# Wrapper
parser.add_argument('--frame_stack', action='store_true',
help='Num of frames to stack, default is using prev four frames')
# QLearner
parser.add_argument('--batch_size', type=int, default=16,
help='')
parser.add_argument('--num_frames', type=int, default=1000000,
help='')
# Environment
parser.add_argument('--render', type=int, default=0,
help='Rendering the environment state')
# Training
parser.add_argument('--gamma', type=float, default=0.99,
help='Temporal discounting parameter')
parser.add_argument('--epsilon_start', type=float, default=1.0,
help='Initial probability of selecting random action')
parser.add_argument('--epsilon_final', type=float, default=0.01,
help='Final probability of selecting random action')
parser.add_argument('--epsilon_decay', type=float, default=30000,
help='Decay for probability of selecting random action') # epsilon_decay = 0.99
parser.add_argument('--N', type=int, default=1,
help='Horizon for N-step Q-estimates')
parser.add_argument('--number_of_updates', type=int, default=10,
help='Number of updates for each batch with batch_size')
parser.add_argument('--target_update_freq', type=int, default=10000,
help='Copy current model to target model')
# Optimization
parser.add_argument('--optimizer', choices=['Adam','RMSprop'],
default='Adam',
help='Optimizer to use for training')
parser.add_argument('--lr', type=float, default=0.00001,
help='Learning rate of optimizer (default from mjacar)')
parser.add_argument('--use_optim_scheduler', action='store_true',
help='Whether use scheduler for the optimizer or not')
parser.add_argument('--initial_lr', type=float, default=0.0003,
help='Initial Learning rate of optimizer')
parser.add_argument('--step_size', type=int, default=50000,
help='Size of step for the optimizer scheduler')
# ReplayBuffer
parser.add_argument('--capacity', type=int, default=1000000,
help='Number of states to store in the replay buffer')
# Saving Results
parser.add_argument('--save_freq_frame', type=int, default=100000,
help='Save model and results every save_freq_frame times')
parser.add_argument('--save_result_path', default='../results/DQN/results.npy',
help='Path to output data file with score history')
parser.add_argument('--save_model_path', default='../results/DQN/weights_only.pth',
help='Path to output data file for saving the trainned model')
parser.add_argument('--save_interim_path', default='../results/DQN/interim/',
help='Path to interim output data file with score history')
parser.add_argument('--interim_fn', default='interim_data',
help='Filename to interim output data file')
def main(args):
# CUDA
use_cuda = torch.cuda.is_available()
device = torch.device("cuda:0" if use_cuda else "cpu")
print("Using cuda: ", use_cuda)
# Environment
env_id = "PongNoFrameskip-v4"
env = make_atari(env_id)
env = wrap_deepmind(env, args.frame_stack)
env = wrap_pytorch(env)
# Random seed
env.seed(args.seed)
torch.manual_seed(args.seed)
# Initializing
replay_initial = 10000 #50000
replay_buffer = ReplayBuffer(args.capacity)
# model = QLearner(env, args, replay_buffer)
# Initialize target q function and q function
model_Q = QLearner(env, args, replay_buffer)
model_target_Q = QLearner(env, args, replay_buffer)
if args.optimizer == 'Adam':
if args.use_optim_scheduler:
optimizer = optim.Adam(model_Q.parameters(), lr=args.initial_lr)
scheduler = StepLR(optimizer, step_size=args.step_size, gamma=args.gamma)
# scheduler = ReduceLROnPlateau(optimizer, mode='max', factor=0.1, patience=1000, verbose=True)
else:
            optimizer = optim.Adam(model_Q.parameters(), args.lr)
elif args.optimizer == 'RMSprop':
optimizer = optim.RMSprop(model_Q.parameters(), args.lr)
if USE_CUDA:
model_Q = model_Q.cuda()
model_target_Q = model_target_Q.cuda()
# Training loop
epsilon_by_frame = lambda frame_idx: args.epsilon_final + (args.epsilon_start - args.epsilon_final) * math.exp(-1. * frame_idx / args.epsilon_decay)
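    # Rough values for the default schedule (epsilon_start=1.0,
    # epsilon_final=0.01, epsilon_decay=30000): frame 0 -> 1.0,
    # frame 30000 -> ~0.37, frame 90000 -> ~0.06, converging towards 0.01.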
losses = []
learning_rates = []
all_rewards = []
episode_reward = 0
num_param_updates = 0
mean_reward = -float('nan')
mean_reward2 = -float('nan')
best_mean_reward = -float('inf')
best_mean_reward2 = -float('inf')
best_18_reward = -float('inf')
best_19_reward = -float('inf')
best_20_reward = -float('inf')
best_21_reward = -float('inf')
time_history = [] # records time (in sec) of each episode
old_lr = args.initial_lr
state = env.reset()
start_time_frame = time.time()
for frame_idx in range(1, args.num_frames + 1):
start_time = time.time()
epsilon = epsilon_by_frame(frame_idx)
action = model_Q.act(state, epsilon)
next_state, reward, done, _ = env.step(action)
replay_buffer.push(state, action, reward, next_state, done)
state = next_state
episode_reward += reward
if done:
state = env.reset()
all_rewards.append(episode_reward)
time_history.append(time.time() - start_time)
episode_reward = 0
if args.render==1:
env.render()
if len(replay_buffer) > replay_initial:
for nou in range(args.number_of_updates):
loss = compute_td_loss(model_Q, model_target_Q, args.batch_size, args.gamma, replay_buffer, args.N)
optimizer.zero_grad()
loss.backward()
optimizer.step()
losses.append(loss.data.cpu().numpy())
num_param_updates += 1
# Periodically update the target network by Q network to target Q network
if num_param_updates % args.target_update_freq == 0:
model_target_Q.load_state_dict(model_Q.state_dict())
if args.use_optim_scheduler:
# scheduler.step(mean_reward2)
scheduler.step()
new_lr = scheduler.get_last_lr()
# new_lr = optimizer.param_groups[0]['lr']
if new_lr != old_lr:
learning_rates.append(new_lr)
print('NewLearningRate: ', new_lr)
old_lr = new_lr
if frame_idx % 10000 == 0 and len(replay_buffer) <= replay_initial:
print("Preparing replay buffer with len -- ", len(replay_buffer),
"Frame:", frame_idx,
"Total time so far:", (time.time() - start_time_frame))
if frame_idx % 10000 == 0 and len(replay_buffer) > replay_initial:
mean_reward = np.mean(all_rewards[-10:])
mean_reward2 = np.mean(all_rewards[-100:])
best_mean_reward = max(best_mean_reward, mean_reward)
best_mean_reward2 = max(best_mean_reward2, mean_reward2)
print("Frame:", frame_idx,
"Loss:", np.mean(losses),
"Total Rewards:", all_rewards[-1],
"Average Rewards over all frames:", np.mean(all_rewards),
"Last-10 average reward:", mean_reward,
"Best mean reward of last-10:", best_mean_reward,
"Last-100 average reward:", mean_reward2,
"Best mean reward of last-100:", best_mean_reward2,
"Time:", time_history[-1],
"Total time so far:", (time.time() - start_time_frame))
if mean_reward >= 18.0:
if mean_reward > best_18_reward:
best_18_reward = mean_reward
torch.save(model_Q.state_dict(), args.save_interim_path + \
'fmodel_best_18_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.pth'\
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))
if mean_reward >= 19.0:
if mean_reward > best_19_reward:
best_19_reward = mean_reward
torch.save(model_Q.state_dict(), args.save_interim_path + \
'fmodel_best_19_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.pth'\
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))
if mean_reward >= 20.0:
if mean_reward > best_20_reward:
best_20_reward = mean_reward
torch.save(model_Q.state_dict(), args.save_interim_path + \
'fmodel_best_20_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.pth'\
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))
if mean_reward >= 21.0:
if mean_reward > best_21_reward:
best_21_reward = mean_reward
torch.save(model_Q.state_dict(), args.save_interim_path + \
'fmodel_best_21_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.pth'\
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))
if frame_idx % args.save_freq_frame == 0:
results = [losses, all_rewards, time_history]
torch.save(model_Q.state_dict(), args.save_model_path)
np.save(args.save_result_path, results)
if frame_idx == 10000:
results = [losses, all_rewards, time_history]
torch.save(model_Q.state_dict(), args.save_interim_path + \
'fmodel_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.pth'\
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))
np.save(args.save_interim_path + \
'fresults_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.npy' \
%(args.lr, frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn), \
results)
if frame_idx % 500000 == 0:
results = [losses, all_rewards, time_history]
torch.save(model_Q.state_dict(), args.save_interim_path + \
'fmodel_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.pth' \
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))
np.save(args.save_interim_path + \
'fresults_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.npy' \
%(args.lr,frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn), \
results)
if __name__ == '__main__':
args = parser.parse_args()
print(args)
main(args)
| [
"Wrapper.wrappers.wrap_deepmind",
"torch.manual_seed",
"numpy.mean",
"argparse.ArgumentParser",
"torch.optim.lr_scheduler.StepLR",
"models.dqn.ReplayBuffer",
"torch.cuda.is_available",
"models.dqn.QLearner",
"time.time",
"math.exp",
"Wrapper.wrappers.wrap_pytorch",
"Wrapper.wrappers.make_atari",
"models.dqn.compute_td_loss",
"numpy.save",
"torch.device"
] | [((486, 511), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (509, 511), False, 'import torch\n'), ((523, 548), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (546, 548), False, 'import argparse\n'), ((3737, 3762), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3760, 3762), False, 'import torch\n'), ((3776, 3821), 'torch.device', 'torch.device', (["('cuda:0' if use_cuda else 'cpu')"], {}), "('cuda:0' if use_cuda else 'cpu')\n", (3788, 3821), False, 'import torch\n'), ((3921, 3939), 'Wrapper.wrappers.make_atari', 'make_atari', (['env_id'], {}), '(env_id)\n', (3931, 3939), False, 'from Wrapper.wrappers import make_atari, wrap_deepmind, wrap_pytorch\n'), ((3950, 3986), 'Wrapper.wrappers.wrap_deepmind', 'wrap_deepmind', (['env', 'args.frame_stack'], {}), '(env, args.frame_stack)\n', (3963, 3986), False, 'from Wrapper.wrappers import make_atari, wrap_deepmind, wrap_pytorch\n'), ((3997, 4014), 'Wrapper.wrappers.wrap_pytorch', 'wrap_pytorch', (['env'], {}), '(env)\n', (4009, 4014), False, 'from Wrapper.wrappers import make_atari, wrap_deepmind, wrap_pytorch\n'), ((4062, 4090), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (4079, 4090), False, 'import torch\n'), ((4165, 4192), 'models.dqn.ReplayBuffer', 'ReplayBuffer', (['args.capacity'], {}), '(args.capacity)\n', (4177, 4192), False, 'from models.dqn import QLearner, compute_td_loss, ReplayBuffer\n'), ((4306, 4340), 'models.dqn.QLearner', 'QLearner', (['env', 'args', 'replay_buffer'], {}), '(env, args, replay_buffer)\n', (4314, 4340), False, 'from models.dqn import QLearner, compute_td_loss, ReplayBuffer\n'), ((4362, 4396), 'models.dqn.QLearner', 'QLearner', (['env', 'args', 'replay_buffer'], {}), '(env, args, replay_buffer)\n', (4370, 4396), False, 'from models.dqn import QLearner, compute_td_loss, ReplayBuffer\n'), ((5727, 5738), 'time.time', 'time.time', ([], {}), '()\n', (5736, 5738), False, 'import time\n'), ((5812, 5823), 'time.time', 'time.time', ([], {}), '()\n', (5821, 5823), False, 'import time\n'), ((4569, 4630), 'torch.optim.lr_scheduler.StepLR', 'StepLR', (['optimizer'], {'step_size': 'args.step_size', 'gamma': 'args.gamma'}), '(optimizer, step_size=args.step_size, gamma=args.gamma)\n', (4575, 4630), False, 'from torch.optim.lr_scheduler import StepLR, ReduceLROnPlateau\n'), ((7750, 7776), 'numpy.mean', 'np.mean', (['all_rewards[-10:]'], {}), '(all_rewards[-10:])\n', (7757, 7776), True, 'import numpy as np\n'), ((7804, 7831), 'numpy.mean', 'np.mean', (['all_rewards[-100:]'], {}), '(all_rewards[-100:])\n', (7811, 7831), True, 'import numpy as np\n'), ((10449, 10488), 'numpy.save', 'np.save', (['args.save_result_path', 'results'], {}), '(args.save_result_path, results)\n', (10456, 10488), True, 'import numpy as np\n'), ((10848, 11046), 'numpy.save', 'np.save', (["(args.save_interim_path + \n 'fresults_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.npy' % (args.lr,\n frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))", 'results'], {}), "(args.save_interim_path + \n 'fresults_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.npy' % (args.lr,\n frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn),\n results)\n", (10855, 11046), True, 'import numpy as np\n'), ((11463, 11661), 'numpy.save', 'np.save', (["(args.save_interim_path + \n 'fresults_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.npy' % (args.lr,\n frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn))", 'results'], {}), 
"(args.save_interim_path + \n 'fresults_lr%s_frame_%s_framestack_%s_scheduler_%s_%s.npy' % (args.lr,\n frame_idx, args.frame_stack, args.use_optim_scheduler, args.interim_fn),\n results)\n", (11470, 11661), True, 'import numpy as np\n'), ((5149, 5196), 'math.exp', 'math.exp', (['(-1.0 * frame_idx / args.epsilon_decay)'], {}), '(-1.0 * frame_idx / args.epsilon_decay)\n', (5157, 5196), False, 'import math, random\n'), ((6465, 6561), 'models.dqn.compute_td_loss', 'compute_td_loss', (['model_Q', 'model_target_Q', 'args.batch_size', 'args.gamma', 'replay_buffer', 'args.N'], {}), '(model_Q, model_target_Q, args.batch_size, args.gamma,\n replay_buffer, args.N)\n', (6480, 6561), False, 'from models.dqn import QLearner, compute_td_loss, ReplayBuffer\n'), ((8033, 8048), 'numpy.mean', 'np.mean', (['losses'], {}), '(losses)\n', (8040, 8048), True, 'import numpy as np\n'), ((8157, 8177), 'numpy.mean', 'np.mean', (['all_rewards'], {}), '(all_rewards)\n', (8164, 8177), True, 'import numpy as np\n'), ((6229, 6240), 'time.time', 'time.time', ([], {}), '()\n', (6238, 6240), False, 'import time\n'), ((7615, 7626), 'time.time', 'time.time', ([], {}), '()\n', (7624, 7626), False, 'import time\n'), ((8521, 8532), 'time.time', 'time.time', ([], {}), '()\n', (8530, 8532), False, 'import time\n')] |
"""
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cloudcafe.compute.composites import _ComputeAuthComposite
from cloudcafe.compute.extensions.ip_associations_api.client import \
IPAssociationsClient
class IPAssociationsComposite(object):
_auth_composite = _ComputeAuthComposite
def __init__(self, auth_composite=None):
self.auth_composite = auth_composite or self._auth_composite()
self.client = IPAssociationsClient(**self.auth_composite.client_args)
self.config = None
self.behaviors = None
| [
"cloudcafe.compute.extensions.ip_associations_api.client.IPAssociationsClient"
] | [((941, 996), 'cloudcafe.compute.extensions.ip_associations_api.client.IPAssociationsClient', 'IPAssociationsClient', ([], {}), '(**self.auth_composite.client_args)\n', (961, 996), False, 'from cloudcafe.compute.extensions.ip_associations_api.client import IPAssociationsClient\n')] |
#!/usr/bin/python
#
# particle_randomizer.py
# CSPong
# Created by <NAME> on 3/02/2016.
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Tag Games Limited
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import sys
import os
import shutil
import random
import file_system_utils
particle_prefixes = ["YellowMagma", "BlueMagma", "PinkIceCream", "BlueIceCream", "Smoke", "Spark", "Beam"]
min_particles = 0
max_particles = 3
#------------------------------------------------------------------------------
# Randomly selecting a particle image suffixed with numbers from min to max
# num particles and copying it from the input to the output.
#
# @author <NAME>
#
# @param Input path
# @param Output path
#------------------------------------------------------------------------------
def build(input_path, output_path):
print("-----------------------------------------")
print(" Randomizing particle images")
print("-----------------------------------------")
if(input_path.endswith("/") == False):
input_path = input_path + "/"
if(output_path.endswith("/") == False):
output_path = output_path + "/"
if (os.path.exists(input_path) == False):
print("Error: input path of " + input_path + " does not exist.")
return
if (os.path.exists(output_path) == False):
        os.makedirs(output_path)
for particle_prefix in particle_prefixes:
random_particle = random.randint(min_particles, max_particles)
targeted_particle_file_path = input_path + particle_prefix + "_" + str(random_particle) + ".png"
output_particle_file_path = output_path + particle_prefix + ".png"
if os.path.exists(targeted_particle_file_path):
shutil.copyfile(targeted_particle_file_path, output_particle_file_path)
else:
print("Error: File with path " + targeted_particle_file_path + " not found.")
#------------------------------------------------------------------------------
# The entry point into the script.
#
# @author <NAME>
#
# @param The list of arguments.
#------------------------------------------------------------------------------
def main(args):
    if len(args) != 3:
print("ERROR: Incorrect parameters supplied.")
return
input_path = args[1]
output_path = args[2]
build(input_path, output_path)
if __name__ == "__main__":
main(sys.argv)
| [
"os.path.exists",
"shutil.copyfile",
"random.randint",
"os.makedirs"
] | [((2161, 2187), 'os.path.exists', 'os.path.exists', (['input_path'], {}), '(input_path)\n', (2175, 2187), False, 'import os\n'), ((2283, 2310), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (2297, 2310), False, 'import os\n'), ((2324, 2348), 'os.makedirs', 'os.makedirs', (['output_path'], {}), '(output_path)\n', (2335, 2348), False, 'import os\n'), ((2418, 2462), 'random.randint', 'random.randint', (['min_particles', 'max_particles'], {}), '(min_particles, max_particles)\n', (2432, 2462), False, 'import random\n'), ((2639, 2682), 'os.path.exists', 'os.path.exists', (['targeted_particle_file_path'], {}), '(targeted_particle_file_path)\n', (2653, 2682), False, 'import os\n'), ((2687, 2758), 'shutil.copyfile', 'shutil.copyfile', (['targeted_particle_file_path', 'output_particle_file_path'], {}), '(targeted_particle_file_path, output_particle_file_path)\n', (2702, 2758), False, 'import shutil\n')] |
from setuptools import setup
import gym
import flappy_env
def human_playing():
    # Lets a human play by wrapping the actual gym environment
env = flappy_env.FlappyEnv(server=False)
env.reset()
env.step(action=[])
total_reward = 0
while env.running:
actions = env.get_actions()
obs, reward, done, info = env.step(action=actions)
print(f"""
obs: {obs}
reward: {reward}
done: {done}
info: {info}
""")
env.render()
total_reward += reward
env.close()
print(f"total_reward: {total_reward}")
if __name__ == '__main__':
human_playing() | [
"flappy_env.FlappyEnv"
] | [((155, 189), 'flappy_env.FlappyEnv', 'flappy_env.FlappyEnv', ([], {'server': '(False)'}), '(server=False)\n', (175, 189), False, 'import flappy_env\n')] |
from random import randint
from time import sleep
print(40*'-')
print(f'{"JOGA NA MEGA SENA":^40}')
print(40*'-')
q = int(input('Quantos jogos você quer que eu sorteie? '))
print(f'-=-=-=-=-= SORTEANDO {q} JOGOS =-=-=-=-=-')
for i in range(0,q):
p = []
while len(p)!=6:
n = randint(1,60)
if n not in p:
p.append(n)
p.sort()
print(f'Jogo {i+1}: {p[:]}')
p.clear()
sleep(1)
print(f'{"BOA SORTE!":=^40}') | [
"random.randint",
"time.sleep"
] | [((415, 423), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (420, 423), False, 'from time import sleep\n'), ((290, 304), 'random.randint', 'randint', (['(1)', '(60)'], {}), '(1, 60)\n', (297, 304), False, 'from random import randint\n')] |
from pathlib import Path
import cv2
import wandb
from catalyst.dl import ConfusionMatrixCallback, IRunner, Callback, CallbackOrder, CallbackNode, CallbackScope, \
WandbLogger
import numpy as np
from wandb.sklearn import confusion_matrix
class ConfusionMatrixWandbCallback(ConfusionMatrixCallback):
def on_stage_end(self, runner: IRunner):
class_names = self.class_names or [
str(i) for i in range(self.num_classes)
]
wandb.log({'confusion_matrix': confusion_matrix(self.targets, self.outputs, class_names)},
commit=False,
)
class WandbCustomCallback(WandbLogger):
def on_stage_end(self, runner: IRunner):
        wandb.save(str(Path(runner.logdir) / "checkpoints" / "best_full.pth"))
        wandb.save(str(Path(runner.logdir) / "checkpoints" / "last_full.pth"))
class WandbCustomInputCallback(WandbCustomCallback):
def on_stage_start(self, runner: "IRunner"):
super().on_stage_start(runner)
images = []
for batch in runner.loaders.get("train"):
for img, cls in zip(batch[0], batch[1]):
img = img.numpy().transpose((1, 2, 0))
img = (img - img.min())
img = (img / img.max() * 255).astype(np.uint8)
images.append(wandb.Image(img, caption=f"cls: {cls}"))
break
wandb.log({"examples": images},
commit=False,
step=runner.global_sample_step)
class WandbCustomSegmCallback(WandbCustomCallback):
def on_stage_start(self, runner: "IRunner"):
super().on_stage_start(runner)
images = []
for batch in runner.loaders.get("train"):
for img, mask in zip(batch[0], batch[1]):
img = img.numpy().transpose((1, 2, 0))
img = (img - img.min())
img = (img / img.max() * 255).astype(np.uint8)
mask = (255*mask.numpy()).astype(np.uint8)
img = cv2.addWeighted(img, 0.6, cv2.cvtColor(mask[0], cv2.COLOR_GRAY2BGR), 0.4, 0)
images.append(wandb.Image(img))
break
wandb.log({"examples": images},
commit=False,
step=runner.global_sample_step)
| [
"wandb.Image",
"wandb.log",
"pathlib.Path",
"wandb.sklearn.confusion_matrix",
"cv2.cvtColor"
] | [((1378, 1455), 'wandb.log', 'wandb.log', (["{'examples': images}"], {'commit': '(False)', 'step': 'runner.global_sample_step'}), "({'examples': images}, commit=False, step=runner.global_sample_step)\n", (1387, 1455), False, 'import wandb\n'), ((2149, 2226), 'wandb.log', 'wandb.log', (["{'examples': images}"], {'commit': '(False)', 'step': 'runner.global_sample_step'}), "({'examples': images}, commit=False, step=runner.global_sample_step)\n", (2158, 2226), False, 'import wandb\n'), ((496, 553), 'wandb.sklearn.confusion_matrix', 'confusion_matrix', (['self.targets', 'self.outputs', 'class_names'], {}), '(self.targets, self.outputs, class_names)\n', (512, 553), False, 'from wandb.sklearn import confusion_matrix\n'), ((1311, 1350), 'wandb.Image', 'wandb.Image', (['img'], {'caption': 'f"""cls: {cls}"""'}), "(img, caption=f'cls: {cls}')\n", (1322, 1350), False, 'import wandb\n'), ((2024, 2065), 'cv2.cvtColor', 'cv2.cvtColor', (['mask[0]', 'cv2.COLOR_GRAY2BGR'], {}), '(mask[0], cv2.COLOR_GRAY2BGR)\n', (2036, 2065), False, 'import cv2\n'), ((2105, 2121), 'wandb.Image', 'wandb.Image', (['img'], {}), '(img)\n', (2116, 2121), False, 'import wandb\n'), ((719, 738), 'pathlib.Path', 'Path', (['runner.logdir'], {}), '(runner.logdir)\n', (723, 738), False, 'from pathlib import Path\n'), ((799, 818), 'pathlib.Path', 'Path', (['runner.logdir'], {}), '(runner.logdir)\n', (803, 818), False, 'from pathlib import Path\n')] |
from inspect import isgenerator
from fluiddb.cache.namespace import CachingNamespaceAPI
from fluiddb.data.path import getParentPath
from fluiddb.data.permission import Operation
from fluiddb.security.exceptions import PermissionDeniedError
from fluiddb.security.permission import checkPermissions
# FIXME We should probably have an INamespaceAPI interface and assert that
# SecureNamespaceAPI and NamespaceAPI both implement it. And also move the
# docstrings there.
class SecureNamespaceAPI(object):
"""The public API to secure L{Namespace}-related functionality.
@param user: The L{User} to perform operations on behalf of.
"""
def __init__(self, user):
self._api = CachingNamespaceAPI(user)
self._user = user
def create(self, values):
"""See L{NamespaceAPI.create}.
@raise PermissionDeniedError: Raised if the user is not authorized to
create L{Namespace}s.
"""
paths = []
pathsAndOperations = []
for path, description in values:
parentPath = getParentPath(path)
pathsAndOperations.append((parentPath, Operation.CREATE_NAMESPACE))
paths.append(path)
deniedOperations = checkPermissions(self._user, pathsAndOperations)
if deniedOperations:
raise PermissionDeniedError(self._user.username, deniedOperations)
return self._api.create(values)
def delete(self, paths):
"""See L{NamespaceAPI.delete}.
@raise PermissionDeniedError: Raised if the user is not authorized to
delete a given L{Namespace}.
"""
if isgenerator(paths):
paths = list(paths)
pathsAndOperations = [(path, Operation.DELETE_NAMESPACE)
for path in paths]
deniedOperations = checkPermissions(self._user, pathsAndOperations)
if deniedOperations:
raise PermissionDeniedError(self._user.username, deniedOperations)
return self._api.delete(paths)
def get(self, paths, withDescriptions=None, withNamespaces=None,
withTags=None):
"""See L{NamespaceAPI.get}.
@raise PermissionDeniedError: Raised if the user is not authorized to
list a given L{Namespace}.
"""
if withNamespaces or withTags:
pathsAndOperations = [(path, Operation.LIST_NAMESPACE)
for path in paths]
deniedOperations = checkPermissions(self._user, pathsAndOperations)
if deniedOperations:
raise PermissionDeniedError(self._user.username,
deniedOperations)
return self._api.get(paths, withDescriptions=withDescriptions,
withNamespaces=withNamespaces, withTags=withTags)
def set(self, values):
"""See L{NamespaceAPI.set}.
@raise PermissionDeniedError: Raised if the user is not authorized to
update a given L{Namespace}.
"""
pathsAndOperations = [(path, Operation.UPDATE_NAMESPACE)
for path in values.iterkeys()]
deniedOperations = checkPermissions(self._user, pathsAndOperations)
if deniedOperations:
raise PermissionDeniedError(self._user.username, deniedOperations)
return self._api.set(values)
| [
"fluiddb.data.path.getParentPath",
"inspect.isgenerator",
"fluiddb.security.exceptions.PermissionDeniedError",
"fluiddb.security.permission.checkPermissions",
"fluiddb.cache.namespace.CachingNamespaceAPI"
] | [((698, 723), 'fluiddb.cache.namespace.CachingNamespaceAPI', 'CachingNamespaceAPI', (['user'], {}), '(user)\n', (717, 723), False, 'from fluiddb.cache.namespace import CachingNamespaceAPI\n'), ((1221, 1269), 'fluiddb.security.permission.checkPermissions', 'checkPermissions', (['self._user', 'pathsAndOperations'], {}), '(self._user, pathsAndOperations)\n', (1237, 1269), False, 'from fluiddb.security.permission import checkPermissions\n'), ((1631, 1649), 'inspect.isgenerator', 'isgenerator', (['paths'], {}), '(paths)\n', (1642, 1649), False, 'from inspect import isgenerator\n'), ((1824, 1872), 'fluiddb.security.permission.checkPermissions', 'checkPermissions', (['self._user', 'pathsAndOperations'], {}), '(self._user, pathsAndOperations)\n', (1840, 1872), False, 'from fluiddb.security.permission import checkPermissions\n'), ((3184, 3232), 'fluiddb.security.permission.checkPermissions', 'checkPermissions', (['self._user', 'pathsAndOperations'], {}), '(self._user, pathsAndOperations)\n', (3200, 3232), False, 'from fluiddb.security.permission import checkPermissions\n'), ((1062, 1081), 'fluiddb.data.path.getParentPath', 'getParentPath', (['path'], {}), '(path)\n', (1075, 1081), False, 'from fluiddb.data.path import getParentPath\n'), ((1317, 1377), 'fluiddb.security.exceptions.PermissionDeniedError', 'PermissionDeniedError', (['self._user.username', 'deniedOperations'], {}), '(self._user.username, deniedOperations)\n', (1338, 1377), False, 'from fluiddb.security.exceptions import PermissionDeniedError\n'), ((1920, 1980), 'fluiddb.security.exceptions.PermissionDeniedError', 'PermissionDeniedError', (['self._user.username', 'deniedOperations'], {}), '(self._user.username, deniedOperations)\n', (1941, 1980), False, 'from fluiddb.security.exceptions import PermissionDeniedError\n'), ((2475, 2523), 'fluiddb.security.permission.checkPermissions', 'checkPermissions', (['self._user', 'pathsAndOperations'], {}), '(self._user, pathsAndOperations)\n', (2491, 2523), False, 'from fluiddb.security.permission import checkPermissions\n'), ((3280, 3340), 'fluiddb.security.exceptions.PermissionDeniedError', 'PermissionDeniedError', (['self._user.username', 'deniedOperations'], {}), '(self._user.username, deniedOperations)\n', (3301, 3340), False, 'from fluiddb.security.exceptions import PermissionDeniedError\n'), ((2579, 2639), 'fluiddb.security.exceptions.PermissionDeniedError', 'PermissionDeniedError', (['self._user.username', 'deniedOperations'], {}), '(self._user.username, deniedOperations)\n', (2600, 2639), False, 'from fluiddb.security.exceptions import PermissionDeniedError\n')] |
# imported in __init__.py because the tests do not run without it
from django.test import (TestCase, Client)
from django.contrib.auth import get_user_model
from django.urls import reverse # generates urls for django admin page
class AdminSiteTest(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='<EMAIL>',
password='<PASSWORD>'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email='<EMAIL>',
password='<PASSWORD>',
name='test user name'
)
    def test_users_listed(self):
        """Test that users are listed on the user page"""
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
"""Test that the user page works"""
url = reverse('admin:core_user_change', args=[self.user.id])
# /admin/core/user/1
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
    def test_create_user_page(self):
        """Test that the create user page works"""
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| [
"django.urls.reverse",
"django.contrib.auth.get_user_model",
"django.test.Client"
] | [((296, 304), 'django.test.Client', 'Client', ([], {}), '()\n', (302, 304), False, 'from django.test import TestCase, Client\n'), ((763, 800), 'django.urls.reverse', 'reverse', (['"""admin:core_user_changelist"""'], {}), "('admin:core_user_changelist')\n", (770, 800), False, 'from django.urls import reverse\n'), ((1032, 1086), 'django.urls.reverse', 'reverse', (['"""admin:core_user_change"""'], {'args': '[self.user.id]'}), "('admin:core_user_change', args=[self.user.id])\n", (1039, 1086), False, 'from django.urls import reverse\n'), ((1302, 1332), 'django.urls.reverse', 'reverse', (['"""admin:core_user_add"""'], {}), "('admin:core_user_add')\n", (1309, 1332), False, 'from django.urls import reverse\n'), ((331, 347), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (345, 347), False, 'from django.contrib.auth import get_user_model\n'), ((516, 532), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (530, 532), False, 'from django.contrib.auth import get_user_model\n')] |
# Utsiktsberäkning (view calculation)
# Copyright (c) 2022 <NAME> & <NAME> | CC BY-NC-SA 4.0
import json
import math
import sys
from posixpath import lexists
import numpy as np
import plotly.express as px
import pyproj as proj
import rasterio
from numpy.lib.function_base import i0
from datetime import datetime
import time
from contextlib import redirect_stdout
import worldfiles as wf
sys.path.append('./py/__pymodules__')
# Longitude = X ; Latitude = Y
# Define ellipsoid properties
earthRadius = 6371000 # meters
equatorRadius = 6378137
poleRadius = 6356752
maxCurveRadius = (equatorRadius**2)/poleRadius # used for efficiently estimating the curve shift on far away tiles
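# For scale (illustrative numbers, not from the original source): the curvature
# drop over a surface distance d is roughly d**2 / (2*R), i.e. about 7.8 m over
# 10 km and about 78 m over ~31.5 km with R = 6371000 m.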
# Define projection-conversions
wmTOeu = proj.Transformer.from_crs("epsg:4326", "epsg:3035", always_xy=True)
euTOwm = proj.Transformer.from_crs("epsg:3035", "epsg:4326", always_xy=True)
exportData = [] # :TEMP:
def log(*msg):
with open('temp/py_log.txt', 'a+') as f:
time = datetime.now().strftime("%m/%d/%Y, %H:%M:%S")
msgCon = " ".join(map(str, msg))
with redirect_stdout(f):
print(time + " -- " + msgCon)
def tileIndexToCoord(tLon, tLat, x, y):
return(tLon*100000 + x*25, (tLat+1)*100000 - (y+1)*25) # Longitude, Latitude
def coordToTileIndex(lon, lat):
# lon, lat = lon-12.5, lat+12.5 # Adjust for tif-grid coordinates being referenced top-right instead of center # :TODO:
tLon, tLat = math.floor(lon/100000), math.floor(lat/100000)
x, y = round((lon-(tLon*100000))/25), round(3999-((lat-(tLat*100000))/25))
return(tLon, tLat, x, y)
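# Round-trip sketch (illustrative): tileIndexToCoord(45, 39, 0, 0) gives the
# EPSG:3035 coordinate (4500000, 3999975), and coordToTileIndex(4500000, 3999975)
# returns (45, 39, 0, 0) -- x counts 25 m cells eastwards, y counts them
# southwards from the top row of the 4000x4000 tile.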
def tileId(tLon, tLat): #Convert tile-index (array [x, y]) to tile-id (string "x_y")
return(str(tLon) + "_" + str(tLat))
def tileIndex(tilename): #Convert tile-id (string "x_y") to tile-index (array [x, y])
return(list(map(int, tilename.split("_"))))
def getTileArea(tLon, tLat):
# Returns a leaflet polyline representing the edge of a tile
latlngs = []
latlngs.append([*euTOwm.transform(tLon*100000, tLat*100000)])
latlngs.append([*euTOwm.transform((tLon+1)*100000, tLat*100000)])
latlngs.append([*euTOwm.transform((tLon+1)*100000, (tLat+1)*100000)])
latlngs.append([*euTOwm.transform(tLon*100000, (tLat+1)*100000)])
latlngs.append([*euTOwm.transform(tLon*100000, tLat*100000)])
for itt in range(len(latlngs)):
latlngs[itt].reverse()
return(latlngs)
def radiusCalculation(lat): # Calculates the earth's radius at a given latitude
lat = lat*(math.pi/180) # Convert to radians
R = (
(equatorRadius*poleRadius)
/
math.sqrt((poleRadius*math.cos(lat))**2 + ((equatorRadius*math.sin(lat))**2))
)
# R_old = math.sqrt( # :TEMP:
# (((equatorRadius**2)*(math.cos(lat))) **2 + ((poleRadius**2)*(math.sin(lat)))**2)
# /
# (((equatorRadius)*(math.cos(lat)))**2 + ((poleRadius)*(math.sin(lat)))**2)
# )
return(R)
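    # Sanity check (illustrative): the formula reduces to equatorRadius at
    # lat = 0 and to poleRadius at lat = 90, i.e. roughly 6378137 m at the
    # equator and 6356752 m at the poles.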
def getLinePoints(tile, startX, startY, endX, endY):
points = []
if (startX-endX != 0):
v = math.atan((startY-endY)/(startX-endX))
else:
v = math.pi
l = math.sqrt((startY-endY)**2 + (startX-endX)**2)
for i in range(math.floor(l)):
x = startX + round(math.cos(v)*i)
y = startY + round(math.sin(v)*i)
points.append(tile[y][x])
return(points)
def getLinePointsAndCoords(tile, tLon, tLat, startX, startY, v):
points = []
coords = []
v = -v
i = 0
x = startX
y = startY
while(x >= 0 and x <= 3999 and y >= 0 and y <= 3999):
points.append(tile[y][x])
lat, lon = euTOwm.transform(*tileIndexToCoord(tLon, tLat, x, y))
coords.append([lon, lat])
i += 1
x = startX + round(math.cos(v)*i)
y = startY + round(math.sin(v)*i)
return(points, coords)
def exportPointsToCSV(data):
# [row][col]
with open("temp/plotPoints.csv", "a+") as f:
f.write("sep=;\n")
for i in range(len(data)):
for j in range(len(data[i])):
f.write(str(data[i][j]).replace(".", ","))
if j != len(data[i]) - 1:
f.write(";")
f.write("\n")
def plotlyShow(data):
xPl = []
yPl = []
cPl = []
sPl = []
for i in range(len(data)):
xPl.append(data[i][0])
yPl.append(data[i][1])
cPl.append(data[i][2])
sPl.append(data[i][3])
fig = px.scatter(x=xPl, y=yPl, color=cPl, symbol=sPl)
fig.show()
def inBounds(x, y, top, left, bottom, right):
return(x >= left and x <= right and y >= top and y <= bottom)
def pointSteps(di):
# The change between calculation-points should be at least 1 full pixel in x or y direction
if abs(math.cos(di)) > abs(math.sin(di)):
xChange = (math.cos(di)/abs(math.cos(di)))
yChange = abs(math.tan(di)) * ((math.sin(di) / abs(math.sin(di))) if math.sin(di) else 0)
else:
yChange = (math.sin(di)/abs(math.sin(di)))
xChange = abs(1/(math.tan(di) if math.tan(di) else 1)) * ((math.cos(di) / abs(math.cos(di))) if math.cos(di) else 0)
lChange = math.sqrt(xChange**2 + yChange**2)
return(xChange, yChange, lChange)
def sssAngle(R1, R2, l):
cosv = ((R1**2 + R2**2 - l**2)/(2*R1*R2))
return(math.acos(cosv) if cosv <= 1 else math.acos(cosv**-1))
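# sssAngle gives the law-of-cosines angle opposite side l in a triangle with
# sides R1 and R2; illustrative limit: for R1 = R2 = R and a small arc l the
# angle approaches l / R. The cosv**-1 branch appears to guard against rounding
# pushing cosv slightly above 1.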
def createResQueue(lon, lat, res):
tLon, tLat, startX, startY = coordToTileIndex(lon, lat)
startTileId = tileId(tLon, tLat)
queue = {startTileId: []}
for i in range(res):
queue[startTileId].append(
{
"p": {"x": startX, "y": startY},
"di": ((2*math.pi)/res) * i,
"start": {"v": -4, "lSurf": 0, "radius": 0},
"last": 0
}
)
return(queue)
def createDiQueue(lon, lat, dis):
tLon, tLat, startX, startY = coordToTileIndex(lon, lat)
startTileId = tileId(tLon, tLat)
queue = {startTileId: []}
for di in dis:
queue[startTileId].append(
{
"p": {"x": startX, "y": startY},
"di": di,
"start": {"v": -4, "lSurf": 0, "radius": 0},
"last": 0
}
)
return(queue)
def nextTileBorder(tilename, x, y, di):
xLen = (4000-x) if math.cos(di) > 0 else -1-x
yLen = -1-y if math.sin(di) > 0 else (4000-y)
xCost = abs(xLen / math.cos(di)) if math.cos(di) else 10000
yCost = abs(yLen / math.sin(di)) if math.sin(di) else 10000
if xCost < yCost:
nX = x + xLen
nY = y + abs(xLen * math.tan(di))*(1 if math.sin(di) > 0 else -1)
else:
nX = x + abs(yLen / math.tan(di))*(1 if math.cos(di) > 0 else -1)
nY = y + yLen
return(coordToTileIndex(*tileIndexToCoord(*tileIndex(tilename), nX, nY)))
def checkNextTile(tilename, x, y, di, vMax, hOffset, lSurf, demTiles, maxElev, startAngle, startRadius, depth): # :TODO:
tLonNext, tLatNext, xNext, yNext = nextTileBorder(tilename, x, y, di)
tilenameNext = tileId(tLonNext, tLatNext)
lon, lat = tileIndexToCoord(*tileIndex(tilename), x, y)
lonNext, latNext = tileIndexToCoord(tLonNext, tLatNext, xNext, yNext)
d_lSurf = math.sqrt(((lon-lonNext)/25)**2 + ((lat-latNext)/25)**2)
lSurf += d_lSurf
if tilenameNext in demTiles["elev"]:
if False: # :TODO: # [4339550, 3914250]
l = 0
step = 4
calcAngle = startAngle
while l < d_lSurf:
l += step
x += round(math.cos(di)*step)
y += round(math.sin(di)*step)
lon, lat = euTOwm.transform(*tileIndexToCoord(*tileIndex(tilename), x, y))
pRadius = radiusCalculation(lat) # Earths radius in the current point (in meters)
calcAngle += sssAngle(calcAngle, pRadius, step*25)
xA = math.sin(calcAngle)*pRadius
curveShift = math.sqrt(pRadius**2 - xA**2) - startRadius
requiredElev = xA*math.tan(vMax) + curveShift + hOffset
angle = calcAngle
else:
curveShift = maxCurveRadius - math.cos((lSurf*25)/maxCurveRadius)*maxCurveRadius
requiredElev = math.sin((lSurf*25)/maxCurveRadius)*math.tan(vMax) + curveShift + hOffset
angle = (lSurf*25)/earthRadius
if maxElev[tilenameNext] < requiredElev:
if maxElev["global"] < requiredElev:
return(0, "")
else:
return(checkNextTile(tilenameNext, xNext, yNext, di, vMax, hOffset, lSurf, demTiles, maxElev, startAngle, startRadius, depth+1))
else:
lon, lat = euTOwm.transform(*tileIndexToCoord(*tileIndex(tilenameNext), xNext, yNext))
return(1, [tilenameNext, {"x": xNext, "y": yNext, "lSurf": lSurf, "radius": radiusCalculation(lat), "angle": angle}])
else:
return(2, "")
# Returns a leaflet polyline object representing visible areas
def calcViewLine(tiles, point, tilename, viewHeight, demTiles, maxElev, skipObj):
pX = point["p"]["x"]
pY = point["p"]["y"]
di = point["di"]
vMax = point["start"]["v"]
lSurf = point["start"]["lSurf"]
log("---", tilename, "---") # :TEMP: # :HERE:
tileMaxElev = maxElev[tilename]
latlngs = [] # List of lines to visualize the view
lladd = [] # Stores consecutive points to be added to the latlngs list
llon = False # Keeps track of whether the last point was added to the latlngs list
if "h" in point["start"]:
h0 = point["start"]["h"]
else:
h0 = tiles["elev"][pY, pX] if tiles["elev"][pY, pX] > -10000 else 0 # Elevation of the first point
hBreak = False # Keeps track of whether the calculation stopped due to max elevation being reached
# Longitude, Latitude of the first point (in degrees)
lon, lat = euTOwm.transform(*tileIndexToCoord(*tileIndex(tilename), pX, pY))
startRadius = point["start"]["radius"] if point["start"]["radius"] else radiusCalculation(lat) # Earths radius in the first point (in meters)
xChange, yChange, lChange = pointSteps(di)
lastPoint = point["last"] if point["last"] else {
"radius": radiusCalculation(lat),
"angle": 0
}
calcAngle = lastPoint["angle"]
while inBounds(pX, pY, -.5, -.5, 3999.5, 3999.5):
# h # the surface-height perpendicular to the ellipsoid.
# x # absolute x-position
h = tiles["elev"][round(pY), round(pX)]
h = h if h > -10000 else 0
if tiles["hasObj"]:
objH = tiles["obj"][round(pY), round(pX)]
objH = objH if objH >= 0 else 0
if lSurf > skipObj/25:
h += objH
lon, lat = euTOwm.transform(*tileIndexToCoord(*tileIndex(tilename), pX, pY))
pRadius = radiusCalculation(lat) # Earths radius in the current point (in meters)
# print(calcAngle) # :TEMP:
        x = math.sin(calcAngle)*pRadius # Account for the earth's curvature dropping off
        curveShift = math.sqrt(pRadius**2 - x**2) - startRadius # Shift in absolute y-position due to earth's curvature
        x -= math.sin(calcAngle)*h # Account for the height data being perpendicular to the earth's surface
y = math.cos(calcAngle)*h + curveShift - h0 - viewHeight
calcAngle += sssAngle(lastPoint["radius"], pRadius, lChange*25)
lastPoint = {
"radius": pRadius,
"angle": calcAngle
}
# Detect visibility
v = math.atan(y / x) if x else -math.pi/2
global exportData
# exportData.append([lSurf, x, y, curveShift, h, objH, ("a" if v > vMax else "b")]) # :TEMP:
if v > vMax and x > 0:
# Point is visible, add it to the current line (lladd)
if llon:
if len(lladd) > 1:
lladd[1] = [lat, lon]
else:
lladd.append([lat, lon])
else:
lladd.append([lat, lon])
llon = True
vMax = v
elif llon:
# Point is not visible, break and append the current line (lladd) to the latlngs list
if len(lladd) < 2:
lladd = [lladd[0], lladd[0]]
latlngs.append(lladd)
llon = False
lladd = []
# Elevation required to see a point with the current angle
requiredElev = (math.tan(vMax)*x) - curveShift + h0 + viewHeight
if requiredElev > tileMaxElev and x > 0:
hBreak = True
break
lSurf += lChange
#pY -= math.sin(di) ; pX += math.cos(di) # :TEMP:
pY -= yChange; pX += xChange
# exportPointsToCSV(data=exportData) # :TEMP:
if llon: # Add the current line (lladd) to the latlngs list before returning
latlngs.append(lladd)
queueObj = {
"p": {"x": 0, "y": 0},
"di": di,
"start": {"v": vMax, "lSurf": lSurf, "radius": startRadius, "h": h0},
"last": lastPoint
}
tLon, tLat, stX, stY = coordToTileIndex(*tileIndexToCoord(*tileIndex(tilename), round(pX), round(pY)))
if hBreak:
lTime = time.time()
cnCode, cnObj = checkNextTile(tilename, pX, pY, di, vMax, (h0 + viewHeight), lSurf, demTiles, maxElev, lastPoint["angle"], startRadius, 0)
#cnCode = 0 # :TEMP:
# log("checkTiles time:", time.time()-lTime);
if cnCode == 0:
return(latlngs, 0, "")
elif cnCode == 1:
# print("Next tile:", cnObj[0], "at", cnObj[1], "with dir", di) # :TEMP:
queueObj["p"] = {"x": cnObj[1]["x"], "y": cnObj[1]["y"]}
queueObj["start"]["lSurf"] = cnObj[1]["lSurf"]
queueObj["last"] = {"radius": cnObj[1]["radius"], "angle": cnObj[1]["angle"]}
log(math.sin(cnObj[1]["angle"])*earthRadius, queueObj)
return(latlngs, 1, [cnObj[0], queueObj])
elif cnCode == 2:
return(latlngs, 2, ["warn", "Some of the view is not visible due to the lack of DEM data"])
elif tileId(tLon, tLat) in demTiles["elev"]:
queueObj["p"] = {"x": stX, "y": stY}
return(latlngs, 1, [tileId(tLon, tLat), queueObj])
else:
return(latlngs, 2, ["warn", "Some of the view is not visible due to the lack of DEM data"])
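# Drain the work queue tile by tile: load each tile's elevation (and object-height) raster once and trace every queued sightline through it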
def calcViewPolys(queue, viewHeight):
lines = [] # Sightlines
hzPoly = [] # Horizon polygon
exInfo = [] # Extra info about the execution
# Open the DEM information files
demFileData = json.load(open("./serverParameters/demfiles.json", "r"))
demPath = demFileData["path"]
demTiles = demFileData["tiles"]
maxElev = json.load(open("./calcData/maxElevations.json", "r"))
while (len(queue) > 0):
# Get the next tile to process
tilename = list(queue)[0]
element = queue[tilename]
tiles = {
"elev": rasterio.open(demPath + "/elev/dem_" + tilename + ".tif").read()[0],
"obj": (rasterio.open(demPath + "/objects/" + tilename + ".tif").read()[0] if tilename in demTiles["obj"] else -1),
"hasObj": (True if tilename in demTiles["obj"] else False)
}
# Process all (starting points and directions) in queue for the current tile
while len(element) > 0:
point = element.pop(0)
line, status, ex = calcViewLine(tiles, point, tilename, viewHeight, demTiles, maxElev, 200)
# Add visible lines to the lines list
for l in line:
lines.append(l)
# Add next starting points to the queue or add execution info to the exInfo list
if status == 1:
if ex[0] in queue:
queue[ex[0]].append(ex[1])
else:
queue[ex[0]] = [ex[1]]
elif status == 2 and ex not in exInfo:
exInfo.append(ex)
del queue[tilename]
# plotlyShow(exportData) # :TEMP:
return(lines, hzPoly, exInfo)
def main():
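    # Ad-hoc check: read a single pixel from the object-height raster of tile 45_39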
tilename = "45_39"
demFileData = json.load(open("./serverParameters/demfiles.json", "r"))
demPath = demFileData["path"]
demTiles = demFileData["tiles"]
tile = rasterio.open(demPath + "/objects/" + tilename + ".tif").read()
print(tile[0][79][5])
#main() | [
"contextlib.redirect_stdout",
"plotly.express.scatter",
"math.tan",
"math.floor",
"math.acos",
"rasterio.open",
"math.sqrt",
"math.cos",
"datetime.datetime.now",
"pyproj.Transformer.from_crs",
"time.time",
"math.sin",
"sys.path.append",
"math.atan"
] | [((373, 410), 'sys.path.append', 'sys.path.append', (['"""./py/__pymodules__"""'], {}), "('./py/__pymodules__')\n", (388, 410), False, 'import sys\n'), ((709, 776), 'pyproj.Transformer.from_crs', 'proj.Transformer.from_crs', (['"""epsg:4326"""', '"""epsg:3035"""'], {'always_xy': '(True)'}), "('epsg:4326', 'epsg:3035', always_xy=True)\n", (734, 776), True, 'import pyproj as proj\n'), ((786, 853), 'pyproj.Transformer.from_crs', 'proj.Transformer.from_crs', (['"""epsg:3035"""', '"""epsg:4326"""'], {'always_xy': '(True)'}), "('epsg:3035', 'epsg:4326', always_xy=True)\n", (811, 853), True, 'import pyproj as proj\n'), ((3092, 3146), 'math.sqrt', 'math.sqrt', (['((startY - endY) ** 2 + (startX - endX) ** 2)'], {}), '((startY - endY) ** 2 + (startX - endX) ** 2)\n', (3101, 3146), False, 'import math\n'), ((4391, 4438), 'plotly.express.scatter', 'px.scatter', ([], {'x': 'xPl', 'y': 'yPl', 'color': 'cPl', 'symbol': 'sPl'}), '(x=xPl, y=yPl, color=cPl, symbol=sPl)\n', (4401, 4438), True, 'import plotly.express as px\n'), ((5081, 5119), 'math.sqrt', 'math.sqrt', (['(xChange ** 2 + yChange ** 2)'], {}), '(xChange ** 2 + yChange ** 2)\n', (5090, 5119), False, 'import math\n'), ((7156, 7224), 'math.sqrt', 'math.sqrt', (['(((lon - lonNext) / 25) ** 2 + ((lat - latNext) / 25) ** 2)'], {}), '(((lon - lonNext) / 25) ** 2 + ((lat - latNext) / 25) ** 2)\n', (7165, 7224), False, 'import math\n'), ((1417, 1441), 'math.floor', 'math.floor', (['(lon / 100000)'], {}), '(lon / 100000)\n', (1427, 1441), False, 'import math\n'), ((1441, 1465), 'math.floor', 'math.floor', (['(lat / 100000)'], {}), '(lat / 100000)\n', (1451, 1465), False, 'import math\n'), ((3015, 3059), 'math.atan', 'math.atan', (['((startY - endY) / (startX - endX))'], {}), '((startY - endY) / (startX - endX))\n', (3024, 3059), False, 'import math\n'), ((3158, 3171), 'math.floor', 'math.floor', (['l'], {}), '(l)\n', (3168, 3171), False, 'import math\n'), ((5238, 5253), 'math.acos', 'math.acos', (['cosv'], {}), '(cosv)\n', (5247, 5253), False, 'import math\n'), ((5272, 5293), 'math.acos', 'math.acos', (['(cosv ** -1)'], {}), '(cosv ** -1)\n', (5281, 5293), False, 'import math\n'), ((6374, 6386), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (6382, 6386), False, 'import math\n'), ((6438, 6450), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (6446, 6450), False, 'import math\n'), ((13129, 13140), 'time.time', 'time.time', ([], {}), '()\n', (13138, 13140), False, 'import time\n'), ((1056, 1074), 'contextlib.redirect_stdout', 'redirect_stdout', (['f'], {}), '(f)\n', (1071, 1074), False, 'from contextlib import redirect_stdout\n'), ((4697, 4709), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (4705, 4709), False, 'import math\n'), ((4717, 4729), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (4725, 4729), False, 'import math\n'), ((4751, 4763), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (4759, 4763), False, 'import math\n'), ((4910, 4922), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (4918, 4922), False, 'import math\n'), ((6256, 6268), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (6264, 6268), False, 'import math\n'), ((6302, 6314), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (6310, 6314), False, 'import math\n'), ((10891, 10910), 'math.sin', 'math.sin', (['calcAngle'], {}), '(calcAngle)\n', (10899, 10910), False, 'import math\n'), ((10986, 11018), 'math.sqrt', 'math.sqrt', (['(pRadius ** 2 - x ** 2)'], {}), '(pRadius ** 2 - x ** 2)\n', (10995, 11018), False, 'import math\n'), ((11097, 11116), 'math.sin', 'math.sin', (['calcAngle'], 
{}), '(calcAngle)\n', (11105, 11116), False, 'import math\n'), ((11464, 11480), 'math.atan', 'math.atan', (['(y / x)'], {}), '(y / x)\n', (11473, 11480), False, 'import math\n'), ((16163, 16219), 'rasterio.open', 'rasterio.open', (["(demPath + '/objects/' + tilename + '.tif')"], {}), "(demPath + '/objects/' + tilename + '.tif')\n", (16176, 16219), False, 'import rasterio\n'), ((956, 970), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (968, 970), False, 'from datetime import datetime\n'), ((4768, 4780), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (4776, 4780), False, 'import math\n'), ((4805, 4817), 'math.tan', 'math.tan', (['di'], {}), '(di)\n', (4813, 4817), False, 'import math\n'), ((4860, 4872), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (4868, 4872), False, 'import math\n'), ((4927, 4939), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (4935, 4939), False, 'import math\n'), ((5046, 5058), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (5054, 5058), False, 'import math\n'), ((6357, 6369), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (6365, 6369), False, 'import math\n'), ((6421, 6433), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (6429, 6433), False, 'import math\n'), ((7838, 7857), 'math.sin', 'math.sin', (['calcAngle'], {}), '(calcAngle)\n', (7846, 7857), False, 'import math\n'), ((7891, 7924), 'math.sqrt', 'math.sqrt', (['(pRadius ** 2 - xA ** 2)'], {}), '(pRadius ** 2 - xA ** 2)\n', (7900, 7924), False, 'import math\n'), ((3201, 3212), 'math.cos', 'math.cos', (['v'], {}), '(v)\n', (3209, 3212), False, 'import math\n'), ((3243, 3254), 'math.sin', 'math.sin', (['v'], {}), '(v)\n', (3251, 3254), False, 'import math\n'), ((3704, 3715), 'math.cos', 'math.cos', (['v'], {}), '(v)\n', (3712, 3715), False, 'import math\n'), ((3746, 3757), 'math.sin', 'math.sin', (['v'], {}), '(v)\n', (3754, 3757), False, 'import math\n'), ((4823, 4835), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (4831, 4835), False, 'import math\n'), ((5009, 5021), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (5017, 5021), False, 'import math\n'), ((8091, 8128), 'math.cos', 'math.cos', (['(lSurf * 25 / maxCurveRadius)'], {}), '(lSurf * 25 / maxCurveRadius)\n', (8099, 8128), False, 'import math\n'), ((2598, 2611), 'math.cos', 'math.cos', (['lat'], {}), '(lat)\n', (2606, 2611), False, 'import math\n'), ((2634, 2647), 'math.sin', 'math.sin', (['lat'], {}), '(lat)\n', (2642, 2647), False, 'import math\n'), ((4842, 4854), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (4850, 4854), False, 'import math\n'), ((4983, 4995), 'math.tan', 'math.tan', (['di'], {}), '(di)\n', (4991, 4995), False, 'import math\n'), ((4967, 4979), 'math.tan', 'math.tan', (['di'], {}), '(di)\n', (4975, 4979), False, 'import math\n'), ((5028, 5040), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (5036, 5040), False, 'import math\n'), ((6535, 6547), 'math.tan', 'math.tan', (['di'], {}), '(di)\n', (6543, 6547), False, 'import math\n'), ((6555, 6567), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (6563, 6567), False, 'import math\n'), ((6619, 6631), 'math.tan', 'math.tan', (['di'], {}), '(di)\n', (6627, 6631), False, 'import math\n'), ((6639, 6651), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (6647, 6651), False, 'import math\n'), ((7482, 7494), 'math.cos', 'math.cos', (['di'], {}), '(di)\n', (7490, 7494), False, 'import math\n'), ((7528, 7540), 'math.sin', 'math.sin', (['di'], {}), '(di)\n', (7536, 7540), False, 'import math\n'), ((7965, 7979), 'math.tan', 'math.tan', (['vMax'], {}), '(vMax)\n', (7973, 7979), 
False, 'import math\n'), ((8169, 8206), 'math.sin', 'math.sin', (['(lSurf * 25 / maxCurveRadius)'], {}), '(lSurf * 25 / maxCurveRadius)\n', (8177, 8206), False, 'import math\n'), ((8205, 8219), 'math.tan', 'math.tan', (['vMax'], {}), '(vMax)\n', (8213, 8219), False, 'import math\n'), ((11202, 11221), 'math.cos', 'math.cos', (['calcAngle'], {}), '(calcAngle)\n', (11210, 11221), False, 'import math\n'), ((12378, 12392), 'math.tan', 'math.tan', (['vMax'], {}), '(vMax)\n', (12386, 12392), False, 'import math\n'), ((13776, 13803), 'math.sin', 'math.sin', (["cnObj[1]['angle']"], {}), "(cnObj[1]['angle'])\n", (13784, 13803), False, 'import math\n'), ((14870, 14927), 'rasterio.open', 'rasterio.open', (["(demPath + '/elev/dem_' + tilename + '.tif')"], {}), "(demPath + '/elev/dem_' + tilename + '.tif')\n", (14883, 14927), False, 'import rasterio\n'), ((14959, 15015), 'rasterio.open', 'rasterio.open', (["(demPath + '/objects/' + tilename + '.tif')"], {}), "(demPath + '/objects/' + tilename + '.tif')\n", (14972, 15015), False, 'import rasterio\n')] |
import cv2, os, glob, tqdm, sys
import numpy
from utils import get_image_paths, load_images, stack_images
import dlib
import face_recognition
import face_recognition_models
from umeyama import umeyama
from moviepy.editor import *
from tqdm import tqdm
predictor_68_point_model = face_recognition_models.pose_predictor_model_location()
pose_predictor = dlib.shape_predictor(predictor_68_point_model)
mean_face_x = numpy.array([
0.000213256, 0.0752622, 0.18113, 0.29077, 0.393397, 0.586856, 0.689483, 0.799124,
0.904991, 0.98004, 0.490127, 0.490127, 0.490127, 0.490127, 0.36688, 0.426036,
0.490127, 0.554217, 0.613373, 0.121737, 0.187122, 0.265825, 0.334606, 0.260918,
0.182743, 0.645647, 0.714428, 0.793132, 0.858516, 0.79751, 0.719335, 0.254149,
0.340985, 0.428858, 0.490127, 0.551395, 0.639268, 0.726104, 0.642159, 0.556721,
0.490127, 0.423532, 0.338094, 0.290379, 0.428096, 0.490127, 0.552157, 0.689874,
0.553364, 0.490127, 0.42689 ])
mean_face_y = numpy.array([
0.106454, 0.038915, 0.0187482, 0.0344891, 0.0773906, 0.0773906, 0.0344891,
0.0187482, 0.038915, 0.106454, 0.203352, 0.307009, 0.409805, 0.515625, 0.587326,
0.609345, 0.628106, 0.609345, 0.587326, 0.216423, 0.178758, 0.179852, 0.231733,
0.245099, 0.244077, 0.231733, 0.179852, 0.178758, 0.216423, 0.244077, 0.245099,
0.780233, 0.745405, 0.727388, 0.742578, 0.727388, 0.745405, 0.780233, 0.864805,
0.902192, 0.909281, 0.902192, 0.864805, 0.784792, 0.778746, 0.785343, 0.778746,
0.784792, 0.824182, 0.831803, 0.824182 ])
landmarks_2D = numpy.stack( [ mean_face_x, mean_face_y ], axis=1 )
# get face align matrix from landmarks
def get_align_mat(face):
return umeyama( numpy.array(face.landmarksAsXY()[17:]), landmarks_2D, True )[0:2]
# get inverse face align matrix from landmarks
def get_align_mat_inv(face):
return umeyama(landmarks_2D, numpy.array(face.landmarksAsXY()[17:]), True )[0:2]
# detect faces in image
def detect_faces(frame):
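    # Locate faces with face_recognition, compute their 68-point landmarks and yield one DetectedFace per face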
face_locations = face_recognition.face_locations(frame)
landmarks = _raw_face_landmarks(frame, face_locations)
    for (y, right, bottom, x), face_landmarks in zip(face_locations, landmarks):
        yield DetectedFace(frame[y: bottom, x: right], x, right - x, y, bottom - y, face_landmarks)
# extract all faces in image
def extract_faces(image, size):
facelist = []
for face in detect_faces(image):
        if face.landmarks is None:
            # No landmarks available: fall back to a plain crop and skip the alignment step
            print("Warning! landmarks not found. Switching to crop!")
            facelist.append((face, cv2.resize(face.image, (size, size))))
            continue
        alignment = get_align_mat(face)
        facelist.append((face, transform(image, alignment, size, 48)))
return facelist
def transform(image, mat, size, padding=48 ):
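    # Scale the normalized alignment matrix to the output size, keeping `padding` pixels of margin around the face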
mat = mat * (size - 2 * padding)
mat[:,2] += padding
return cv2.warpAffine( image, mat, ( size, size ) )
def _raw_face_landmarks(face_image, face_locations):
face_locations = [_css_to_rect(face_location) for face_location in face_locations]
return [pose_predictor(face_image, face_location) for face_location in face_locations]
def _css_to_rect(css):
return dlib.rectangle(css[3], css[0], css[1], css[2])
# detected face class
class DetectedFace(object):
def __init__(self, image, x, w, y, h, landmarks):
self.image = image
self.x = x
self.w = w
self.y = y
self.h = h
self.landmarks = landmarks
# retrieve landmarks as tuple list
def landmarksAsXY(self):
return [(p.x, p.y) for p in self.landmarks.parts()]
# this method is used to insert extracted faces again into the original image
def blend_warp(src, dst, mat):
# use some kind of blend to smooth the border
imgMask = numpy.ones(src.shape)
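    # Zero the border pixels of the mask so the warped mask (after interpolation) fades out at the face edge instead of leaving a hard seam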
imgMask[0,:,:] = 0
imgMask[:,0,:] = 0
imgMask[-1,:,:] = 0
imgMask[:,-1,:] = 0
imgMaskWarped = cv2.warpAffine( imgMask, mat, (dst.shape[1],dst.shape[0]))[:, :, 0]
src_warped = cv2.warpAffine( src, mat, (dst.shape[1],dst.shape[0]))
# make the colors smoother with a maximum face alpha of 95%
alpha = imgMaskWarped * 0.95
beta = (1.0 - alpha)
res_warped = dst.copy()
for c in range(0, 3):
res_warped[:, :, c] = (beta * dst[:, :, c] + alpha * src_warped[:, :, c])
return res_warped | [
"face_recognition_models.pose_predictor_model_location",
"face_recognition.face_locations",
"cv2.warpAffine",
"numpy.ones",
"dlib.rectangle",
"dlib.shape_predictor",
"numpy.array",
"numpy.stack",
"cv2.resize"
] | [((283, 338), 'face_recognition_models.pose_predictor_model_location', 'face_recognition_models.pose_predictor_model_location', ([], {}), '()\n', (336, 338), False, 'import face_recognition_models\n'), ((356, 402), 'dlib.shape_predictor', 'dlib.shape_predictor', (['predictor_68_point_model'], {}), '(predictor_68_point_model)\n', (376, 402), False, 'import dlib\n'), ((418, 974), 'numpy.array', 'numpy.array', (['[0.000213256, 0.0752622, 0.18113, 0.29077, 0.393397, 0.586856, 0.689483, \n 0.799124, 0.904991, 0.98004, 0.490127, 0.490127, 0.490127, 0.490127, \n 0.36688, 0.426036, 0.490127, 0.554217, 0.613373, 0.121737, 0.187122, \n 0.265825, 0.334606, 0.260918, 0.182743, 0.645647, 0.714428, 0.793132, \n 0.858516, 0.79751, 0.719335, 0.254149, 0.340985, 0.428858, 0.490127, \n 0.551395, 0.639268, 0.726104, 0.642159, 0.556721, 0.490127, 0.423532, \n 0.338094, 0.290379, 0.428096, 0.490127, 0.552157, 0.689874, 0.553364, \n 0.490127, 0.42689]'], {}), '([0.000213256, 0.0752622, 0.18113, 0.29077, 0.393397, 0.586856, \n 0.689483, 0.799124, 0.904991, 0.98004, 0.490127, 0.490127, 0.490127, \n 0.490127, 0.36688, 0.426036, 0.490127, 0.554217, 0.613373, 0.121737, \n 0.187122, 0.265825, 0.334606, 0.260918, 0.182743, 0.645647, 0.714428, \n 0.793132, 0.858516, 0.79751, 0.719335, 0.254149, 0.340985, 0.428858, \n 0.490127, 0.551395, 0.639268, 0.726104, 0.642159, 0.556721, 0.490127, \n 0.423532, 0.338094, 0.290379, 0.428096, 0.490127, 0.552157, 0.689874, \n 0.553364, 0.490127, 0.42689])\n', (429, 974), False, 'import numpy\n'), ((957, 1519), 'numpy.array', 'numpy.array', (['[0.106454, 0.038915, 0.0187482, 0.0344891, 0.0773906, 0.0773906, 0.0344891,\n 0.0187482, 0.038915, 0.106454, 0.203352, 0.307009, 0.409805, 0.515625, \n 0.587326, 0.609345, 0.628106, 0.609345, 0.587326, 0.216423, 0.178758, \n 0.179852, 0.231733, 0.245099, 0.244077, 0.231733, 0.179852, 0.178758, \n 0.216423, 0.244077, 0.245099, 0.780233, 0.745405, 0.727388, 0.742578, \n 0.727388, 0.745405, 0.780233, 0.864805, 0.902192, 0.909281, 0.902192, \n 0.864805, 0.784792, 0.778746, 0.785343, 0.778746, 0.784792, 0.824182, \n 0.831803, 0.824182]'], {}), '([0.106454, 0.038915, 0.0187482, 0.0344891, 0.0773906, 0.0773906,\n 0.0344891, 0.0187482, 0.038915, 0.106454, 0.203352, 0.307009, 0.409805,\n 0.515625, 0.587326, 0.609345, 0.628106, 0.609345, 0.587326, 0.216423, \n 0.178758, 0.179852, 0.231733, 0.245099, 0.244077, 0.231733, 0.179852, \n 0.178758, 0.216423, 0.244077, 0.245099, 0.780233, 0.745405, 0.727388, \n 0.742578, 0.727388, 0.745405, 0.780233, 0.864805, 0.902192, 0.909281, \n 0.902192, 0.864805, 0.784792, 0.778746, 0.785343, 0.778746, 0.784792, \n 0.824182, 0.831803, 0.824182])\n', (968, 1519), False, 'import numpy\n'), ((1505, 1552), 'numpy.stack', 'numpy.stack', (['[mean_face_x, mean_face_y]'], {'axis': '(1)'}), '([mean_face_x, mean_face_y], axis=1)\n', (1516, 1552), False, 'import numpy\n'), ((1941, 1979), 'face_recognition.face_locations', 'face_recognition.face_locations', (['frame'], {}), '(frame)\n', (1972, 1979), False, 'import face_recognition\n'), ((2775, 2815), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'mat', '(size, size)'], {}), '(image, mat, (size, size))\n', (2789, 2815), False, 'import cv2, os, glob, tqdm, sys\n'), ((3087, 3133), 'dlib.rectangle', 'dlib.rectangle', (['css[3]', 'css[0]', 'css[1]', 'css[2]'], {}), '(css[3], css[0], css[1], css[2])\n', (3101, 3133), False, 'import dlib\n'), ((3680, 3701), 'numpy.ones', 'numpy.ones', (['src.shape'], {}), '(src.shape)\n', (3690, 3701), False, 'import numpy\n'), ((3906, 3960), 
'cv2.warpAffine', 'cv2.warpAffine', (['src', 'mat', '(dst.shape[1], dst.shape[0])'], {}), '(src, mat, (dst.shape[1], dst.shape[0]))\n', (3920, 3960), False, 'import cv2, os, glob, tqdm, sys\n'), ((3816, 3874), 'cv2.warpAffine', 'cv2.warpAffine', (['imgMask', 'mat', '(dst.shape[1], dst.shape[0])'], {}), '(imgMask, mat, (dst.shape[1], dst.shape[0]))\n', (3830, 3874), False, 'import cv2, os, glob, tqdm, sys\n'), ((2477, 2513), 'cv2.resize', 'cv2.resize', (['face.image', '(size, size)'], {}), '(face.image, (size, size))\n', (2487, 2513), False, 'import cv2, os, glob, tqdm, sys\n')] |
import sys
import pandas as pd
# # ETL Pipeline Preparation
def load_data(messages_filepath, categories_filepath):
# load messages dataset
messages = pd.read_csv(messages_filepath)
# load categories dataset
categories = pd.read_csv(categories_filepath)
# ## Merge datasets.
# - Merge the messages and categories datasets using the common id
# - Assign this combined dataset to `df`, which will be cleaned in
# the following steps
df = pd.merge(messages, categories, on=['id'])
return df
def clean_data(df):
### Split `categories` into separate category columns.
# - Split the values in the `categories` column on the `;` character so
# that each value becomes a separate column.
# - Use the first row of categories dataframe to create column names for the categories data.
# - Rename columns of `categories` with new column names.
categories = df.categories.str.split(pat=';', expand=True)
# select the first row of the categories dataframe
row = categories.iloc[0]
    # use this row to extract a list of new column names for categories
    # by keeping the part of each entry before the '-' separator
    category_colnames = [item.split("-")[0] for item in row]
# rename the columns of `categories`
categories.columns = category_colnames
# ### Convert category values to just numbers 0 or 1.
for column in categories:
# set each value to be the last character of the string
categories[column] = categories[column].str[-1]
# convert column from string to numeric
categories[column] = categories[column].astype(int)
# ### Replace categories column in df with new category columns.
# Drop the categories column from the df dataframe since it is no longer needed.
# Concatenate df and categories data frames.
# drop the original categories column from `df`
df = df.drop(labels='categories', axis=1)
# concatenate the original dataframe with the new `categories` dataframe
    # join_axes was removed in newer pandas; reindex explicitly to keep df's row index
    df = pd.concat([df, categories], axis=1).reindex(df.index)
# ### Remove duplicates.
# Check how many duplicates are in this dataset.
# Drop the duplicates.
# Confirm duplicates were removed.
# drop duplicates
df = df.drop_duplicates()
return df
def save_data(df, database_filename):
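    # Write the cleaned dataframe to the 'MessagesCategories' table of an SQLite database, replacing any existing table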
from sqlalchemy import create_engine
engine = create_engine('sqlite:///'+ database_filename)
conn = engine.connect()
df.to_sql('MessagesCategories', engine, index=False, if_exists='replace')
conn.close()
engine.dispose()
def main():
if len(sys.argv) == 4:
messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
print('Loading data...\n MESSAGES: {}\n CATEGORIES: {}'
.format(messages_filepath, categories_filepath))
df = load_data(messages_filepath, categories_filepath)
print('Cleaning data...')
df = clean_data(df)
print('Saving data...\n DATABASE: {}'.format(database_filepath))
save_data(df, database_filepath)
print('Cleaned data saved to database!')
else:
print('Please provide the filepaths of the messages and categories '\
'datasets as the first and second argument respectively, as '\
'well as the filepath of the database to save the cleaned data '\
'to as the third argument. \n\nExample: python process_data.py '\
'disaster_messages.csv disaster_categories.csv '\
'DisasterResponse.db')
if __name__ == '__main__':
main() | [
"pandas.merge",
"pandas.concat",
"pandas.read_csv",
"sqlalchemy.create_engine"
] | [((165, 195), 'pandas.read_csv', 'pd.read_csv', (['messages_filepath'], {}), '(messages_filepath)\n', (176, 195), True, 'import pandas as pd\n'), ((248, 280), 'pandas.read_csv', 'pd.read_csv', (['categories_filepath'], {}), '(categories_filepath)\n', (259, 280), True, 'import pandas as pd\n'), ((494, 535), 'pandas.merge', 'pd.merge', (['messages', 'categories'], {'on': "['id']"}), "(messages, categories, on=['id'])\n", (502, 535), True, 'import pandas as pd\n'), ((2173, 2230), 'pandas.concat', 'pd.concat', (['[df, categories]'], {'axis': '(1)', 'join_axes': '[df.index]'}), '([df, categories], axis=1, join_axes=[df.index])\n', (2182, 2230), True, 'import pandas as pd\n'), ((2559, 2606), 'sqlalchemy.create_engine', 'create_engine', (["('sqlite:///' + database_filename)"], {}), "('sqlite:///' + database_filename)\n", (2572, 2606), False, 'from sqlalchemy import create_engine\n')] |
import datetime
from .store import *
def create_textset(name, description, source, url, filename):
"""
creates a text set object
:param name: name of the text set
:param filename: filename of the data
:return: a textset object
"""
# create object and control data
creation_date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
ts = TextSet(0, name, description, source, url, filename, creation_date)
# save and create objects related to the textset
ts.textset_id = str(incr_key_store('textset:counter'))
set_key_store('textset:%s:status' % ts.textset_id, 'created')
rpush_key_store('textset:list', ts.textset_id)
ts.finalize_creation()
ts.save()
return ts
def get_textset(textset_id):
"""
retrieves a textset object from its id
:param textset_id: id
:return: textset object
"""
d = get_key_store('textset:%s' % textset_id)
ds = TextSet(**d['init_data'])
ds.load(d['load_data'])
ds.status = get_key_store('textset:%s:status' % textset_id)
return ds
def get_textset_status(textset_id):
"""
retrieves the status of a textset from its id
:param textset_id: id
:return: status (string)
"""
return get_key_store('textset:%s:status' % textset_id)
def get_textset_list():
"""
get the list of all textsets
:return: list of textsets objects or empty list if error (eg. redis or environment not set)
"""
#try:
return [get_textset(textset_id) for textset_id in get_textset_ids()]
#except:
# return []
def get_textset_ids():
"""
get the list of ids all textsets
:return: list of ids
"""
return list_key_store('textset:list')
def update_textset(textset_id, name, description, source, url):
"""
update specific fields of the textset
:param textset_id: id of the textset
:param name: new name of the textset
:param description: new description of the textset
:param source: source of the textset
:param url: url of the textset
:return:
"""
ts = get_textset(textset_id)
ts.name = name
ts.description = description
ts.source = source
ts.url = url
ts.save()
def reset_textset(textset_id):
"""
reset the results
:param textset_id: id
:return:
"""
# removes entries
set_key_store('textset:%s:status' % textset_id, 'created')
def delete_textset(textset_id):
"""
deletes a textset and the results
:param textset_id: id
:return:
"""
# removes entries
del_key_store('textset:%s:status' % textset_id)
lrem_key_store('textset:list', textset_id)
# delete file
os.remove(get_data_folder() + '/texts' + '/' + str(textset_id) + '.txt')
class TextSet(object):
def __init__(self, textset_id, name, description, source, url, filename, creation_date):
self.textset_id = textset_id
self.name = name
self.description = description
self.url = url
self.source = source # url or file id
if filename == '':
raise ValueError('filename cannot be empty')
self.filename = filename
ext = filename.split('.')[-1].lower()
if ext not in ['txt']:
raise TypeError('unknown text format: use txt')
if not os.path.exists(filename):
raise ValueError('file %s not found' % filename)
self.size = 0
self.lines = 0
self.creation_date = creation_date
def finalize_creation(self):
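        # Read the source file, record its size and line count, and store a copy as <data_folder>/texts/<textset_id>.txt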
# import text
with open(self.filename, 'r') as f:
txt = f.readlines()
self.size = sum([len(s) for s in txt])
self.lines = len(txt)
folder = get_data_folder() + '/texts'
if not os.path.exists(folder):
os.makedirs(folder)
with open(folder + '/' + str(self.textset_id) + '.txt', 'w') as f:
for s in txt:
f.write(s + '\n')
def save(self):
store = {'init_data': {'textset_id': self.textset_id, 'name': self.name, 'description': self.description,
'source': self.source, 'url': self.url, 'filename': self.filename,
'creation_date': self.creation_date},
'load_data': {'size': self.size, 'lines': self.lines}}
set_key_store('textset:%s' % self.textset_id, store)
def load(self, store):
# reload data from json
for k in store.keys():
setattr(self, k, store[k])
| [
"datetime.datetime.now"
] | [((315, 338), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (336, 338), False, 'import datetime\n')] |